Compare commits


1 Commit

Author       SHA1        Message           Date
Terry Brady  cfc200f2c7  Update README.md  2018-06-22 14:33:35 -07:00
1826 changed files with 37537 additions and 108066 deletions

.dockerignore
View File

@@ -1,11 +0,0 @@
.git/
.idea/
.settings/
*/target/
dspace/modules/*/target/
Dockerfile.*
dspace/src/main/docker/dspace-postgres-pgcrypto
dspace/src/main/docker/dspace-postgres-pgcrypto-curl
dspace/src/main/docker/solr
dspace/src/main/docker/README.md
dspace/src/main/docker-compose/

.gitignore vendored
View File

@@ -41,4 +41,4 @@ nb-configuration.xml
 .DS_Store
 ##Ignore JRebel project configuration
 rebel.xml

.travis.yml
View File

@@ -1,15 +1,14 @@
 language: java
 sudo: false
-dist: trusty
 env:
     # Give Maven 1GB of memory to work with
     - MAVEN_OPTS=-Xmx1024M
 jdk:
-    # DS-3384 Oracle JDK has DocLint enabled by default.
+    # DS-3384 Oracle JDK 8 has DocLint enabled by default.
     # Let's use this to catch any newly introduced DocLint issues.
-    - oraclejdk11
+    - oraclejdk8
 ## Should we run into any problems with oraclejdk8 on Travis, we may try the following workaround.
 ## https://docs.travis-ci.com/user/languages/java#Testing-Against-Multiple-JDKs
@@ -19,6 +18,7 @@ jdk:
 #    packages:
 #    - oracle-java8-installer
+# Install prerequisites for building Mirage2 more rapidly
 before_install:
     # Remove outdated settings.xml from Travis builds. Workaround for https://github.com/travis-ci/travis-ci/issues/4629
     - rm ~/.m2/settings.xml
@@ -26,21 +26,19 @@ before_install:
 # Skip install stage, as we'll do it below
 install: "echo 'Skipping install stage, dependencies will be downloaded during build and test stages.'"

-# Build DSpace and run both Unit and Integration Tests
+# Two stage Build and Test
+# 1. Install & Unit Test APIs
+# 2. Assemble DSpace
 script:
-    # Summary of flags used (below):
+    # 1. [Install & Unit Test] Check source code licenses and run source code Unit Tests
     # license:check => Validate all source code license headers
     # -Dmaven.test.skip=false => Enable DSpace Unit Tests
     # -DskipITs=false => Enable DSpace Integration Tests
-    # -Pdspace-rest => Enable optional dspace-rest module as part of build
-    # -P !assembly => Skip assembly of "dspace-installer" directory (as it can be memory intensive)
+    # -P !assembly => Skip normal assembly (as it can be memory intensive)
     # -B => Maven batch/non-interactive mode (recommended for CI)
     # -V => Display Maven version info before build
     # -Dsurefire.rerunFailingTestsCount=2 => try again for flakey tests, and keep track of/report on number of retries
-    - "mvn clean install license:check -Dmaven.test.skip=false -DskipITs=false -Pdspace-rest -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"
+    - "mvn clean install license:check -Dmaven.test.skip=false -DskipITs=false -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"
+    # 2. [Assemble DSpace] Ensure overlay & assembly process works (from [src]/dspace/)
+    #    -P !assembly => SKIP the actual building of [src]/dspace/dspace-installer (as it can be memory intensive)
+    - "cd dspace && mvn package -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"
-
-# After a successful build and test (see 'script'), send code coverage reports to coveralls.io
-# These code coverage reports are generated by jacoco-maven-plugin (during test process above).
-after_success:
-    # Run "verify", enabling the "coveralls" profile. This sends our reports to coveralls.io (see coveralls-maven-plugin)
-    - "cd dspace && mvn verify -P coveralls"

Dockerfile
View File

@@ -1,63 +0,0 @@
# This image will be published as dspace/dspace
# See https://github.com/DSpace/DSpace/tree/master/dspace/src/main/docker for usage details
#
# This version is JDK11 compatible
# - tomcat:8-jdk11
# - ANT 1.10.7
# - maven:3-jdk-11 (see dspace-dependencies)
# - note: default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x
# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-7_x as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-install directory will be written to /install
RUN mkdir /install \
&& chown -Rv dspace: /install \
&& chown -Rv dspace: /app
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg
# Build DSpace (note: this build doesn't include the optional, deprecated "dspace-rest" webapp)
# Copy the dspace-install directory to /install. Clean up the build to keep the docker image small
RUN mvn package && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
# Step 2 - Run Ant Deploy
FROM tomcat:8-jdk11 as ant_build
ARG TARGET_DIR=dspace-installer
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.7
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
RUN mkdir $ANT_HOME && \
wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
RUN ant init_installation update_configs update_code update_webapps
# Step 3 - Run tomcat
# Create a new tomcat image that does not retain the build directory contents
FROM tomcat:8-jdk11
ENV DSPACE_INSTALL=/dspace
COPY --from=ant_build /dspace $DSPACE_INSTALL
EXPOSE 8080 8009
ENV JAVA_OPTS=-Xmx2000m
# Run the "server" webapp off the /server path (e.g. http://localhost:8080/server/)
RUN ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/server
# If you wish to run "server" webapp off the ROOT path, then comment out the above RUN, and uncomment the below RUN.
# You also MUST update the URL in dspace/src/main/docker/local.cfg
# Please note that server webapp should only run on one path at a time.
#RUN mv /usr/local/tomcat/webapps/ROOT /usr/local/tomcat/webapps/ROOT.bk && \
# ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/ROOT
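
For orientation, a minimal sketch of how this (since-removed) Dockerfile would be exercised from the repository root; the tag is illustrative, and a PostgreSQL database matching local.cfg must be reachable for DSpace to start:

```
# Build the image (tag is an assumption, mirroring the comment above)
docker build -t dspace/dspace:dspace-7_x .
# Run Tomcat with the "server" webapp at http://localhost:8080/server/
docker run -d -p 8080:8080 dspace/dspace:dspace-7_x
```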

Dockerfile.cli
View File

@@ -1,53 +0,0 @@
# This image will be published as dspace/dspace-cli
# See https://github.com/DSpace/DSpace/tree/master/dspace/src/main/docker for usage details
#
# This version is JDK11 compatible
# - openjdk:11
# - ANT 1.10.7
# - maven:3-jdk-11 (see dspace-dependencies)
# - note: default tag for branch: dspace/dspace-cli: dspace/dspace-cli:dspace-7_x
# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-7_x as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-install directory will be written to /install
RUN mkdir /install \
&& chown -Rv dspace: /install \
&& chown -Rv dspace: /app
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg
# Build DSpace. Copy the dspace-install directory to /install. Clean up the build to keep the docker image small
RUN mvn package && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
# Step 2 - Run Ant Deploy
FROM openjdk:11 as ant_build
ARG TARGET_DIR=dspace-installer
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.7
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
RUN mkdir $ANT_HOME && \
wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
RUN ant init_installation update_configs update_code
# Step 3 - Run jdk
# Create a new tomcat image that does not retain the build directory contents
FROM openjdk:11
ENV DSPACE_INSTALL=/dspace
COPY --from=ant_build /dspace $DSPACE_INSTALL
ENV JAVA_OPTS=-Xmx1000m

Dockerfile.dependencies
View File

@@ -1,27 +0,0 @@
# This image will be published as dspace/dspace-dependencies
# The purpose of this image is to make the build for dspace/dspace run faster
#
# This version is JDK11 compatible
# - maven:3-jdk-11
# Step 1 - Run Maven Build
FROM maven:3-jdk-11 as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
RUN useradd dspace \
&& mkdir /home/dspace \
&& chown -Rv dspace: /home/dspace
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg
# Trigger the installation of all maven dependencies
RUN mvn package
# Clear the contents of the /app directory (including all maven builds), so no artifacts remain.
# This ensures when dspace:dspace is built, it will just use the Maven local cache (.m2) for dependencies
USER root
RUN rm -rf /app/*
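
As a sketch of how this image fits the build chain: it must be built (and optionally pushed) before the other images, so their `FROM dspace/dspace-dependencies:dspace-7_x` line resolves. The file name and tag below are assumptions based on the image name in the comments:

```
docker build -f Dockerfile.dependencies -t dspace/dspace-dependencies:dspace-7_x .
docker push dspace/dspace-dependencies:dspace-7_x   # only if publishing
```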

Dockerfile.test
View File

@@ -1,72 +0,0 @@
# This image will be published as dspace/dspace
# See https://github.com/DSpace/DSpace/tree/master/dspace/src/main/docker for usage details
#
# This version is JDK11 compatible
# - tomcat:8-jdk11
# - ANT 1.10.7
# - maven:3-jdk-11 (see dspace-dependencies)
# - note: default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x-test
#
# This image is meant for TESTING/DEVELOPMENT ONLY as it deploys the old v6 REST API under HTTP (not HTTPS)
# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-7_x as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-install directory will be written to /install
RUN mkdir /install \
&& chown -Rv dspace: /install \
&& chown -Rv dspace: /app
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg
# Build DSpace (including the optional, deprecated "dspace-rest" webapp)
# Copy the dspace-install directory to /install. Clean up the build to keep the docker image small
RUN mvn package -Pdspace-rest && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
# Step 2 - Run Ant Deploy
FROM tomcat:8-jdk11 as ant_build
ARG TARGET_DIR=dspace-installer
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.7
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
RUN mkdir $ANT_HOME && \
wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
RUN ant init_installation update_configs update_code update_webapps
# Step 3 - Run tomcat
# Create a new tomcat image that does not retain the build directory contents
FROM tomcat:8-jdk11
ENV DSPACE_INSTALL=/dspace
COPY --from=ant_build /dspace $DSPACE_INSTALL
EXPOSE 8080 8009
ENV JAVA_OPTS=-Xmx2000m
# Run the "server" webapp off the /server path (e.g. http://localhost:8080/server/)
# and the v6.x (deprecated) REST API off the "/rest" path
RUN ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/server && \
ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest
# If you wish to run "server" webapp off the ROOT path, then comment out the above RUN, and uncomment the below RUN.
# You also MUST update the URL in dspace/src/main/docker/local.cfg
# Please note that server webapp should only run on one path at a time.
#RUN mv /usr/local/tomcat/webapps/ROOT /usr/local/tomcat/webapps/ROOT.bk && \
# ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/ROOT && \
# ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest
# Overwrite the v6.x (deprecated) REST API's web.xml, so that we can run it on HTTP (defaults to requiring HTTPS)
COPY dspace/src/main/docker/test/rest_web.xml $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml
RUN sed -i -e "s|\${dspace.dir}|$DSPACE_INSTALL|" $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml

View File

@@ -366,6 +366,7 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
 * FindBugs-Annotations (com.google.code.findbugs:annotations:3.0.1u2 - http://findbugs.sourceforge.net/)
 * MaxMind GeoIP Legacy API (com.maxmind.geoip:geoip-api:1.3.0 - https://github.com/maxmind/geoip-api-java)
 * JHighlight (com.uwyn:jhighlight:1.0 - https://jhighlight.dev.java.net/)
+* DSpace TM-Extractors Dependency (org.dspace.dependencies:dspace-tm-extractors:1.0.1 - http://projects.dspace.org/dspace-pom/dspace-tm-extractors)
 * A Hibernate O/RM Module (org.hibernate:hibernate-core:4.2.21.Final - http://hibernate.org)
 * A Hibernate O/RM Module (org.hibernate:hibernate-ehcache:4.2.21.Final - http://hibernate.org)
 * Hibernate Commons Annotations (org.hibernate.common:hibernate-commons-annotations:4.0.2.Final - http://hibernate.org)

README.md
View File

@@ -1,23 +1,26 @@
 # DSpace
+## NOTE: The rest-tutorial branch has been created to support the [DSpace 7 REST documentation](https://dspace-labs.github.io/DSpace7RestTutorial/walkthrough/intro)
+- This branch provides stable, referencable line numbers in code

 [![Build Status](https://travis-ci.org/DSpace/DSpace.png?branch=master)](https://travis-ci.org/DSpace/DSpace)

 [DSpace Documentation](https://wiki.duraspace.org/display/DSDOC/) |
 [DSpace Releases](https://github.com/DSpace/DSpace/releases) |
 [DSpace Wiki](https://wiki.duraspace.org/display/DSPACE/Home) |
 [Support](https://wiki.duraspace.org/display/DSPACE/Support)

 DSpace open source software is a turnkey repository application used by more than
 2,000 organizations and institutions worldwide to provide durable access to digital resources.
 For more information, visit http://www.dspace.org/

 ***
 :warning: **Work on DSpace 7 has begun on our `master` branch.** This means that there is temporarily NO user interface on this `master` branch. DSpace 7 will feature a new, unified [Angular](https://angular.io/) user interface, along with an enhanced, rebuilt REST API. The latest status of this work can be found on the [DSpace 7 UI Working Group](https://wiki.duraspace.org/display/DSPACE/DSpace+7+UI+Working+Group) page. Additionally, the codebases can be found in the following places:
-* DSpace 7 REST API work is occurring on the [`master` branch](https://github.com/DSpace/DSpace/tree/master/dspace-server-webapp) of this repository.
+* DSpace 7 REST API work is occurring on the [`master` branch](https://github.com/DSpace/DSpace/tree/master/dspace-spring-rest) of this repository.
 * The REST Contract is being documented at https://github.com/DSpace/Rest7Contract
 * DSpace 7 Angular UI work is occurring at https://github.com/DSpace/dspace-angular

 **If you would like to get involved in our DSpace 7 development effort, we welcome new contributors.** Just join one of our meetings or get in touch via Slack. See the [DSpace 7 UI Working Group](https://wiki.duraspace.org/display/DSPACE/DSpace+7+UI+Working+Group) wiki page for more info.

 **If you are looking for the ongoing maintenance work for DSpace 6 (or prior releases)**, you can find that work on the corresponding maintenance branch (e.g. [`dspace-6_x`](https://github.com/DSpace/DSpace/tree/dspace-6_x)) in this repository.
@@ -31,21 +34,18 @@ Past releases are all available via GitHub at https://github.com/DSpace/DSpace/r
 ## Documentation / Installation

 Documentation for each release may be viewed online or downloaded via our [Documentation Wiki](https://wiki.duraspace.org/display/DSDOC/).

 The latest DSpace Installation instructions are available at:
 https://wiki.duraspace.org/display/DSDOC6x/Installing+DSpace

 Please be aware that, as a Java web application, DSpace requires a database (PostgreSQL or Oracle)
 and a servlet container (usually Tomcat) in order to function.
 More information about these and all other prerequisites can be found in the Installation instructions above.

-## Running DSpace 7 in Docker
-See [Running DSpace 7 with Docker Compose](dspace/src/main/docker-compose/README.md)
-
 ## Contributing

 DSpace is a community built and supported project. We do not have a centralized development or support team,
 but have a dedicated group of volunteers who help us improve the software, documentation, resources, etc.
 We welcome contributions of any type. Here's a few basic guides that provide suggestions for contributing to DSpace:
@@ -71,65 +71,14 @@ Great Q&A is also available under the [DSpace tag on Stackoverflow](http://stack
 Additional support options are listed at https://wiki.duraspace.org/display/DSPACE/Support

 DSpace also has an active service provider network. If you'd rather hire a service provider to
 install, upgrade, customize or host DSpace, then we recommend getting in touch with one of our
 [Registered Service Providers](http://www.dspace.org/service-providers).

 ## Issue Tracker

 The DSpace Issue Tracker can be found at: https://jira.duraspace.org/projects/DS/summary

-## Testing
-
-### Running Tests
-
-By default, in DSpace, Unit Tests and Integration Tests are disabled. However, they are
-run automatically by [Travis CI](https://travis-ci.org/DSpace/DSpace/) for all Pull Requests and code commits.
-
-* How to run both Unit Tests (via `maven-surefire-plugin`) and Integration Tests (via `maven-failsafe-plugin`):
-  ```
-  # NOTE: while "mvn test" runs Unit Tests,
-  # Integration Tests only run for "verify" or "install" phases
-  mvn clean install -Dmaven.test.skip=false -DskipITs=false
-  ```
-* How to run just Unit Tests:
-  ```
-  mvn clean test -Dmaven.test.skip=false
-  ```
-* How to run a *single* Unit Test
-  ```
-  # Run all tests in a specific test class
-  # NOTE: testClassName is just the class name, do not include package
-  mvn clean test -Dmaven.test.skip=false -Dtest=[testClassName]
-
-  # Run one test method in a specific test class
-  mvn clean test -Dmaven.test.skip=false -Dtest=[testClassName]#[testMethodName]
-  ```
-* How to run Integration Tests (requires running Unit tests too)
-  ```
-  mvn clean verify -Dmaven.test.skip=false -DskipITs=false
-  ```
-* How to run a *single* Integration Test (requires running Unit tests too)
-  ```
-  # Run all integration tests in a specific test class
-  # NOTE: Integration Tests only run for "verify" or "install" phases
-  # NOTE: testClassName is just the class name, do not include package
-  mvn clean verify -Dmaven.test.skip=false -DskipITs=false -Dit.test=[testClassName]
-
-  # Run one test method in a specific test class
-  mvn clean verify -Dmaven.test.skip=false -DskipITs=false -Dit.test=[testClassName]#[testMethodName]
-  ```
-* How to run only tests of a specific DSpace module
-  ```
-  # Before you can run only one module's tests, other modules may need installing into your ~/.m2
-  cd [dspace-src]
-  mvn clean install
-
-  # Then, move into a module subdirectory, and run the test command
-  cd [dspace-src]/dspace-server-webapp
-  # Choose your test command from the lists above
-  ```
-
 ## License

 DSpace source code is freely available under a standard [BSD 3-Clause license](https://opensource.org/licenses/BSD-3-Clause).

docker-compose-cli.yml
View File

@@ -1,25 +0,0 @@
version: "3.7"
services:
dspace-cli:
image: "${DOCKER_OWNER:-dspace}/dspace-cli:${DSPACE_VER:-dspace-7_x}"
container_name: dspace-cli
build:
context: .
dockerfile: Dockerfile.cli
#environment:
volumes:
- ./dspace/src/main/docker-compose/local.cfg:/dspace/config/local.cfg
- assetstore:/dspace/assetstore
entrypoint: /dspace/bin/dspace
command: help
networks:
- dspacenet
tty: true
stdin_open: true
volumes:
assetstore:
networks:
dspacenet:
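
Because the service's entrypoint is `/dspace/bin/dspace` with a default command of `help`, a hypothetical invocation (assuming the file is saved as docker-compose-cli.yml at the repository root) would look like:

```
# Show the DSpace launcher help (the default command)
docker-compose -f docker-compose-cli.yml run --rm dspace-cli
# Pass any other launcher command instead, e.g.
docker-compose -f docker-compose-cli.yml run --rm dspace-cli version
```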

docker-compose.yml
View File

@@ -1,69 +0,0 @@
version: '3.7'
networks:
  dspacenet:
services:
  dspace:
    container_name: dspace
    image: "${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-dspace-7_x-test}"
    build:
      context: .
      dockerfile: Dockerfile.test
    depends_on:
      - dspacedb
    networks:
      dspacenet:
    ports:
      - published: 8080
        target: 8080
    stdin_open: true
    tty: true
    volumes:
      - assetstore:/dspace/assetstore
      - ./dspace/src/main/docker-compose/local.cfg:/dspace/config/local.cfg
    # Ensure that the database is ready BEFORE starting tomcat
    # 1. While a TCP connection to dspacedb port 5432 is not available, continue to sleep
    # 2. Then, run database migration to init database tables
    # 3. Finally, start Tomcat
    entrypoint:
      - /bin/bash
      - '-c'
      - |
        while (!</dev/tcp/dspacedb/5432) > /dev/null 2>&1; do sleep 1; done;
        /dspace/bin/dspace database migrate
        catalina.sh run
  dspacedb:
    container_name: dspacedb
    environment:
      PGDATA: /pgdata
    image: dspace/dspace-postgres-pgcrypto
    networks:
      dspacenet:
    ports:
      - published: 5432
        target: 5432
    stdin_open: true
    tty: true
    volumes:
      - pgdata:/pgdata
  dspacesolr:
    container_name: dspacesolr
    image: dspace/dspace-solr
    networks:
      dspacenet:
    ports:
      - published: 8983
        target: 8983
    stdin_open: true
    tty: true
    volumes:
      - solr_authority:/opt/solr/server/solr/authority/data
      - solr_oai:/opt/solr/server/solr/oai/data
      - solr_search:/opt/solr/server/solr/search/data
      - solr_statistics:/opt/solr/server/solr/statistics/data
volumes:
  assetstore:
  pgdata:
  solr_authority:
  solr_oai:
  solr_search:
  solr_statistics:
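
Assuming this file sits at the repository root as docker-compose.yml, the stack comes up with the standard Compose workflow; the dspace service blocks on the /dev/tcp wait loop above until PostgreSQL accepts connections, runs the database migration, then starts Tomcat:

```
docker-compose up -d           # start dspacedb, dspacesolr and dspace
docker-compose logs -f dspace  # watch the migration and Tomcat startup
docker-compose down            # stop the stack; named volumes keep the data
```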

dspace-api/pom.xml
View File

@@ -1,4 +1,5 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
     <modelVersion>4.0.0</modelVersion>
     <groupId>org.dspace</groupId>
     <artifactId>dspace-api</artifactId>
@@ -12,7 +13,7 @@
     <parent>
         <groupId>org.dspace</groupId>
         <artifactId>dspace-parent</artifactId>
-        <version>7.0-beta1</version>
+        <version>7.0-SNAPSHOT</version>
         <relativePath>..</relativePath>
     </parent>
@@ -50,33 +51,6 @@
                 <configuration>
                     <debug>true</debug>
                     <showDeprecation>true</showDeprecation>
-                    <annotationProcessorPaths>
-                        <!-- Enable Hibernate's Metamodel Generator to generate metadata model classes
-                             (ending in _ suffix) for more type-safe Criteria queries -->
-                        <path>
-                            <groupId>org.hibernate</groupId>
-                            <artifactId>hibernate-jpamodelgen</artifactId>
-                            <version>${hibernate.version}</version>
-                        </path>
-                        <!-- Enable JAXB -->
-                        <path>
-                            <groupId>javax.xml.bind</groupId>
-                            <artifactId>jaxb-api</artifactId>
-                            <version>${jaxb-api.version}</version>
-                        </path>
-                        <!-- Enable Commons Annotations -->
-                        <path>
-                            <groupId>javax.annotation</groupId>
-                            <artifactId>javax.annotation-api</artifactId>
-                            <version>${javax-annotation.version}</version>
-                        </path>
-                        <!-- Enable http://errorprone.info -->
-                        <path>
-                            <groupId>com.google.errorprone</groupId>
-                            <artifactId>error_prone_core</artifactId>
-                            <version>${errorprone.version}</version>
-                        </path>
-                    </annotationProcessorPaths>
                 </configuration>
             </plugin>
             <plugin>
@@ -107,8 +81,8 @@
                         <exclude>**/src/test/resources/**</exclude>
                         <exclude>**/src/test/data/**</exclude>
                         <exclude>**/.gitignore</exclude>
-                        <exclude>**/src/main/resources/rebel.xml</exclude>
                         <exclude>src/test/data/dspaceFolder/config/spiders/**</exclude>
+                        <exclude>src/main/java/org/apache/solr/handler/extraction/ExtractingParams.java</exclude>
                     </excludes>
                 </configuration>
             </plugin>
@@ -116,7 +90,7 @@
             <plugin>
                 <groupId>org.codehaus.mojo</groupId>
                 <artifactId>build-helper-maven-plugin</artifactId>
-                <version>3.0.0</version>
+                <version>1.9.1</version>
                 <executions>
                     <execution>
                         <phase>validate</phase>
@@ -184,6 +158,7 @@
                  install of DSpace, against which Tests can be run. -->
             <plugin>
                 <artifactId>maven-dependency-plugin</artifactId>
+                <version>2.8</version>
                 <configuration>
                     <outputDirectory>${project.build.directory}/testing</outputDirectory>
                     <artifactItems>
@@ -218,7 +193,7 @@
                  (see: http://gmaven.codehaus.org/Executing+Groovy+Code )
                  We are generating a OS-agnostic version (agnostic.build.dir) of
                  the ${project.build.directory} property (full path of target dir).
-                 This is needed by the Surefire & Failsafe plugins (see below)
+                 This is needed by the FileWeaver & Surefire plugins (see below)
                  to initialize the Unit Test environment's dspace.cfg file.
                  Otherwise, the Unit Test Framework will not work on Windows OS.
                  This Groovy code was mostly borrowed from:
@@ -227,17 +202,19 @@
             <plugin>
                 <groupId>org.codehaus.gmaven</groupId>
                 <artifactId>groovy-maven-plugin</artifactId>
+                <version>2.0</version>
                 <executions>
                     <execution>
                         <id>setproperty</id>
-                        <phase>initialize</phase>
+                        <phase>generate-test-resources
+                        </phase> <!-- XXX I think this should be 'initialize' - MHW -->
                         <goals>
                             <goal>execute</goal>
                         </goals>
                         <configuration>
                             <source>
                                 project.properties['agnostic.build.dir'] = project.build.directory.replace(File.separator, '/');
-                                log.info("Initializing Maven property 'agnostic.build.dir' to: {}", project.properties['agnostic.build.dir']);
+                                println("Initializing Maven property 'agnostic.build.dir' to: " + project.properties['agnostic.build.dir']);
                             </source>
                         </configuration>
                     </execution>
@@ -250,7 +227,6 @@
                 <configuration>
                     <systemPropertyVariables>
                         <!-- Specify the dspace.dir to use for test environment -->
-                        <!-- ${agnostic.build.dir} is set dynamically by groovy-maven-plugin above -->
                         <!-- This system property is loaded by AbstractDSpaceTest to initialize the test environment -->
                         <dspace.dir>${agnostic.build.dir}/testing/dspace/</dspace.dir>
                         <!-- Turn off any DSpace logging -->
@@ -259,13 +235,51 @@
                 </configuration>
             </plugin>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>xml-maven-plugin</artifactId>
+                <version>1.0.1</version>
+                <executions>
+                    <execution>
+                        <id>validate-ALL-xml-and-xsl</id>
+                        <phase>process-test-resources</phase>
+                        <goals>
+                            <goal>validate</goal>
+                        </goals>
+                    </execution>
+                </executions>
+                <configuration>
+                    <validationSets>
+                        <!-- validate ALL XML and XSL config files in the testing folder -->
+                        <validationSet>
+                            <dir>${agnostic.build.dir}/testing</dir>
+                            <includes>
+                                <include>**/*.xml</include>
+                                <include>**/*.xsl</include>
+                                <include>**/*.xconf</include>
+                            </includes>
+                        </validationSet>
+                        <!-- validate ALL XML and XSL files throughout the project -->
+                        <validationSet>
+                            <dir>${root.basedir}</dir>
+                            <includes>
+                                <include>**/*.xml</include>
+                                <include>**/*.xsl</include>
+                                <include>**/*.xmap</include>
+                            </includes>
+                        </validationSet>
+                    </validationSets>
+                </configuration>
+            </plugin>
             <!-- Run Integration Testing! This plugin just kicks off the tests (when enabled). -->
             <plugin>
                 <artifactId>maven-failsafe-plugin</artifactId>
                 <configuration>
                     <systemPropertyVariables>
                         <!-- Specify the dspace.dir to use for test environment -->
-                        <!-- ${agnostic.build.dir} is set dynamically by groovy-maven-plugin above -->
                         <dspace.dir>${agnostic.build.dir}/testing/dspace/</dspace.dir>
                         <!-- Turn off any DSpace logging -->
                         <dspace.log.init.disable>true</dspace.log.init.disable>
@@ -279,21 +293,25 @@
     </profiles>

     <dependencies>
+        <dependency>
+            <groupId>org.hibernate</groupId>
+            <artifactId>hibernate-core</artifactId>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.jboss.logging</groupId>
+                    <artifactId>jboss-logging</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
         <dependency>
             <groupId>org.hibernate</groupId>
             <artifactId>hibernate-ehcache</artifactId>
         </dependency>
         <dependency>
             <groupId>org.hibernate</groupId>
-            <artifactId>hibernate-jpamodelgen</artifactId>
+            <artifactId>hibernate-validator-cdi</artifactId>
+            <version>${hibernate-validator.version}</version>
         </dependency>
-        <dependency>
-            <groupId>org.hibernate.validator</groupId>
-            <artifactId>hibernate-validator-cdi</artifactId>
-            <version>${hibernate-validator.version}</version>
-        </dependency>
         <dependency>
             <groupId>org.springframework</groupId>
             <artifactId>spring-orm</artifactId>
@@ -308,16 +326,6 @@
             <groupId>org.dspace</groupId>
             <artifactId>handle</artifactId>
         </dependency>
-        <dependency>
-            <groupId>org.eclipse.jetty.aggregate</groupId>
-            <artifactId>jetty-all</artifactId>
-            <exclusions>
-                <exclusion>
-                    <artifactId>javax.servlet</artifactId>
-                    <groupId>org.eclipse.jetty.orbit</groupId>
-                </exclusion>
-            </exclusions>
-        </dependency>
         <dependency>
             <groupId>org.dspace</groupId>
             <artifactId>jargon</artifactId>
@@ -326,15 +334,37 @@
             <groupId>org.dspace</groupId>
             <artifactId>mets</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.dspace.dependencies</groupId>
+            <artifactId>dspace-tm-extractors</artifactId>
+        </dependency>
         <dependency>
             <groupId>org.apache.jena</groupId>
             <artifactId>apache-jena-libs</artifactId>
             <type>pom</type>
+            <exclusions>
+                <exclusion>
+                    <groupId>com.fasterxml.jackson.core</groupId>
+                    <artifactId>jackson-core</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.fasterxml.jackson.core</groupId>
+                    <artifactId>jackson-databind</artifactId>
+                </exclusion>
+            </exclusions>
         </dependency>
+        <dependency>
+            <groupId>commons-cli</groupId>
+            <artifactId>commons-cli</artifactId>
+        </dependency>
         <dependency>
             <groupId>commons-codec</groupId>
             <artifactId>commons-codec</artifactId>
         </dependency>
+        <dependency>
+            <groupId>commons-collections</groupId>
+            <artifactId>commons-collections</artifactId>
+        </dependency>
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-collections4</artifactId>
@@ -353,8 +383,8 @@
             <artifactId>commons-io</artifactId>
         </dependency>
         <dependency>
-            <groupId>org.apache.commons</groupId>
-            <artifactId>commons-lang3</artifactId>
+            <groupId>commons-lang</groupId>
+            <artifactId>commons-lang</artifactId>
         </dependency>
         <dependency>
             <groupId>org.apache.commons</groupId>
@@ -370,7 +400,7 @@
         </dependency>
         <dependency>
             <groupId>javax.servlet</groupId>
-            <artifactId>javax.servlet-api</artifactId>
+            <artifactId>servlet-api</artifactId>
             <scope>provided</scope>
         </dependency>
         <dependency>
@@ -387,6 +417,10 @@
             <groupId>org.jdom</groupId>
             <artifactId>jdom</artifactId>
         </dependency>
+        <dependency>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+        </dependency>
         <dependency>
             <groupId>oro</groupId>
             <artifactId>oro</artifactId>
@@ -399,6 +433,18 @@
             <groupId>org.apache.pdfbox</groupId>
             <artifactId>fontbox</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.bouncycastle</groupId>
+            <artifactId>bcprov-jdk15</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.bouncycastle</groupId>
+            <artifactId>bcmail-jdk15</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.poi</groupId>
+            <artifactId>poi</artifactId>
+        </dependency>
         <dependency>
             <groupId>org.apache.poi</groupId>
             <artifactId>poi-scratchpad</artifactId>
@@ -418,6 +464,12 @@
         <dependency>
             <groupId>xerces</groupId>
             <artifactId>xercesImpl</artifactId>
+            <exclusions>
+                <exclusion>
+                    <groupId>xml-apis</groupId>
+                    <artifactId>xml-apis</artifactId>
+                </exclusion>
+            </exclusions>
         </dependency>
         <dependency>
             <groupId>xml-apis</groupId>
@@ -439,6 +491,11 @@
             <groupId>org.dspace</groupId>
             <artifactId>dspace-services</artifactId>
         </dependency>
+        <dependency> <!-- Keep jmockit before junit -->
+            <groupId>org.jmockit</groupId>
+            <artifactId>jmockit</artifactId>
+            <scope>test</scope>
+        </dependency>
         <dependency>
             <groupId>junit</groupId>
             <artifactId>junit</artifactId>
@@ -459,38 +516,69 @@
             <artifactId>mockito-core</artifactId>
             <scope>test</scope>
         </dependency>
-        <dependency>
-            <groupId>org.springframework</groupId>
-            <artifactId>spring-test</artifactId>
-            <scope>test</scope>
-        </dependency>
         <dependency>
             <groupId>org.rometools</groupId>
             <artifactId>rome-modules</artifactId>
             <version>1.0</version>
         </dependency>
+        <dependency>
+            <groupId>gr.ekt.bte</groupId>
+            <artifactId>bte-core</artifactId>
+            <version>0.9.3.5</version>
+            <exclusions>
+                <!-- A more recent version is retrieved from another dependency -->
+                <exclusion>
+                    <groupId>org.mockito</groupId>
+                    <artifactId>mockito-core</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
         <dependency>
             <groupId>gr.ekt.bte</groupId>
             <artifactId>bte-io</artifactId>
             <version>0.9.3.5</version>
             <exclusions>
+                <!-- A more recent version is retrieved from another dependency -->
                 <exclusion>
-                    <groupId>net.bytebuddy</groupId>
-                    <artifactId>byte-buddy</artifactId>
+                    <groupId>org.apache.commons</groupId>
+                    <artifactId>commons-lang3</artifactId>
+                </exclusion>
+                <!-- A more recent version is retrieved from another dependency -->
+                <exclusion>
+                    <groupId>org.mockito</groupId>
+                    <artifactId>mockito-core</artifactId>
                 </exclusion>
             </exclusions>
         </dependency>
+        <dependency>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpcore</artifactId>
+        </dependency>
         <dependency>
             <groupId>org.apache.httpcomponents</groupId>
             <artifactId>httpclient</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.apache.solr</groupId>
+            <artifactId>solr-solrj</artifactId>
+            <version>${solr.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>jcl-over-slf4j</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-api</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
         <dependency>
-            <groupId>org.apache.solr</groupId>
-            <artifactId>solr-cell</artifactId>
-            <version>${solr.client.version}</version>
+            <groupId>commons-configuration</groupId>
+            <artifactId>commons-configuration</artifactId>
         </dependency>
         <dependency>
             <groupId>com.maxmind.geoip2</groupId>
             <artifactId>geoip2</artifactId>
@@ -506,6 +594,12 @@
             <version>2.1.7</version>
         </dependency>
+        <dependency>
+            <groupId>org.apache.lucene</groupId>
+            <artifactId>lucene-core</artifactId>
+            <version>4.10.4</version>
+        </dependency>
         <dependency>
             <groupId>com.coverity.security</groupId>
             <artifactId>coverity-escapers</artifactId>
@@ -522,6 +616,7 @@
         <dependency>
             <groupId>com.google.guava</groupId>
             <artifactId>guava</artifactId>
+            <version>19.0</version>
         </dependency>
@@ -530,6 +625,11 @@
             <artifactId>postgresql</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+        </dependency>
         <dependency>
             <groupId>jdbm</groupId>
             <artifactId>jdbm</artifactId>
@@ -591,44 +691,19 @@
             <type>jar</type>
         </dependency>
-        <!-- JAXB API and implementation (no longer bundled as of Java 11) -->
-        <dependency>
-            <groupId>javax.xml.bind</groupId>
-            <artifactId>jaxb-api</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.glassfish.jaxb</groupId>
-            <artifactId>jaxb-runtime</artifactId>
-        </dependency>
-        <!-- Apache Axiom -->
         <dependency>
             <groupId>org.apache.ws.commons.axiom</groupId>
             <artifactId>axiom-impl</artifactId>
-            <version>${axiom.version}</version>
-            <exclusions>
-                <!-- Exclude Geronimo as it is NOT necessary when using javax.activation (which we use)
-                     See: https://ws.apache.org/axiom/userguide/ch04.html#d0e732 -->
-                <exclusion>
-                    <groupId>org.apache.geronimo.specs</groupId>
-                    <artifactId>*</artifactId>
-                </exclusion>
-            </exclusions>
+            <!-- NOTE: SWORDv2 needs 1.2.14, required by Abdera: https://abdera.apache.org/ -->
+            <version>1.2.14</version>
         </dependency>
         <dependency>
             <groupId>org.apache.ws.commons.axiom</groupId>
             <artifactId>axiom-api</artifactId>
-            <version>${axiom.version}</version>
-            <exclusions>
-                <!-- Exclude Geronimo as it is NOT necessary when using javax.activation (which we use)
-                     See: https://ws.apache.org/axiom/userguide/ch04.html#d0e732 -->
-                <exclusion>
-                    <groupId>org.apache.geronimo.specs</groupId>
-                    <artifactId>*</artifactId>
-                </exclusion>
-            </exclusions>
+            <!-- NOTE: SWORDv2 needs 1.2.14, required by Abdera: https://abdera.apache.org/ -->
+            <version>1.2.14</version>
         </dependency>
         <dependency>
             <groupId>org.glassfish.jersey.core</groupId>
             <artifactId>jersey-client</artifactId>
@@ -639,54 +714,26 @@
             <groupId>com.amazonaws</groupId>
             <artifactId>aws-java-sdk-s3</artifactId>
             <version>1.10.50</version>
-            <exclusions>
-                <exclusion>
-                    <groupId>joda-time</groupId>
-                    <artifactId>joda-time</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.fasterxml.jackson.core</groupId>
-                    <artifactId>jackson-databind</artifactId>
-                </exclusion>
-            </exclusions>
         </dependency>
-        <!-- For ORCID v2 integration -->
+        <!-- S3 also wanted jackson... -->
         <dependency>
-            <groupId>org.dspace</groupId>
-            <artifactId>orcid-jaxb-api</artifactId>
-            <version>2.1.0</version>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-core</artifactId>
         </dependency>
         <dependency>
-            <groupId>org.json</groupId>
-            <artifactId>json</artifactId>
-            <version>20180130</version>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-databind</artifactId>
         </dependency>
-        <!-- Used for Solr core export/import -->
-        <dependency>
-            <groupId>com.opencsv</groupId>
-            <artifactId>opencsv</artifactId>
-            <version>4.5</version>
-        </dependency>
-        <!-- Email templating -->
-        <dependency>
-            <groupId>org.apache.velocity</groupId>
-            <artifactId>velocity-engine-core</artifactId>
-            <version>2.0</version>
-            <type>jar</type>
-        </dependency>
-        <dependency>
-            <groupId>org.xmlunit</groupId>
-            <artifactId>xmlunit-matchers</artifactId>
-            <version>2.6.3</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.hibernate.javax.persistence</groupId>
-            <artifactId>hibernate-jpa-2.1-api</artifactId>
-            <version>1.0.0.Final</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.bcel</groupId>
-            <artifactId>bcel</artifactId>
-            <version>6.4.0</version>
-        </dependency>
     </dependencies>
 </project>

dspace-api/src/main/java/org/apache/solr/handler/extraction/ExtractingParams.java
View File

@@ -0,0 +1,163 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.handler.extraction;

/**
 * The various Solr Parameters names to use when extracting content.
 **/
public interface ExtractingParams {

    /**
     * Map all generated attribute names to field names with lowercase and underscores.
     */
    public static final String LOWERNAMES = "lowernames";

    /**
     * If true, ignore TikaException (give up extracting text, but still index metadata).
     */
    public static final String IGNORE_TIKA_EXCEPTION = "ignoreTikaException";

    /**
     * The param prefix for mapping Tika metadata to Solr fields.
     * <p>
     * To map a field, add a name like:
     * <pre>fmap.title=solr.title</pre>
     *
     * In this example, the tika "title" metadata value will be added to a Solr field named "solr.title"
     */
    public static final String MAP_PREFIX = "fmap.";

    /**
     * The boost value for the name of the field. The boost can be specified by a name mapping.
     * <p>
     * For example
     * <pre>
     * map.title=solr.title
     * boost.solr.title=2.5
     * </pre>
     * will boost the solr.title field for this document by 2.5
     */
    public static final String BOOST_PREFIX = "boost.";

    /**
     * Pass in literal values to be added to the document, as in
     * <pre>
     * literal.myField=Foo
     * </pre>
     */
    public static final String LITERALS_PREFIX = "literal.";

    /**
     * Restrict the extracted parts of a document to be indexed
     * by passing in an XPath expression. All content that satisfies the XPath expr.
     * will be passed to the {@link org.apache.solr.handler.extraction.SolrContentHandler}.
     * <p>
     * See Tika's docs for what the extracted document looks like.
     *
     * @see #CAPTURE_ELEMENTS
     */
    public static final String XPATH_EXPRESSION = "xpath";

    /**
     * Only extract and return the content, do not index it.
     */
    public static final String EXTRACT_ONLY = "extractOnly";

    /**
     * Content output format if extractOnly is true. Default is "xml", alternative is "text".
     */
    public static final String EXTRACT_FORMAT = "extractFormat";

    /**
     * Capture attributes separately according to the name of the element, instead of just adding them to the string
     * buffer
     */
    public static final String CAPTURE_ATTRIBUTES = "captureAttr";

    /**
     * Literal field values will by default override other values such as metadata and content. Set this to false to
     * revert to pre-4.0 behaviour
     */
    public static final String LITERALS_OVERRIDE = "literalsOverride";

    /**
     * Capture the specified fields (and everything included below it that isn't captured by some other capture field)
     * separately from the default. This is different
     * than the case of passing in an XPath expression.
     * <p>
     * The Capture field is based on the localName returned to the
     * {@link org.apache.solr.handler.extraction.SolrContentHandler}
     * by Tika, not to be confused by the mapped field. The field name can then
     * be mapped into the index schema.
     * <p>
     * For instance, a Tika document may look like:
     * <pre>
     * &lt;html&gt;
     *   ...
     *   &lt;body&gt;
     *     &lt;p&gt;some text here. &lt;div&gt;more text&lt;/div&gt;&lt;/p&gt;
     *     Some more text
     *   &lt;/body&gt;
     * </pre>
     * By passing in the p tag, you could capture all P tags separately from the rest of the text.
     * Thus, in the example, the capture of the P tag would be: "some text here. more text"
     */
    public static final String CAPTURE_ELEMENTS = "capture";

    /**
     * The type of the stream. If not specified, Tika will use mime type detection.
     */
    public static final String STREAM_TYPE = "stream.type";

    /**
     * Optional. The file name. If specified, Tika can take this into account while
     * guessing the MIME type.
     */
    public static final String RESOURCE_NAME = "resource.name";

    /**
     * Optional. The password for this resource. Will be used instead of the rule based password lookup mechanisms
     */
    public static final String RESOURCE_PASSWORD = "resource.password";

    /**
     * Optional. If specified, the prefix will be prepended to all Metadata, such that it would be possible
     * to setup a dynamic field to automatically capture it
     */
    public static final String UNKNOWN_FIELD_PREFIX = "uprefix";

    /**
     * Optional. If specified and the name of a potential field cannot be determined, the default Field specified
     * will be used instead.
     */
    public static final String DEFAULT_FIELD = "defaultField";

    /**
     * Optional. If specified, loads the file as a source for password lookups for Tika encrypted documents.
     * <p>
     * File format is Java properties format with one key=value per line.
     * The key is evaluated as a regex against the file name, and the value is the password.
     * The rules are evaluated top-bottom, i.e. the first match will be used.
     * If you want a fallback password to be always used, supply a .*=&lt;defaultmypassword&gt; at the end.
     */
    public static final String PASSWORD_MAP_FILE = "passwordsFile";
}
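
To make the parameter names above concrete, here is a hedged example of sending a file to Solr's extraction handler with curl, assuming a core named `search` whose solrconfig.xml registers the handler at /update/extract; the field names are illustrative:

```
curl "http://localhost:8983/solr/search/update/extract?literal.id=doc1&lowernames=true&fmap.content=fulltext&uprefix=attr_&commit=true" \
     -F "myfile=@example.pdf"
```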

CommunityFiliator.java
View File

@@ -17,7 +17,7 @@ import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.PosixParser;
-import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.collections.CollectionUtils;
 import org.dspace.authorize.AuthorizeException;
 import org.dspace.content.Community;
 import org.dspace.content.factory.ContentServiceFactory;
@@ -180,9 +180,13 @@ public class CommunityFiliator {
         // second test - circularity: parent's parents can't include proposed
         // child
         List<Community> parentDads = parent.getParentCommunities();
-        if (parentDads.contains(child)) {
-            System.out.println("Error, circular parentage - child is parent of parent");
-            System.exit(1);
+        for (int i = 0; i < parentDads.size(); i++) {
+            if (parentDads.get(i).getID().equals(child.getID())) {
+                System.out
+                    .println("Error, circular parentage - child is parent of parent");
+                System.exit(1);
+            }
         }

         // everything's OK
@@ -206,15 +210,26 @@
         throws SQLException, AuthorizeException, IOException {
         // verify that child is indeed a child of parent
         List<Community> parentKids = parent.getSubcommunities();
-        if (!parentKids.contains(child)) {
-            System.out.println("Error, child community not a child of parent community");
+        boolean isChild = false;
+
+        for (int i = 0; i < parentKids.size(); i++) {
+            if (parentKids.get(i).getID().equals(child.getID())) {
+                isChild = true;
+                break;
+            }
+        }
+
+        if (!isChild) {
+            System.out
+                .println("Error, child community not a child of parent community");
             System.exit(1);
         }

         // OK remove the mappings - but leave the community, which will become
         // top-level
-        child.removeParentCommunity(parent);
-        parent.removeSubCommunity(child);
+        child.getParentCommunities().remove(parent);
+        parent.getSubcommunities().remove(child);
         communityService.update(c, child);
         communityService.update(c, parent);

View File

@@ -15,7 +15,7 @@ import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.PosixParser;
-import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang.StringUtils;
 import org.dspace.core.ConfigurationManager;
 import org.dspace.core.Context;
 import org.dspace.core.I18nUtil;

MetadataImporter.java
View File

@@ -21,7 +21,6 @@ import org.apache.xpath.XPathAPI;
 import org.dspace.authorize.AuthorizeException;
 import org.dspace.content.MetadataField;
 import org.dspace.content.MetadataSchema;
-import org.dspace.content.MetadataSchemaEnum;
 import org.dspace.content.NonUniqueMetadataException;
 import org.dspace.content.factory.ContentServiceFactory;
 import org.dspace.content.service.MetadataFieldService;
@@ -249,7 +248,7 @@ public class MetadataImporter {
         // If the schema is not provided default to DC
         if (schema == null) {
-            schema = MetadataSchemaEnum.DC.getName();
+            schema = MetadataSchema.DC_SCHEMA;
         }

RegistryLoader.java
View File

@@ -17,7 +17,7 @@ import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException; import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerException;
import org.apache.logging.log4j.Logger; import org.apache.log4j.Logger;
import org.apache.xpath.XPathAPI; import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.BitstreamFormat; import org.dspace.content.BitstreamFormat;
@@ -47,7 +47,7 @@ public class RegistryLoader {
     /**
      * log4j category
      */
-    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(RegistryLoader.class);
+    private static Logger log = Logger.getLogger(RegistryLoader.class);

     protected static BitstreamFormatService bitstreamFormatService = ContentServiceFactory.getInstance()
             .getBitstreamFormatService();
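
The single changed Logger line above is the whole story of this file's logging change: Log4j 2 hands out loggers through org.apache.logging.log4j.LogManager, while Log4j 1.x exposes a static factory on Logger itself. A minimal sketch of the 1.x idiom this changeset settles on (the class name is illustrative):

    import org.apache.log4j.Logger;

    class LoggingExample {
        // Log4j 1.x static factory; the Log4j 2 equivalent would be
        // org.apache.logging.log4j.LogManager.getLogger(LoggingExample.class).
        private static final Logger log = Logger.getLogger(LoggingExample.class);

        public static void main(String[] args) {
            log.info("logger obtained via the 1.x static factory");
        }
    }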

View File

@@ -7,16 +7,12 @@
  */
 package org.dspace.administer;

-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileWriter;
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.PrintWriter;
 import java.sql.SQLException;
 import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
@@ -25,18 +21,12 @@ import javax.xml.transform.TransformerException;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.DefaultParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
-import org.apache.commons.cli.ParseException;
+import org.apache.commons.cli.PosixParser;
 import org.apache.xpath.XPathAPI;
 import org.dspace.authorize.AuthorizeException;
 import org.dspace.content.Collection;
 import org.dspace.content.Community;
-import org.dspace.content.Item;
-import org.dspace.content.MetadataSchemaEnum;
-import org.dspace.content.MetadataValue;
 import org.dspace.content.factory.ContentServiceFactory;
 import org.dspace.content.service.CollectionService;
 import org.dspace.content.service.CommunityService;
@@ -44,7 +34,6 @@ import org.dspace.core.Context;
 import org.dspace.eperson.factory.EPersonServiceFactory;
 import org.dspace.eperson.service.EPersonService;
 import org.jdom.Element;
-import org.jdom.output.Format;
 import org.jdom.output.XMLOutputter;
 import org.w3c.dom.Document;
 import org.w3c.dom.Node;
@@ -56,51 +45,44 @@ import org.xml.sax.SAXException;
  * an XML file.
  *
  * The XML file structure needs to be:
- * <pre>{@code
+ * {@code
  * <import_structure>
  * <community>
  * <name>....</name>
  * <community>...</community>
  * <collection>
  * <name>....</name>
  * </collection>
  * </community>
  * </import_structure>
- * }</pre>
- * <p>
- * It can be arbitrarily deep, and supports all the metadata elements
+ * }
+ * it can be arbitrarily deep, and supports all the metadata elements
  * that make up the community and collection metadata. See the system
- * documentation for more details.
+ * documentation for more details
  *
  * @author Richard Jones
  */
 public class StructBuilder {
-    /** Name of the root element for the document to be imported. */
-    static final String INPUT_ROOT = "import_structure";
-
-    /*
-     * Name of the root element for the document produced by importing.
-     * Community and collection elements are annotated with their identifiers.
-     */
-    static final String RESULT_ROOT = "imported_structure";
+    /**
+     * the output xml document which will contain updated information about the
+     * imported structure
+     */
+    private static org.jdom.Document xmlOutput = new org.jdom.Document(new Element("imported_structure"));

     /**
-     * A table to hold metadata for the collection being worked on.
+     * a hashtable to hold metadata for the collection being worked on
      */
-    private static final Map<String, String> collectionMap = new HashMap<>();
+    private static Map<String, String> collectionMap = new HashMap<String, String>();

     /**
-     * A table to hold metadata for the community being worked on.
+     * a hashtable to hold metadata for the community being worked on
      */
-    private static final Map<String, String> communityMap = new HashMap<>();
+    private static Map<String, String> communityMap = new HashMap<String, String>();

-    protected static CommunityService communityService
-            = ContentServiceFactory.getInstance().getCommunityService();
-    protected static CollectionService collectionService
-            = ContentServiceFactory.getInstance().getCollectionService();
-    protected static EPersonService ePersonService
-            = EPersonServiceFactory.getInstance().getEPersonService();
+    protected static CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
+    protected static CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
+    protected static EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();

     /**
      * Default constructor
@@ -109,156 +91,63 @@ public class StructBuilder {
     /**
      * Main method to be run from the command line to import a structure into
-     * DSpacee or export existing structure to a file.The command is of the form:
+     * DSpace
      *
-     * <p>{@code StructBuilder -f [XML source] -e [administrator email] -o [output file]}
+     * This is of the form:
      *
-     * <p>to import, or
+     * {@code StructBuilder -f [xml source] -e [administrator email] -o [output file]}
      *
-     * <p>{@code StructBuilder -x -e [administrator email] -o [output file]}</p>
-     *
-     * <p>to export. The output will contain exactly the same as the source XML
-     * document, but with the Handle for each imported item added as an attribute.
-     *
-     *
-     * @param argv command line arguments.
-     * @throws ParserConfigurationException passed through.
-     * @throws SQLException passed through.
-     * @throws FileNotFoundException if input or output could not be opened.
-     * @throws TransformerException if the input document is invalid.
+     * The output file will contain exactly the same as the source xml document, but
+     * with the handle for each imported item added as an attribute.
+     *
+     * @param argv the command line arguments given
+     * @throws Exception if an error occurs
      */
     public static void main(String[] argv)
-            throws ParserConfigurationException, SQLException,
-            FileNotFoundException, IOException, TransformerException {
-        // Define command line options.
+        throws Exception {
+        CommandLineParser parser = new PosixParser();
+
         Options options = new Options();
-        options.addOption("h", "help", false, "Print this help message.");
-        options.addOption("?", "help");
-        options.addOption("x", "export", false, "Export the current structure as XML.");
-        options.addOption(Option.builder("e").longOpt("eperson")
-                .desc("User who is manipulating the repository's structure.")
-                .hasArg().argName("eperson").required().build());
-        options.addOption(Option.builder("f").longOpt("file")
-                .desc("File of new structure information.")
-                .hasArg().argName("input").build());
-        options.addOption(Option.builder("o").longOpt("output")
-                .desc("File to receive the structure map ('-' for standard out).")
-                .hasArg().argName("output").required().build());
-
-        // Parse the command line.
-        CommandLineParser parser = new DefaultParser();
-        CommandLine line = null;
-        try {
-            line = parser.parse(options, argv);
-        } catch (ParseException ex) {
-            System.err.println(ex.getMessage());
-            usage(options);
-            System.exit(1);
-        }
+
+        options.addOption("f", "file", true, "file");
+        options.addOption("e", "eperson", true, "eperson");
+        options.addOption("o", "output", true, "output");
+
+        CommandLine line = parser.parse(options, argv);
+
+        String file = null;
+        String eperson = null;
+        String output = null;
+
+        if (line.hasOption('f')) {
+            file = line.getOptionValue('f');
         }

-        // If the user asked for help, give it and exit.
-        if (line.hasOption('h') || line.hasOption('?')) {
-            giveHelp(options);
+        if (line.hasOption('e')) {
+            eperson = line.getOptionValue('e');
+        }
+
+        if (line.hasOption('o')) {
+            output = line.getOptionValue('o');
+        }
+
+        if (output == null || eperson == null || file == null) {
+            usage();
             System.exit(0);
         }

-        // Otherwise, analyze the command.
-        // Must be import or export.
-        if (!(line.hasOption('f') || line.hasOption('x'))) {
-            giveHelp(options);
-            System.exit(1);
-        }
-
-        // Open the output stream.
-        String output = line.getOptionValue('o');
-        OutputStream outputStream;
-        if ("-".equals(output)) {
-            outputStream = System.out;
-        } else {
-            outputStream = new FileOutputStream(output);
-        }
-
         // create a context
         Context context = new Context();

-        // set the context.
-        String eperson = line.getOptionValue('e');
-        try {
-            context.setCurrentUser(ePersonService.findByEmail(context, eperson));
-        } catch (SQLException ex) {
-            System.err.format("That user could not be found: %s%n", ex.getMessage());
-            System.exit(1);
-        }
-
-        // Export? Import?
-        if (line.hasOption('x')) { // export
-            exportStructure(context, outputStream);
-        } else { // Must be import
-            String input = line.getOptionValue('f');
-            if (null == input) {
-                usage(options);
-                System.exit(1);
-            }
-
-            InputStream inputStream;
-            if ("-".equals(input)) {
-                inputStream = System.in;
-            } else {
-                inputStream = new FileInputStream(input);
-            }
-
-            importStructure(context, inputStream, outputStream);
-            // save changes from import
-            context.complete();
-        }
-        System.exit(0);
-    }
-
-    /**
-     * Import new Community/Collection structure.
-     *
-     * @param context
-     * @param input XML which describes the new communities and collections.
-     * @param output input, annotated with the new objects' identifiers.
-     * @throws IOException
-     * @throws ParserConfigurationException
-     * @throws SAXException
-     * @throws TransformerException
-     * @throws SQLException
-     */
-    static void importStructure(Context context, InputStream input, OutputStream output)
-        throws IOException, ParserConfigurationException, SQLException, TransformerException {
+        // set the context
+        context.setCurrentUser(ePersonService.findByEmail(context, eperson));

         // load the XML
-        Document document = null;
-        try {
-            document = loadXML(input);
-        } catch (IOException ex) {
-            System.err.format("The input document could not be read: %s%n", ex.getMessage());
-            System.exit(1);
-        } catch (SAXException ex) {
-            System.err.format("The input document could not be parsed: %s%n", ex.getMessage());
-            System.exit(1);
-        }
+        Document document = loadXML(file);

         // run the preliminary validation, to be sure that the the XML document
-        // is properly structured.
-        try {
-            validate(document);
-        } catch (TransformerException ex) {
-            System.err.format("The input document is invalid: %s%n", ex.getMessage());
-            System.exit(1);
-        }
-
-        // Check for 'identifier' attributes -- possibly output by this class.
-        NodeList identifierNodes = XPathAPI.selectNodeList(document, "//*[@identifier]");
-        if (identifierNodes.getLength() > 0) {
-            System.err.println("The input document has 'identifier' attributes, which will be ignored.");
-        }
+        // is properly structured
+        validate(document);

         // load the mappings into the member variable hashmaps
         communityMap.put("name", "name");
@@ -275,190 +164,62 @@ public class StructBuilder {
collectionMap.put("license", "license"); collectionMap.put("license", "license");
collectionMap.put("provenance", "provenance_description"); collectionMap.put("provenance", "provenance_description");
Element[] elements = new Element[]{}; // get the top level community list
try { NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");
// get the top level community list
NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");
// run the import starting with the top level communities // run the import starting with the top level communities
elements = handleCommunities(context, first, null); Element[] elements = handleCommunities(context, first, null);
} catch (TransformerException ex) {
System.err.format("Input content not understood: %s%n", ex.getMessage());
System.exit(1);
} catch (AuthorizeException ex) {
System.err.format("Not authorized: %s%n", ex.getMessage());
System.exit(1);
}
// generate the output // generate the output
final Element root = new Element(RESULT_ROOT); Element root = xmlOutput.getRootElement();
for (int i = 0; i < elements.length; i++) {
for (Element element : elements) { root.addContent(elements[i]);
root.addContent(element);
} }
// finally write the string into the output file. // finally write the string into the output file
final org.jdom.Document xmlOutput = new org.jdom.Document(root);
try { try {
new XMLOutputter().output(xmlOutput, output); BufferedWriter out = new BufferedWriter(new FileWriter(output));
out.write(new XMLOutputter().outputString(xmlOutput));
out.close();
} catch (IOException e) { } catch (IOException e) {
System.out.printf("Unable to write to output file %s: %s%n", System.out.println("Unable to write to output file " + output);
output, e.getMessage()); System.exit(0);
System.exit(1);
} }
context.complete();
} }
     /**
-     * Add a single community, and its children, to the Document.
-     *
-     * @param community
-     * @return a fragment representing this Community.
+     * Output the usage information
      */
-    private static Element exportACommunity(Community community) {
-        // Export this Community.
-        Element element = new Element("community");
-        element.setAttribute("identifier", community.getHandle());
-        element.addContent(new Element("name").setText(community.getName()));
-        element.addContent(new Element("description")
-                .setText(communityService.getMetadataFirstValue(community,
-                        MetadataSchemaEnum.DC.getName(), "description", "abstract", Item.ANY)));
-        element.addContent(new Element("intro")
-                .setText(communityService.getMetadataFirstValue(community,
-                        MetadataSchemaEnum.DC.getName(), "description", null, Item.ANY)));
-        element.addContent(new Element("copyright")
-                .setText(communityService.getMetadataFirstValue(community,
-                        MetadataSchemaEnum.DC.getName(), "rights", null, Item.ANY)));
-        element.addContent(new Element("sidebar")
-                .setText(communityService.getMetadataFirstValue(community,
-                        MetadataSchemaEnum.DC.getName(), "description", "tableofcontents", Item.ANY)));
-
-        // Export this Community's Community children.
-        for (Community subCommunity : community.getSubcommunities()) {
-            element.addContent(exportACommunity(subCommunity));
-        }
-
-        // Export this Community's Collection children.
-        for (Collection collection : community.getCollections()) {
-            element.addContent(exportACollection(collection));
-        }
-
-        return element;
+    private static void usage() {
+        System.out.println("Usage: java StructBuilder -f <source XML file> -o <output file> -e <eperson email>");
+        System.out.println(
+            "Communities will be created from the top level, and a map of communities to handles will be returned in " +
+                "the output file");
+        return;
     }

     /**
-     * Add a single Collection to the Document.
-     *
-     * @param collection
-     * @return a fragment representing this Collection.
-     */
-    private static Element exportACollection(Collection collection) {
-        // Export this Collection.
-        Element element = new Element("collection");
-        element.setAttribute("identifier", collection.getHandle());
-        element.addContent(new Element("name").setText(collection.getName()));
-        element.addContent(new Element("description")
-                .setText(collectionService.getMetadataFirstValue(collection,
-                        MetadataSchemaEnum.DC.getName(), "description", "abstract", Item.ANY)));
-        element.addContent(new Element("intro")
-                .setText(collectionService.getMetadataFirstValue(collection,
-                        MetadataSchemaEnum.DC.getName(), "description", null, Item.ANY)));
-        element.addContent(new Element("copyright")
-                .setText(collectionService.getMetadataFirstValue(collection,
-                        MetadataSchemaEnum.DC.getName(), "rights", null, Item.ANY)));
-        element.addContent(new Element("sidebar")
-                .setText(collectionService.getMetadataFirstValue(collection,
-                        MetadataSchemaEnum.DC.getName(), "description", "tableofcontents", Item.ANY)));
-        element.addContent(new Element("license")
-                .setText(collectionService.getMetadataFirstValue(collection,
-                        MetadataSchemaEnum.DC.getName(), "rights", "license", Item.ANY)));
-
-        // Provenance is special: multivalued
-        for (MetadataValue value : collectionService.getMetadata(collection,
-                MetadataSchemaEnum.DC.getName(), "provenance", null, Item.ANY)) {
-            element.addContent(new Element("provenance")
-                    .setText(value.getValue()));
-        }
-        return element;
-    }
-
-    /**
-     * Write out the existing Community/Collection structure.
-     */
-    static void exportStructure(Context context, OutputStream output) {
-        // Build a document from the Community/Collection hierarchy.
-        Element rootElement = new Element(INPUT_ROOT); // To be read by importStructure, perhaps
-
-        List<Community> communities = null;
-        try {
-            communities = communityService.findAllTop(context);
-        } catch (SQLException ex) {
-            System.out.printf("Unable to get the list of top-level communities: %s%n",
-                    ex.getMessage());
-            System.exit(1);
-        }
-
-        for (Community community : communities) {
-            rootElement.addContent(exportACommunity(community));
-        }
-
-        // Now write the structure out.
-        org.jdom.Document xmlOutput = new org.jdom.Document(rootElement);
-        try {
-            XMLOutputter outputter = new XMLOutputter(Format.getPrettyFormat());
-            outputter.output(xmlOutput, output);
-        } catch (IOException e) {
-            System.out.printf("Unable to write to output file %s: %s%n",
-                    output, e.getMessage());
-            System.exit(1);
-        }
-    }
-
-    /**
-     * Output the usage information.
-     */
-    private static void usage(Options options) {
-        HelpFormatter helper = new HelpFormatter();
-        try (PrintWriter writer = new PrintWriter(System.out);) {
-            helper.printUsage(writer, 80/* FIXME Magic */,
-                    "structure-builder", options);
-        }
-    }
-
-    /**
-     * Help the user more.
-     */
-    private static void giveHelp(Options options) {
-        HelpFormatter formatter = new HelpFormatter();
-        formatter.printHelp("struct-builder",
-                "Import or export Community/Collection structure.",
-                options,
-                "When importing (-f), communities will be created from the "
-                        + "top level, and a map of communities to handles will "
-                        + "be returned in the output file. When exporting (-x),"
-                        + "the current structure will be written to the map file.",
-                true);
-    }
-
-    /**
-     * Validate the XML document. This method returns if the document is valid.
-     * If validation fails it generates an error and ceases execution.
+     * Validate the XML document. This method does not return, but if validation
+     * fails it generates an error and ceases execution
      *
      * @param document the XML document object
      * @throws TransformerException if transformer error
      */
     private static void validate(org.w3c.dom.Document document)
         throws TransformerException {
-        StringBuilder err = new StringBuilder();
+        StringBuffer err = new StringBuffer();
         boolean trip = false;

-        err.append("The following errors were encountered parsing the source XML.\n");
-        err.append("No changes have been made to the DSpace instance.\n\n");
+        err.append("The following errors were encountered parsing the source XML\n");
+        err.append("No changes have been made to the DSpace instance\n\n");

         NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");
         if (first.getLength() == 0) {
-            err.append("-There are no top level communities in the source document.");
+            err.append("-There are no top level communities in the source document");
             System.out.println(err.toString());
-            System.exit(1);
+            System.exit(0);
         }

         String errs = validateCommunities(first, 1);
@@ -469,13 +230,13 @@ public class StructBuilder {
         if (trip) {
             System.out.println(err.toString());
-            System.exit(1);
+            System.exit(0);
         }
     }

     /**
      * Validate the communities section of the XML document. This returns a string
-     * containing any errors encountered, or null if there were no errors.
+     * containing any errors encountered, or null if there were no errors
      *
      * @param communities the NodeList of communities to validate
      * @param level the level in the XML document that we are at, for the purposes
@@ -485,7 +246,7 @@ public class StructBuilder {
      */
     private static String validateCommunities(NodeList communities, int level)
         throws TransformerException {
-        StringBuilder err = new StringBuilder();
+        StringBuffer err = new StringBuffer();
         boolean trip = false;
         String errs = null;
@@ -494,9 +255,8 @@ public class StructBuilder {
NodeList name = XPathAPI.selectNodeList(n, "name"); NodeList name = XPathAPI.selectNodeList(n, "name");
if (name.getLength() != 1) { if (name.getLength() != 1) {
String pos = Integer.toString(i + 1); String pos = Integer.toString(i + 1);
err.append("-The level ").append(level) err.append("-The level " + level + " community in position " + pos);
.append(" community in position ").append(pos) err.append(" does not contain exactly one name field\n");
.append(" does not contain exactly one name field.\n");
trip = true; trip = true;
} }
@@ -526,7 +286,7 @@ public class StructBuilder {
     /**
      * validate the collection section of the XML document. This generates a
-     * string containing any errors encountered, or returns null if no errors.
+     * string containing any errors encountered, or returns null if no errors
      *
      * @param collections a NodeList of collections to validate
      * @param level the level in the XML document for the purposes of error reporting
@@ -534,7 +294,7 @@ public class StructBuilder {
      */
     private static String validateCollections(NodeList collections, int level)
         throws TransformerException {
-        StringBuilder err = new StringBuilder();
+        StringBuffer err = new StringBuffer();
         boolean trip = false;
         String errs = null;
@@ -543,9 +303,8 @@ public class StructBuilder {
NodeList name = XPathAPI.selectNodeList(n, "name"); NodeList name = XPathAPI.selectNodeList(n, "name");
if (name.getLength() != 1) { if (name.getLength() != 1) {
String pos = Integer.toString(i + 1); String pos = Integer.toString(i + 1);
err.append("-The level ").append(level) err.append("-The level " + level + " collection in position " + pos);
.append(" collection in position ").append(pos) err.append(" does not contain exactly one name field\n");
.append(" does not contain exactly one name field.\n");
trip = true; trip = true;
} }
} }
@@ -558,17 +317,17 @@ public class StructBuilder {
     }

     /**
-     * Load the XML document from input.
+     * Load in the XML from file.
      *
-     * @param input the filename to load from.
-     * @return the DOM representation of the XML input.
+     * @param filename the filename to load from
+     * @return the DOM representation of the XML file
      */
-    private static org.w3c.dom.Document loadXML(InputStream input)
+    private static org.w3c.dom.Document loadXML(String filename)
         throws IOException, ParserConfigurationException, SAXException {
         DocumentBuilder builder = DocumentBuilderFactory.newInstance()
                 .newDocumentBuilder();

-        org.w3c.dom.Document document = builder.parse(input);
+        org.w3c.dom.Document document = builder.parse(new File(filename));

         return document;
     }
@@ -579,7 +338,7 @@ public class StructBuilder {
      * @param node the node from which we want to extract the string value
      * @return the string value of the node
      */
-    private static String getStringValue(Node node) {
+    public static String getStringValue(Node node) {
         String value = node.getNodeValue();

         if (node.hasChildNodes()) {
@@ -604,7 +363,7 @@ public class StructBuilder {
      * created communities (e.g. the handles they have been assigned)
      */
     private static Element[] handleCommunities(Context context, NodeList communities, Community parent)
-        throws TransformerException, SQLException, AuthorizeException {
+        throws TransformerException, SQLException, Exception {
         Element[] elements = new Element[communities.getLength()];

         for (int i = 0; i < communities.getLength(); i++) {
@@ -631,10 +390,12 @@ public class StructBuilder {
             }

             // FIXME: at the moment, if the community already exists by name
-            // then this will throw an SQLException on a duplicate key
-            // violation.
-            // Ideally we'd skip this row and continue to create sub communities
-            // and so forth where they don't exist, but it's proving difficult
+            // then this will throw a PSQLException on a duplicate key
+            // violation
+            // Ideally we'd skip this row and continue to create sub
+            // communities
+            // and so forth where they don't exist, but it's proving
+            // difficult
             // to isolate the community that already exists without hitting
             // the database directly.
             communityService.update(context, community);
@@ -709,7 +470,7 @@ public class StructBuilder {
      * created collections (e.g. the handle)
      */
     private static Element[] handleCollections(Context context, NodeList collections, Community parent)
-        throws TransformerException, SQLException, AuthorizeException {
+        throws TransformerException, SQLException, AuthorizeException, IOException, Exception {
         Element[] elements = new Element[collections.getLength()];

         for (int i = 0; i < collections.getLength(); i++) {

View File

@@ -19,7 +19,6 @@ import org.dspace.content.Item;
  * @author Stuart Lewis
  */
 public class BulkEditChange {
-
     /**
      * The item these changes relate to
      */

View File

@@ -27,7 +27,6 @@ import java.util.UUID;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;

-import org.apache.commons.lang3.StringUtils;
 import org.dspace.authority.AuthorityValue;
 import org.dspace.authority.factory.AuthorityServiceFactory;
 import org.dspace.authority.service.AuthorityValueService;
@@ -35,7 +34,6 @@ import org.dspace.content.Collection;
 import org.dspace.content.Item;
 import org.dspace.content.MetadataField;
 import org.dspace.content.MetadataSchema;
-import org.dspace.content.MetadataSchemaEnum;
 import org.dspace.content.MetadataValue;
 import org.dspace.content.authority.Choices;
 import org.dspace.content.factory.ContentServiceFactory;
@@ -170,9 +168,6 @@ public class DSpaceCSV implements Serializable {
if ("collection".equals(element)) { if ("collection".equals(element)) {
// Store the heading // Store the heading
headings.add(element); headings.add(element);
} else if ("rowName".equals(element)) {
// Store the heading
headings.add(element);
} else if ("action".equals(element)) { // Store the action } else if ("action".equals(element)) { // Store the action
// Store the heading // Store the heading
headings.add(element); headings.add(element);
@@ -203,24 +198,20 @@ public class DSpaceCSV implements Serializable {
                 }

                 // Check that the scheme exists
-                if (!StringUtils.equals(metadataSchema, MetadataSchemaEnum.RELATION.getName())) {
-                    MetadataSchema foundSchema = metadataSchemaService.find(c, metadataSchema);
-                    if (foundSchema == null) {
-                        throw new MetadataImportInvalidHeadingException(clean[0],
-                                                                        MetadataImportInvalidHeadingException
-                                                                            .SCHEMA,
-                                                                        columnCounter);
-                    }
+                MetadataSchema foundSchema = metadataSchemaService.find(c, metadataSchema);
+                if (foundSchema == null) {
+                    throw new MetadataImportInvalidHeadingException(clean[0],
+                                                                    MetadataImportInvalidHeadingException.SCHEMA,
+                                                                    columnCounter);
+                }

                 // Check that the metadata element exists in the schema
                 MetadataField foundField = metadataFieldService
                     .findByElement(c, foundSchema, metadataElement, metadataQualifier);
                 if (foundField == null) {
                     throw new MetadataImportInvalidHeadingException(clean[0],
-                                                                        MetadataImportInvalidHeadingException
-                                                                            .ELEMENT,
-                                                                        columnCounter);
-                    }
+                                                                    MetadataImportInvalidHeadingException.ELEMENT,
+                                                                    columnCounter);
+                }
                 }

                 // Store the heading
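
The checks above assume the CSV heading has already been split into schema, element and qualifier. A rough, self-contained sketch of that split, including the optional [language] suffix a DSpace CSV heading may carry; this is an illustration of the shape of the data, not the class's actual parser:

    class HeadingParts {
        final String schema;
        final String element;
        final String qualifier;

        HeadingParts(String heading) {
            // Strip an optional language suffix, e.g. "dc.title[en_US]" -> "dc.title".
            String base = heading.replaceAll("\\[.*\\]$", "");
            String[] bits = base.split("\\.");
            schema = bits[0];
            element = bits.length > 1 ? bits[1] : null;
            qualifier = bits.length > 2 ? bits[2] : null;
        }

        public static void main(String[] args) {
            HeadingParts p = new HeadingParts("dc.description.abstract");
            System.out.println(p.schema + " / " + p.element + " / " + p.qualifier);
        }
    }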

View File

@@ -14,14 +14,10 @@ import java.io.InputStreamReader;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Enumeration;
-import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Iterator;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
 import java.util.UUID;
-import javax.annotation.Nullable;

 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
@@ -29,33 +25,22 @@ import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.cli.PosixParser;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.logging.log4j.Logger;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
 import org.dspace.authority.AuthorityValue;
 import org.dspace.authority.factory.AuthorityServiceFactory;
 import org.dspace.authority.service.AuthorityValueService;
 import org.dspace.authorize.AuthorizeException;
 import org.dspace.content.Collection;
 import org.dspace.content.DSpaceObject;
-import org.dspace.content.Entity;
 import org.dspace.content.Item;
-import org.dspace.content.MetadataField;
-import org.dspace.content.MetadataSchemaEnum;
 import org.dspace.content.MetadataValue;
-import org.dspace.content.Relationship;
-import org.dspace.content.RelationshipType;
 import org.dspace.content.WorkspaceItem;
 import org.dspace.content.authority.Choices;
 import org.dspace.content.factory.ContentServiceFactory;
 import org.dspace.content.service.CollectionService;
-import org.dspace.content.service.EntityService;
-import org.dspace.content.service.EntityTypeService;
 import org.dspace.content.service.InstallItemService;
 import org.dspace.content.service.ItemService;
-import org.dspace.content.service.MetadataFieldService;
-import org.dspace.content.service.MetadataValueService;
-import org.dspace.content.service.RelationshipService;
-import org.dspace.content.service.RelationshipTypeService;
 import org.dspace.content.service.WorkspaceItemService;
 import org.dspace.core.ConfigurationManager;
 import org.dspace.core.Constants;
@@ -104,49 +89,10 @@ public class MetadataImport {
      */
     protected static final String AC_PREFIX = "authority.controlled.";

-    /**
-     * Map of field:value to csv row number, used to resolve indirect entity target references.
-     *
-     * @see #populateRefAndRowMap(DSpaceCSVLine, UUID)
-     */
-    protected Map<String, Set<Integer>> csvRefMap = new HashMap<>();
-
-    /**
-     * Map of csv row number to UUID, used to resolve indirect entity target references.
-     *
-     * @see #populateRefAndRowMap(DSpaceCSVLine, UUID)
-     */
-    protected HashMap<Integer, UUID> csvRowMap = new HashMap<>();
-
-    /**
-     * Map of UUIDs to their entity types.
-     *
-     * @see #populateRefAndRowMap(DSpaceCSVLine, UUID)
-     */
-    protected static HashMap<UUID, String> entityTypeMap = new HashMap<>();
-
-    /**
-     * Map of UUIDs to their relations that are referenced within any import with their referers.
-     *
-     * @see #populateEntityRelationMap(String, String, String)
-     */
-    protected static HashMap<String, HashMap<String, ArrayList<String>>> entityRelationMap = new HashMap<>();
-
-    /**
-     * Collection of errors generated during relation validation process.
-     */
-    protected ArrayList<String> relationValidationErrors = new ArrayList<>();
-
-    /**
-     * Counter of rows proccssed in a CSV.
-     */
-    protected Integer rowCount = 1;
-
     /**
      * Logger
      */
-    protected static final Logger log = org.apache.logging.log4j.LogManager.getLogger(MetadataImport.class);
+    protected static final Logger log = Logger.getLogger(MetadataImport.class);

     protected final AuthorityValueService authorityValueService;
@@ -155,10 +101,6 @@ public class MetadataImport {
     protected final CollectionService collectionService;
     protected final HandleService handleService;
     protected final WorkspaceItemService workspaceItemService;
-    protected final RelationshipTypeService relationshipTypeService;
-    protected final RelationshipService relationshipService;
-    protected final EntityTypeService entityTypeService;
-    protected final EntityService entityService;

     /**
      * Create an instance of the metadata importer. Requires a context and an array of CSV lines
@@ -178,10 +120,6 @@ public class MetadataImport {
         handleService = HandleServiceFactory.getInstance().getHandleService();
         authorityValueService = AuthorityServiceFactory.getInstance().getAuthorityValueService();
         workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService();
-        relationshipService = ContentServiceFactory.getInstance().getRelationshipService();
-        relationshipTypeService = ContentServiceFactory.getInstance().getRelationshipTypeService();
-        entityTypeService = ContentServiceFactory.getInstance().getEntityTypeService();
-        entityService = ContentServiceFactory.getInstance().getEntityService();
     }

     /**
@@ -193,7 +131,7 @@ public class MetadataImport {
      * @param workflowNotify If the workflows should be used, whether to send notifications or not
      * @param useTemplate Use collection template if create new item
      * @return An array of BulkEditChange elements representing the items that have changed
-     * @throws MetadataImportException if something goes wrong
+     * @throws MetadataImportException  if something goes wrong
      */
     public List<BulkEditChange> runImport(boolean change,
                                           boolean useWorkflow,
@@ -208,11 +146,7 @@ public class MetadataImport {
         c.setMode(Context.Mode.BATCH_EDIT);

         // Process each change
-        rowCount = 1;
         for (DSpaceCSVLine line : toImport) {
-            // Resolve target references to other items
-            populateRefAndRowMap(line, line.getID());
-            line = resolveEntityRefs(line);
             // Get the DSpace item to compare with
             UUID id = line.getID();
@@ -225,7 +159,7 @@ public class MetadataImport {
             WorkflowItem wfItem = null;
             Item item = null;

-            // Is this an existing item?
+            // Is this a new item?
             if (id != null) {
                 // Get the item
                 item = itemService.find(c, id);
@@ -262,8 +196,9 @@ public class MetadataImport {
                     }
                 }
             }

-                    compareAndUpdate(item, fromCSV, change, md, whatHasChanged, line);
+                    // Compare
+                    compare(item, fromCSV, change, md, whatHasChanged, line);
                 }
             }
@@ -324,7 +259,7 @@ public class MetadataImport {
             BulkEditChange whatHasChanged = new BulkEditChange();
             for (String md : line.keys()) {
                 // Get the values we already have
-                if (!"id".equals(md) && !"rowName".equals(md)) {
+                if (!"id".equals(md)) {
                     // Get the values from the CSV
                     String[] fromCSV = line.get(md).toArray(new String[line.get(md).size()]);
@@ -402,23 +337,14 @@ public class MetadataImport {
                 // Add the metadata to the item
                 for (BulkEditMetadataValue dcv : whatHasChanged.getAdds()) {
-                    if (!StringUtils.equals(dcv.getSchema(), MetadataSchemaEnum.RELATION.getName())) {
-                        itemService.addMetadata(c, item, dcv.getSchema(),
-                                                dcv.getElement(),
-                                                dcv.getQualifier(),
-                                                dcv.getLanguage(),
-                                                dcv.getValue(),
-                                                dcv.getAuthority(),
-                                                dcv.getConfidence());
-                    }
+                    itemService.addMetadata(c, item, dcv.getSchema(),
+                                            dcv.getElement(),
+                                            dcv.getQualifier(),
+                                            dcv.getLanguage(),
+                                            dcv.getValue(),
+                                            dcv.getAuthority(),
+                                            dcv.getConfidence());
                 }

-                //Add relations after all metadata has been processed
-                for (BulkEditMetadataValue dcv : whatHasChanged.getAdds()) {
-                    if (StringUtils.equals(dcv.getSchema(), MetadataSchemaEnum.RELATION.getName())) {
-                        addRelationship(c, item, dcv.getElement(), dcv.getValue());
-                    }
-                }
-
                 // Should the workflow be used?
                 if (useWorkflow) {
@@ -442,6 +368,8 @@ public class MetadataImport {
                     }
                 }

+                // Commit changes to the object
+                // c.commit();
                 whatHasChanged.setItem(item);
             }
@@ -455,9 +383,6 @@ public class MetadataImport {
                 c.uncacheEntity(wfItem);
                 c.uncacheEntity(item);
             }
-
-            populateRefAndRowMap(line, item == null ? null : item.getID());
-            // keep track of current rows processed
-            rowCount++;
         }

         c.setMode(originalMode);
@@ -468,14 +393,11 @@ public class MetadataImport {
         }

         // Return the changes
-        if (!change ) {
-            validateExpressedRelations();
-        }
         return changes;
     }

     /**
-     * Compare an item metadata with a line from CSV, and optionally update the item.
+     * Compare an item metadata with a line from CSV, and optionally update the item
      *
      * @param item The current item metadata
      * @param fromCSV The metadata from the CSV file
@@ -485,11 +407,10 @@ public class MetadataImport {
      * @param line line in CSV file
      * @throws SQLException if there is a problem accessing a Collection from the database, from its handle
      * @throws AuthorizeException if there is an authorization problem with permissions
-     * @throws MetadataImportException custom exception for error handling within metadataimport
      */
-    protected void compareAndUpdate(Item item, String[] fromCSV, boolean change,
-                                    String md, BulkEditChange changes, DSpaceCSVLine line)
-        throws SQLException, AuthorizeException, MetadataImportException {
+    protected void compare(Item item, String[] fromCSV, boolean change,
+                           String md, BulkEditChange changes, DSpaceCSVLine line)
+        throws SQLException, AuthorizeException {
         // Log what metadata element we're looking at
         String all = "";
         for (String part : fromCSV) {
@@ -499,8 +420,8 @@ public class MetadataImport {
         log.debug(LogManager.getHeader(c, "metadata_import",
                                        "item_id=" + item.getID() + ",fromCSV=" + all));

-        // Don't compare collections or actions or rowNames
-        if (("collection".equals(md)) || ("action".equals(md)) || ("rowName".equals(md))) {
+        // Don't compare collections or actions
+        if (("collection".equals(md)) || ("action".equals(md))) {
             return;
         }
@@ -662,131 +583,13 @@ public class MetadataImport {
             }
         }

-        if (StringUtils.equals(schema, MetadataSchemaEnum.RELATION.getName())) {
-            List<RelationshipType> relationshipTypeList = relationshipTypeService
-                .findByLeftwardOrRightwardTypeName(c, element);
-            for (RelationshipType relationshipType : relationshipTypeList) {
-                for (Relationship relationship : relationshipService
-                    .findByItemAndRelationshipType(c, item, relationshipType)) {
-                    relationshipService.delete(c, relationship);
-                    relationshipService.update(c, relationship);
-                }
-            }
-            addRelationships(c, item, element, values);
-        } else {
-            itemService.clearMetadata(c, item, schema, element, qualifier, language);
-            itemService.addMetadata(c, item, schema, element, qualifier,
-                                    language, values, authorities, confidences);
-            itemService.update(c, item);
-        }
+        // Set those values
+        itemService.clearMetadata(c, item, schema, element, qualifier, language);
+        itemService.addMetadata(c, item, schema, element, qualifier, language, values, authorities, confidences);
+        itemService.update(c, item);
     }
 }
-    /**
-     *
-     * Adds multiple relationships with a matching typeName to an item.
-     *
-     * @param c The relevant DSpace context
-     * @param item The item to which this metadatavalue belongs to
-     * @param typeName The element for the metadatavalue
-     * @param values to iterate over
-     * @throws SQLException If something goes wrong
-     * @throws AuthorizeException If something goes wrong
-     */
-    private void addRelationships(Context c, Item item, String typeName, List<String> values)
-        throws SQLException, AuthorizeException,
-        MetadataImportException {
-        for (String value : values) {
-            addRelationship(c, item, typeName, value);
-        }
-    }
-
-    /**
-     * Gets an existing entity from a target reference.
-     *
-     * @param context the context to use.
-     * @param targetReference the target reference which may be a UUID, metadata reference, or rowName reference.
-     * @return the entity, which is guaranteed to exist.
-     * @throws MetadataImportException if the target reference is badly formed or refers to a non-existing item.
-     */
-    private Entity getEntity(Context context, String targetReference) throws MetadataImportException {
-        Entity entity = null;
-        UUID uuid = resolveEntityRef(context, targetReference);
-        // At this point, we have a uuid, so we can get an entity
-        try {
-            entity = entityService.findByItemId(context, uuid);
-            if (entity.getItem() == null) {
-                throw new IllegalArgumentException("No item found in repository with uuid: " + uuid);
-            }
-            return entity;
-        } catch (SQLException sqle) {
-            throw new MetadataImportException("Unable to find entity using reference: " + targetReference, sqle);
-        }
-    }
-
-    /**
-     *
-     * Creates a relationship for the given item
-     *
-     * @param c The relevant DSpace context
-     * @param item The item that the relationships will be made for
-     * @param typeName The relationship typeName
-     * @param value The value for the relationship
-     * @throws SQLException If something goes wrong
-     * @throws AuthorizeException If something goes wrong
-     */
-    private void addRelationship(Context c, Item item, String typeName, String value)
-        throws SQLException, AuthorizeException, MetadataImportException {
-        if (value.isEmpty()) {
-            return;
-        }
-        boolean left = false;
-
-        // Get entity from target reference
-        Entity relationEntity = getEntity(c, value);
-        // Get relationship type of entity and item
-        String relationEntityRelationshipType = itemService.getMetadata(relationEntity.getItem(),
-                                                                        "relationship", "type",
-                                                                        null, Item.ANY).get(0).getValue();
-        String itemRelationshipType = itemService.getMetadata(item, "relationship", "type",
-                                                              null, Item.ANY).get(0).getValue();
-
-        // Get the correct RelationshipType based on typeName
-        List<RelationshipType> relType = relationshipTypeService.findByLeftwardOrRightwardTypeName(c, typeName);
-        RelationshipType foundRelationshipType = matchRelationshipType(relType,
-            relationEntityRelationshipType, itemRelationshipType, typeName);
-
-        if (foundRelationshipType == null) {
-            throw new MetadataImportException("Error on CSV row " + rowCount + ":" + "\n" +
-                "No Relationship type found for:\n" +
-                "Target type: " + relationEntityRelationshipType + "\n" +
-                "Origin referer type: " + itemRelationshipType + "\n" +
-                "with typeName: " + typeName);
-        }
-
-        if (foundRelationshipType.getLeftwardType().equalsIgnoreCase(typeName)) {
-            left = true;
-        }
-
-        // Placeholder items for relation placing
-        Item leftItem = null;
-        Item rightItem = null;
-        if (left) {
-            leftItem = item;
-            rightItem = relationEntity.getItem();
-        } else {
-            leftItem = relationEntity.getItem();
-            rightItem = item;
-        }
-
-        // Create the relationship
-        int leftPlace = relationshipService.findNextLeftPlaceByLeftItem(c, leftItem);
-        int rightPlace = relationshipService.findNextRightPlaceByRightItem(c, rightItem);
-        Relationship persistedRelationship = relationshipService.create(c, leftItem, rightItem,
-            foundRelationshipType, leftPlace, rightPlace);
-        relationshipService.update(c, persistedRelationship);
-    }
     /**
      * Compare changes between an items owning collection and mapped collections
      * and what is in the CSV file
@@ -1275,8 +1078,6 @@ public class MetadataImport {
"notify - when adding new items using a workflow, send notification emails"); "notify - when adding new items using a workflow, send notification emails");
options.addOption("t", "template", false, options.addOption("t", "template", false,
"template - when adding new items, use the collection template (if it exists)"); "template - when adding new items, use the collection template (if it exists)");
options.addOption("v", "validate-only", false,
"validate - just validate the csv, don't run the import");
options.addOption("h", "help", false, "help"); options.addOption("h", "help", false, "help");
// Parse the command line arguments // Parse the command line arguments
@@ -1374,9 +1175,7 @@ public class MetadataImport {
         MetadataImport importer = new MetadataImport(c, csv);
         List<BulkEditChange> changes;

-        boolean validateOnly = line.hasOption('v');
-
-        if (!line.hasOption('s') || validateOnly) {
+        if (!line.hasOption('s')) {
             // See what has changed
             try {
                 changes = importer.runImport(false, useWorkflow, workflowNotify, useTemplate);
@@ -1390,7 +1189,7 @@ public class MetadataImport {
             int changeCounter = displayChanges(changes, false);

             // If there were changes, ask if we should execute them
-            if (!validateOnly && changeCounter > 0) {
+            if (changeCounter > 0) {
                 try {
                     // Ask the user if they want to make the changes
                     System.out.println("\n" + changeCounter + " item(s) will be changed\n");
@@ -1415,7 +1214,7 @@ public class MetadataImport {
         try {
             // If required, make the change
-            if (change && !validateOnly) {
+            if (change) {
                 try {
                     // Make the changes
                     changes = importer.runImport(true, useWorkflow, workflowNotify, useTemplate);
@@ -1427,6 +1226,9 @@ public class MetadataImport {
                 // Display the changes
                 displayChanges(changes, true);
+
+                // Commit the change to the DB
+                // c.commit();
             }

             // Finsh off and tidy up
@@ -1439,418 +1241,4 @@ public class MetadataImport {
             System.exit(1);
         }
     }
-    /**
-     * Gets a copy of the given csv line with all entity target references resolved to UUID strings.
-     * Keys being iterated over represent metadatafields or special columns to be processed.
-     *
-     * @param line the csv line to process.
-     * @return a copy, with all references resolved.
-     * @throws MetadataImportException if there is an error resolving any entity target reference.
-     */
-    public DSpaceCSVLine resolveEntityRefs(DSpaceCSVLine line) throws MetadataImportException {
-        DSpaceCSVLine newLine = new DSpaceCSVLine(line.getID());
-        UUID originId = evaluateOriginId(line.getID());
-        for (String key : line.keys()) {
-            // If a key represents a relation field attempt to resolve the target reference from the csvRefMap
-            if (key.split("\\.")[0].equalsIgnoreCase("relation")) {
-                if (line.get(key).size() > 0) {
-                    for (String val : line.get(key)) {
-                        // Attempt to resolve the relation target reference
-                        // These can be a UUID, metadata target reference or rowName target reference
-                        String uuid = resolveEntityRef(c, val).toString();
-                        newLine.add(key, uuid);
-                        //Entity refs have been resolved / placeholdered
-                        //Populate the EntityRelationMap
-                        populateEntityRelationMap(uuid, key, originId.toString());
-                    }
-                }
-            } else {
-                if (line.get(key).size() > 1) {
-                    for (String value : line.get(key)) {
-                        newLine.add(key, value);
-                    }
-                } else {
-                    if (line.get(key).size() > 0) {
-                        newLine.add(key, line.get(key).get(0));
-                    }
-                }
-            }
-        }
-        return newLine;
-    }
-    /**
-     * Populate the entityRelationMap with all target references and it's asscoiated typeNames
-     * to their respective origins
-     *
-     * @param refUUID the target reference UUID for the relation
-     * @param relationField the field of the typeNames to relate from
-     */
-    private void populateEntityRelationMap(String refUUID, String relationField, String originId) {
-        HashMap<String, ArrayList<String>> typeNames = null;
-        if (entityRelationMap.get(refUUID) == null) {
-            typeNames = new HashMap<>();
-            ArrayList<String> originIds = new ArrayList<>();
-            originIds.add(originId);
-            typeNames.put(relationField, originIds);
-            entityRelationMap.put(refUUID, typeNames);
-        } else {
-            typeNames = entityRelationMap.get(refUUID);
-            if (typeNames.get(relationField) == null) {
-                ArrayList<String> originIds = new ArrayList<>();
-                originIds.add(originId);
-                typeNames.put(relationField, originIds);
-            } else {
-                ArrayList<String> originIds = typeNames.get(relationField);
-                originIds.add(originId);
-                typeNames.put(relationField, originIds);
-            }
-            entityRelationMap.put(refUUID, typeNames);
-        }
-    }
-    /**
-     * Populates the csvRefMap, csvRowMap, and entityTypeMap for the given csv line.
-     *
-     * The csvRefMap is an index that keeps track of which rows have a specific value for
-     * a specific metadata field or the special "rowName" column. This is used to help resolve indirect
-     * entity target references in the same CSV.
-     *
-     * The csvRowMap is a row number to UUID map, and contains an entry for every row that has
-     * been processed so far which has a known (minted) UUID for its item. This is used to help complete
-     * the resolution after the row number has been determined.
-     *
-     * @param line the csv line.
-     * @param uuid the uuid of the item, which may be null if it has not been minted yet.
-     */
-    private void populateRefAndRowMap(DSpaceCSVLine line, @Nullable UUID uuid) {
-        if (uuid != null) {
-            csvRowMap.put(rowCount, uuid);
-        } else {
-            csvRowMap.put(rowCount, new UUID(0, rowCount));
-        }
-        for (String key : line.keys()) {
-            if (key.contains(".") && !key.split("\\.")[0].equalsIgnoreCase("relation") ||
-                key.equalsIgnoreCase("rowName")) {
-                for (String value : line.get(key)) {
-                    String valueKey = key + ":" + value;
-                    Set<Integer> rowNums = csvRefMap.get(valueKey);
-                    if (rowNums == null) {
-                        rowNums = new HashSet<>();
-                        csvRefMap.put(valueKey, rowNums);
-                    }
-                    rowNums.add(rowCount);
-                }
-            }
-            //Populate entityTypeMap
-            if (key.equalsIgnoreCase("relationship.type") && line.get(key).size() > 0) {
-                if (uuid == null) {
-                    entityTypeMap.put(new UUID(0, rowCount), line.get(key).get(0));
-                } else {
-                    entityTypeMap.put(uuid, line.get(key).get(0));
-                }
-            }
-        }
-    }
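
The csvRefMap described in the removed javadoc above is essentially a multimap from field:value keys to row numbers. A small stand-alone sketch of that index, independent of the DSpace types (field names and values are made up):

    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    class RefIndex {
        private final Map<String, Set<Integer>> csvRefMap = new HashMap<>();

        // Record that row `rowNum` carries `value` for `field`, keyed as "field:value".
        void record(String field, String value, int rowNum) {
            csvRefMap.computeIfAbsent(field + ":" + value, k -> new HashSet<>()).add(rowNum);
        }

        Set<Integer> rowsFor(String field, String value) {
            return csvRefMap.getOrDefault(field + ":" + value, Set.of());
        }

        public static void main(String[] args) {
            RefIndex idx = new RefIndex();
            idx.record("dc.title", "A Study of Widgets", 3);
            System.out.println(idx.rowsFor("dc.title", "A Study of Widgets")); // [3]
        }
    }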
/**
* Gets the UUID of the item indicated by the given target reference,
* which may be a direct UUID string, a row reference
* of the form rowName:VALUE, or a metadata value reference of the form schema.element[.qualifier]:VALUE.
*
* The reference may refer to a previously-processed item in the CSV or an item in the database.
*
* @param context the context to use.
* @param reference the target reference which may be a UUID, metadata reference, or rowName reference.
* @return the uuid.
* @throws MetadataImportException if the target reference is malformed or ambiguous (refers to multiple items).
*/
private UUID resolveEntityRef(Context context, String reference) throws MetadataImportException {
// value reference
UUID uuid = null;
if (!reference.contains(":")) {
// assume it's a UUID
try {
return UUID.fromString(reference);
} catch (IllegalArgumentException e) {
throw new MetadataImportException("Error in CSV row " + rowCount + ":\n" +
"Not a UUID or indirect entity reference: '" + reference + "'");
}
} else if (!reference.startsWith("rowName:") ) { // Not a rowName ref; so it's a metadata value reference
MetadataValueService metadataValueService = ContentServiceFactory.getInstance().getMetadataValueService();
MetadataFieldService metadataFieldService =
ContentServiceFactory.getInstance().getMetadataFieldService();
int i = reference.indexOf(":");
String mfValue = reference.substring(i + 1);
String mf[] = reference.substring(0, i).split("\\.");
if (mf.length < 2) {
throw new MetadataImportException("Error in CSV row " + rowCount + ":\n" +
"Bad metadata field in reference: '" + reference
+ "' (expected syntax is schema.element[.qualifier])");
}
String schema = mf[0];
String element = mf[1];
String qualifier = mf.length == 2 ? null : mf[2];
try {
MetadataField mfo = metadataFieldService.findByElement(context, schema, element, qualifier);
Iterator<MetadataValue> mdv = metadataValueService.findByFieldAndValue(context, mfo, mfValue);
if (mdv.hasNext()) {
MetadataValue mdvVal = mdv.next();
uuid = mdvVal.getDSpaceObject().getID();
if (mdv.hasNext()) {
throw new MetadataImportException("Error in CSV row " + rowCount + ":\n" +
"Ambiguous reference; multiple matches in db: " + reference);
}
}
} catch (SQLException e) {
throw new MetadataImportException("Error in CSV row " + rowCount + ":\n" +
"Error looking up item by metadata reference: " + reference, e);
}
}
// Lookup UUIDs that may have already been processed into the csvRefMap
// See populateRefAndRowMap() for how the csvRefMap is populated
// See getMatchingCSVUUIDs() for how the reference param is sourced from the csvRefMap
Set<UUID> csvUUIDs = getMatchingCSVUUIDs(reference);
if (csvUUIDs.size() > 1) {
throw new MetadataImportException("Error in CSV row " + rowCount + ":\n" +
"Ambiguous reference; multiple matches in csv: " + reference);
} else if (csvUUIDs.size() == 1) {
UUID csvUUID = csvUUIDs.iterator().next();
if (csvUUID.equals(uuid)) {
return uuid; // one match from csv and db (same item)
} else if (uuid != null) {
throw new MetadataImportException("Error in CSV row " + rowCount + ":\n" +
"Ambiguous reference; multiple matches in db and csv: " + reference);
} else {
return csvUUID; // one match from csv
}
} else { // size == 0; no match in the CSV, so fall back to the db match or throw an error
if (uuid == null) {
throw new MetadataImportException("Error in CSV row " + rowCount + ":\n" +
"No matches found for reference: " + reference
+ "\nKeep in mind you can only reference entries that are listed before " +
"this one within the CSV.");
} else {
return uuid; // one match from db
}
}
}
/**
* Gets the UUIDs of all previously processed CSV lines that match the given metadata value reference.
*
* @param mdValueRef the metadataValue reference to search for.
* @return the set of matching lines as UUIDs.
*/
private Set<UUID> getMatchingCSVUUIDs(String mdValueRef) {
Set<UUID> set = new HashSet<>();
if (csvRefMap.containsKey(mdValueRef)) {
for (Integer rowNum : csvRefMap.get(mdValueRef)) {
set.add(getUUIDForRow(rowNum));
}
}
return set;
}
/**
* Gets the UUID of the item of a given row in the CSV, if it has been minted.
* If the UUID has not yet been minted, gets a UUID representation of the row
* (a UUID whose numeric value equals the row number).
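* For example, row 10 maps to the placeholder UUID 00000000-0000-0000-0000-00000000000a.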
*
* @param rowNum the row number.
* @return the UUID of the item
*/
private UUID getUUIDForRow(int rowNum) {
if (csvRowMap.containsKey(rowNum)) {
return csvRowMap.get(rowNum);
} else {
return new UUID(0, rowNum);
}
}
/**
* Returns the UUID of the origin item if it has already been minted, or a placeholder UUID
* (based on the current row number) to be resolved later.
*
* @param originId UUID of the origin
* @return the UUID of the item or UUID placeholder
*/
private UUID evaluateOriginId(@Nullable UUID originId) {
if (originId != null) {
return originId;
} else {
return new UUID(0, rowCount);
}
}
/**
* Validate every relation modification expressed in the CSV.
*
* @throws MetadataImportException if any expressed relationship fails validation.
*/
private void validateExpressedRelations() throws MetadataImportException {
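// entityRelationMap maps each target reference to the relation columns (typeNames) whose rows point at it.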
for (String targetUUID : entityRelationMap.keySet()) {
String targetType = null;
try {
// Get the type of reference. Attempt lookup in processed map first before looking in archive.
if (entityTypeMap.get(UUID.fromString(targetUUID)) != null) {
targetType = entityTypeService.
findByEntityType(c, entityTypeMap.get(UUID.fromString(targetUUID))).getLabel();
} else {
// Target item may be archived; check there.
// Add to errors if relationship.type cannot be derived
Item targetItem = itemService.find(c, UUID.fromString(targetUUID));
if (targetItem != null) {
List<MetadataValue> relTypes = itemService.
getMetadata(targetItem, "relationship", "type", null, Item.ANY);
String relTypeValue = null;
if (relTypes.size() > 0) {
relTypeValue = relTypes.get(0).getValue();
targetType = entityTypeService.findByEntityType(c, relTypeValue).getLabel();
} else {
relationValidationErrors.add("Cannot resolve Entity type for target UUID: " +
targetUUID);
}
} else {
relationValidationErrors.add("Cannot resolve Entity type for target UUID: " +
targetUUID);
}
}
if (targetType == null) {
continue;
}
// Get typeNames for each origin referer of this target.
for (String typeName : entityRelationMap.get(targetUUID).keySet()) {
// Resolve Entity Type for each origin referer.
for (String originRefererUUID : entityRelationMap.get(targetUUID).get(typeName)) {
// Evaluate row number for origin referer.
String originRow = "N/A";
if (csvRowMap.containsValue(UUID.fromString(originRefererUUID))) {
for (int key : csvRowMap.keySet()) {
if (csvRowMap.get(key).toString().equalsIgnoreCase(originRefererUUID)) {
originRow = key + "";
break;
}
}
}
String originType = "";
// Validate target type and origin type pairing with typeName or add to errors.
// Attempt lookup in processed map first before looking in archive.
if (entityTypeMap.get(UUID.fromString(originRefererUUID)) != null) {
originType = entityTypeMap.get(UUID.fromString(originRefererUUID));
validateTypesByTypeByTypeName(targetType, originType, typeName, originRow);
} else {
// Origin item may be archived; check there.
// Add to errors if relationship.type cannot be derived.
Item originItem = itemService.find(c, UUID.fromString(originRefererUUID));
if (originItem != null) {
List<MetadataValue> relTypes = itemService.
getMetadata(originItem, "relationship", "type", null, Item.ANY);
String relTypeValue = null;
if (relTypes.size() > 0) {
relTypeValue = relTypes.get(0).getValue();
originType = entityTypeService.findByEntityType(c, relTypeValue).getLabel();
validateTypesByTypeByTypeName(targetType, originType, typeName, originRow);
} else {
relationValidationErrors.add("Error on CSV row " + originRow + ":" + "\n" +
"Cannot resolve Entity type for reference: "
+ originRefererUUID);
}
} else {
relationValidationErrors.add("Error on CSV row " + originRow + ":" + "\n" +
"Cannot resolve Entity type for reference: "
+ originRefererUUID + " in row: " + originRow );
}
}
}
}
} catch (SQLException sqle) {
throw new MetadataImportException("Error interacting with database!", sqle);
}
}
// If relationValidationErrors is empty, all described relationships are valid.
if (!relationValidationErrors.isEmpty()) {
StringBuilder errors = new StringBuilder();
for (String error : relationValidationErrors) {
errors.append(error + "\n");
}
throw new MetadataImportException("Error validating relationships: \n" + errors);
}
}
/**
* Generates a list of potential Relationship Types given a typeName and attempts to match the given
* targetType and originType to a Relationship Type in the list.
*
* @param targetType entity type of target.
* @param originType entity type of origin referer.
* @param typeName left or right typeName of the respective Relationship.
* @param originRow the CSV row of the origin referer, used in error reporting.
* @throws MetadataImportException if a database error occurs during validation.
*/
private void validateTypesByTypeByTypeName(String targetType, String originType, String typeName, String originRow)
throws MetadataImportException {
try {
RelationshipType foundRelationshipType = null;
List<RelationshipType> relationshipTypeList = relationshipTypeService.
findByLeftwardOrRightwardTypeName(c, typeName.split("\\.")[1]);
// Validate the described relationship from the CSV.
foundRelationshipType = matchRelationshipType(relationshipTypeList, targetType, originType, typeName);
if (foundRelationshipType == null) {
relationValidationErrors.add("Error on CSV row " + originRow + ":" + "\n" +
"No Relationship type found for:\n" +
"Target type: " + targetType + "\n" +
"Origin referer type: " + originType + "\n" +
"with typeName: " + typeName + " for type: " + originType);
}
} catch (SQLException sqle) {
throw new MetadataImportException("Error interacting with database!", sqle);
}
}
/**
* Matches two Entity types to a Relationship Type from a set of Relationship Types.
*
* @param relTypes set of Relationship Types.
* @param targetType entity type of target.
* @param originType entity type of origin referer.
* @param originTypeName the typeName of the origin referer, possibly prefixed (e.g. "relation.someTypeName").
* @return the matched Relationship Type, or null if none matches.
*/
private RelationshipType matchRelationshipType(List<RelationshipType> relTypes,
String targetType, String originType, String originTypeName) {
RelationshipType foundRelationshipType = null;
if (originTypeName.split("\\.").length > 1) {
originTypeName = originTypeName.split("\\.")[1];
}
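// At this point originTypeName is the bare typeName, e.g. "isAuthorOfPublication" (hypothetical value).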
for (RelationshipType relationshipType : relTypes) {
// Is the origin type on the leftward or rightward side of this relationship type?
boolean isLeft = relationshipType.getLeftType().getLabel().equalsIgnoreCase(originType);
if (isLeft) {
// Validate typeName reference
if (!relationshipType.getLeftwardType().equalsIgnoreCase(originTypeName)) {
continue;
}
if (relationshipType.getLeftType().getLabel().equalsIgnoreCase(originType) &&
relationshipType.getRightType().getLabel().equalsIgnoreCase(targetType)) {
foundRelationshipType = relationshipType;
}
} else {
if (!relationshipType.getRightwardType().equalsIgnoreCase(originTypeName)) {
continue;
}
if (relationshipType.getLeftType().getLabel().equalsIgnoreCase(targetType) &&
relationshipType.getRightType().getLabel().equalsIgnoreCase(originType)) {
foundRelationshipType = relationshipType;
}
}
}
return foundRelationshipType;
}
}

View File

@@ -23,8 +23,7 @@ import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
+import org.apache.log4j.Logger;
import org.dspace.checker.BitstreamDispatcher;
import org.dspace.checker.CheckerCommand;
import org.dspace.checker.HandleDispatcher;
@@ -49,7 +48,7 @@ import org.dspace.core.Utils;
 * @author Nathan Sarr
 */
public final class ChecksumChecker {
-    private static final Logger LOG = LogManager.getLogger(ChecksumChecker.class);
+    private static final Logger LOG = Logger.getLogger(ChecksumChecker.class);
private static final BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();

View File

@@ -272,8 +272,9 @@ public class Harvest {
targetCollection = (Collection) dso;
}
} else {
-    // not a handle, try and treat it as an collection database UUID
-    System.out.println("Looking up by UUID: " + collectionID + ", " + "in context: " + context);
+    // not a handle, try and treat it as an integer collection database ID
+    System.out.println("Looking up by id: " + collectionID + ", parsed as '" + Integer
+        .parseInt(collectionID) + "', " + "in context: " + context);
targetCollection = collectionService.find(context, UUID.fromString(collectionID));
}
}
@@ -459,7 +460,7 @@ public class Harvest {
List<String> errors;
System.out.print("Testing basic PMH access: ");
-errors = harvestedCollectionService.verifyOAIharvester(server, set,
+errors = OAIHarvester.verifyOAIharvester(server, set,
    (null != metadataFormat) ? metadataFormat : "dc", false);
if (errors.isEmpty()) {
System.out.println("OK");
@@ -470,7 +471,7 @@ public class Harvest {
}
System.out.print("Testing ORE support: ");
-errors = harvestedCollectionService.verifyOAIharvester(server, set,
+errors = OAIHarvester.verifyOAIharvester(server, set,
    (null != metadataFormat) ? metadataFormat : "dc", true);
if (errors.isEmpty()) {
System.out.println("OK");

View File

@@ -32,8 +32,8 @@ import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import javax.mail.MessagingException;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.logging.log4j.Logger;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
import org.dspace.app.itemexport.service.ItemExportService;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
@@ -42,7 +42,7 @@ import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
-import org.dspace.content.MetadataSchemaEnum;
+import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataValue;
import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.CommunityService;
@@ -98,7 +98,7 @@ public class ItemExportServiceImpl implements ItemExportService {
/**
 * log4j logger
 */
-private Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemExportServiceImpl.class);
+private Logger log = Logger.getLogger(ItemExportServiceImpl.class);
protected ItemExportServiceImpl() {
@@ -214,7 +214,7 @@ public class ItemExportServiceImpl implements ItemExportService {
protected void writeMetadata(Context c, String schema, Item i,
                             File destDir, boolean migrate) throws Exception {
String filename;
-if (schema.equals(MetadataSchemaEnum.DC.getName())) {
+if (schema.equals(MetadataSchema.DC_SCHEMA)) {
filename = "dublin_core.xml";
} else {
filename = "metadata_" + schema + ".xml";
@@ -271,8 +271,9 @@ public class ItemExportServiceImpl implements ItemExportService {
("date".equals(metadataField.getElement()) && "accessioned".equals(qualifier)) || ("date".equals(metadataField.getElement()) && "accessioned".equals(qualifier)) ||
("date".equals(metadataField.getElement()) && "available".equals(qualifier)) || ("date".equals(metadataField.getElement()) && "available".equals(qualifier)) ||
("identifier".equals(metadataField.getElement()) && "uri".equals(qualifier) && ("identifier".equals(metadataField.getElement()) && "uri".equals(qualifier) &&
(dcv.getValue() != null && dcv.getValue().startsWith( (dcv.getValue() != null && dcv.getValue().startsWith("http://hdl.handle.net/" +
handleService.getCanonicalPrefix() + handleService.getPrefix() + "/"))) || handleService
.getPrefix() + "/"))) ||
("description".equals(metadataField.getElement()) && "provenance".equals(qualifier)) || ("description".equals(metadataField.getElement()) && "provenance".equals(qualifier)) ||
("format".equals(metadataField.getElement()) && "extent".equals(qualifier)) || ("format".equals(metadataField.getElement()) && "extent".equals(qualifier)) ||
("format".equals(metadataField.getElement()) && "mimetype".equals(qualifier))))) { ("format".equals(metadataField.getElement()) && "mimetype".equals(qualifier))))) {
@@ -546,7 +547,7 @@ public class ItemExportServiceImpl implements ItemExportService {
List<Bitstream> bitstreams = bundle.getBitstreams();
for (Bitstream bitstream : bitstreams) {
// add up the size
-size += bitstream.getSizeBytes();
+size += bitstream.getSize();
}
}
items.add(item.getID());
@@ -573,7 +574,7 @@ public class ItemExportServiceImpl implements ItemExportService {
List<Bitstream> bitstreams = bundle.getBitstreams();
for (Bitstream bitstream : bitstreams) {
// add up the size
-size += bitstream.getSizeBytes();
+size += bitstream.getSize();
}
}
items.add(item.getID());
@@ -592,7 +593,7 @@ public class ItemExportServiceImpl implements ItemExportService {
List<Bitstream> bitstreams = bundle.getBitstreams();
for (Bitstream bitstream : bitstreams) {
// add up the size
-size += bitstream.getSizeBytes();
+size += bitstream.getSize();
}
}
ArrayList<UUID> items = new ArrayList<>();
@@ -929,7 +930,7 @@ public class ItemExportServiceImpl implements ItemExportService {
Locale supportedLocale = I18nUtil.getEPersonLocale(eperson);
Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "export_success"));
email.addRecipient(eperson.getEmail());
-email.addArgument(ConfigurationManager.getProperty("dspace.ui.url") + "/exportdownload/" + fileName);
+email.addArgument(ConfigurationManager.getProperty("dspace.url") + "/exportdownload/" + fileName);
email.addArgument(ConfigurationManager.getProperty("org.dspace.app.itemexport.life.span.hours"));
email.send();
@@ -947,7 +948,7 @@ public class ItemExportServiceImpl implements ItemExportService {
Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "export_error"));
email.addRecipient(eperson.getEmail());
email.addArgument(error);
-email.addArgument(ConfigurationManager.getProperty("dspace.ui.url") + "/feedback");
+email.addArgument(ConfigurationManager.getProperty("dspace.url") + "/feedback");
email.send();
} catch (Exception e) {

View File

@@ -52,13 +52,13 @@ import gr.ekt.bte.core.TransformationSpec;
import gr.ekt.bte.dataloader.FileDataLoader;
import gr.ekt.bteio.generators.DSpaceOutputGenerator;
import gr.ekt.bteio.loaders.OAIPMHDataLoader;
-import org.apache.commons.collections4.ComparatorUtils;
+import org.apache.commons.collections.ComparatorUtils;
import org.apache.commons.io.FileDeleteStrategy;
import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang3.RandomStringUtils;
-import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
-import org.apache.logging.log4j.Logger;
+import org.apache.log4j.Logger;
import org.apache.xpath.XPathAPI;
import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.app.util.LocalSchemaFilenameFilter;
@@ -74,7 +74,6 @@ import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
-import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.service.BitstreamFormatService;
import org.dspace.content.service.BitstreamService;
@@ -125,7 +124,7 @@ import org.xml.sax.SAXException;
 * allow the registration of files (bitstreams) into DSpace.
 */
public class ItemImportServiceImpl implements ItemImportService, InitializingBean {
-    private final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemImportServiceImpl.class);
+    private final Logger log = Logger.getLogger(ItemImportServiceImpl.class);
@Autowired(required = true)
protected AuthorizeService authorizeService;
@@ -678,7 +677,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
Node schemaAttr = metadata.item(0).getAttributes().getNamedItem(
    "schema");
if (schemaAttr == null) {
-schema = MetadataSchemaEnum.DC.getName();
+schema = MetadataSchema.DC_SCHEMA;
} else {
schema = schemaAttr.getNodeValue();
}
@@ -1797,7 +1796,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "bte_batch_import_error"));
email.addRecipient(eperson.getEmail());
email.addArgument(error);
-email.addArgument(ConfigurationManager.getProperty("dspace.ui.url") + "/feedback");
+email.addArgument(ConfigurationManager.getProperty("dspace.url") + "/feedback");
email.send();
} catch (Exception e) {

View File

@@ -30,7 +30,7 @@ import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
-import org.apache.logging.log4j.Logger;
+import org.apache.log4j.Logger;
import org.dspace.app.util.LocalSchemaFilenameFilter;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
@@ -47,7 +47,7 @@ import org.w3c.dom.Document;
 * Encapsulates the Item in the context of the DSpace Archive Format
 */
public class ItemArchive {
-    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemArchive.class);
+    private static final Logger log = Logger.getLogger(ItemArchive.class);
public static final String DUBLIN_CORE_XML = "dublin_core.xml";

View File

@@ -31,12 +31,11 @@ import org.apache.commons.cli.PosixParser;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
+import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
-import org.dspace.handle.factory.HandleServiceFactory;
-import org.dspace.handle.service.HandleService;
/**
 * Provides some batch editing capabilities for items in DSpace:
@@ -79,7 +78,6 @@ public class ItemUpdate {
protected static final EPersonService epersonService = EPersonServiceFactory.getInstance().getEPersonService();
protected static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
-protected static final HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
static {
filterAliases.put("ORIGINAL", "org.dspace.app.itemupdate.OriginalBitstreamFilter");
@@ -332,7 +330,10 @@ public class ItemUpdate {
iu.setEPerson(context, iu.eperson);
context.turnOffAuthorisationSystem();
-HANDLE_PREFIX = handleService.getCanonicalPrefix();
+HANDLE_PREFIX = ConfigurationManager.getProperty("handle.canonical.prefix");
+if (HANDLE_PREFIX == null || HANDLE_PREFIX.length() == 0) {
+    HANDLE_PREFIX = "http://hdl.handle.net/";
+}
iu.processArchive(context, sourcedir, itemField, metadataIndexName, alterProvenance, isTest);

View File

@@ -28,13 +28,12 @@ import javax.xml.transform.TransformerException;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
-import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang.StringUtils;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
-import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
@@ -190,7 +189,7 @@ public class MetadataUtilities {
NodeList metadata = XPathAPI.selectNodeList(document, "/dublin_core");
Node schemaAttr = metadata.item(0).getAttributes().getNamedItem("schema");
if (schemaAttr == null) {
-schema = MetadataSchemaEnum.DC.getName();
+schema = MetadataSchema.DC_SCHEMA;
} else {
schema = schemaAttr.getNodeValue();
}

View File

@@ -13,12 +13,6 @@ import java.lang.reflect.Method;
import java.util.List;
import java.util.TreeMap;
-import org.apache.commons.cli.ParseException;
-import org.apache.log4j.Logger;
-import org.dspace.scripts.DSpaceRunnable;
-import org.dspace.scripts.factory.ScriptServiceFactory;
-import org.dspace.scripts.handler.DSpaceRunnableHandler;
-import org.dspace.scripts.handler.impl.CommandLineDSpaceRunnableHandler;
import org.dspace.servicemanager.DSpaceKernelImpl;
import org.dspace.servicemanager.DSpaceKernelInit;
import org.dspace.services.RequestService;
@@ -33,9 +27,6 @@ import org.jdom.input.SAXBuilder;
 * @author Mark Diggory
 */
public class ScriptLauncher {
-    private static final Logger log = Logger.getLogger(ScriptLauncher.class);
/**
 * The service manager kernel
 */
@@ -85,9 +76,8 @@ public class ScriptLauncher {
}
// Look up command in the configuration, and execute.
-CommandLineDSpaceRunnableHandler commandLineDSpaceRunnableHandler = new CommandLineDSpaceRunnableHandler();
-int status = handleScript(args, commandConfigs, commandLineDSpaceRunnableHandler, kernelImpl);
+int status;
+status = runOneCommand(commandConfigs, args);
// Destroy the service kernel if it is still alive
@@ -96,50 +86,6 @@ public class ScriptLauncher {
}
System.exit(status);
}
-/**
- * This method will take the arguments from a commandline input and it'll find the script that the first argument
- * refers to and it'll execute this script.
- * It can return a 1 or a 0 depending on whether the script failed or passed respectively
- * @param args The arguments for the script and the script as first one in the array
- * @param commandConfigs The Document
- * @param dSpaceRunnableHandler The DSpaceRunnableHandler for this execution
- * @param kernelImpl The relevant DSpaceKernelImpl
- * @return A 1 or 0 depending on whether the script failed or passed respectively
- */
-public static int handleScript(String[] args, Document commandConfigs,
-                               DSpaceRunnableHandler dSpaceRunnableHandler,
-                               DSpaceKernelImpl kernelImpl) {
-    int status;
-    DSpaceRunnable script = ScriptServiceFactory.getInstance().getScriptService().getScriptForName(args[0]);
-    if (script != null) {
-        status = executeScript(args, dSpaceRunnableHandler, script);
-    } else {
-        status = runOneCommand(commandConfigs, args, kernelImpl);
-    }
-    return status;
-}
-/**
- * This method will simply execute the script
- * @param args The arguments of the script with the script name as first place in the array
- * @param dSpaceRunnableHandler The relevant DSpaceRunnableHandler
- * @param script The script to be executed
- * @return A 1 or 0 depending on whether the script failed or passed respectively
- */
-private static int executeScript(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler,
-                                 DSpaceRunnable script) {
-    try {
-        script.initialize(args, dSpaceRunnableHandler);
-        script.run();
-        return 0;
-    } catch (ParseException e) {
-        script.printHelp();
-        e.printStackTrace();
-        return 1;
-    }
-}
protected static int runOneCommand(Document commandConfigs, String[] args) {
@@ -152,7 +98,7 @@ public class ScriptLauncher {
 * @param commandConfigs Document
 * @param args the command line arguments given
 */
-protected static int runOneCommand(Document commandConfigs, String[] args, DSpaceKernelImpl kernelImpl) {
+public static int runOneCommand(Document commandConfigs, String[] args, DSpaceKernelImpl kernelImpl) {
String request = args[0];
Element root = commandConfigs.getRootElement();
List<Element> commands = root.getChildren("command");

View File

@@ -11,7 +11,7 @@ import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.IOUtils;
-import org.apache.logging.log4j.Logger;
+import org.apache.log4j.Logger;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.hssf.extractor.ExcelExtractor;
@@ -36,7 +36,7 @@ import org.dspace.content.Item;
 */
public class ExcelFilter extends MediaFilter {
-    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(ExcelFilter.class);
+    private static Logger log = Logger.getLogger(ExcelFilter.class);
public String getFilteredName(String oldFilename) {
return oldFilename + ".txt";

View File

@@ -143,7 +143,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
// PDFs using the CMYK color system can be handled specially if
// profiles are defined
if (cmyk_profile != null && srgb_profile != null) {
-Info imageInfo = new Info(f.getAbsolutePath() + s, true);
+Info imageInfo = new Info(f.getAbsolutePath(), true);
String imageClass = imageInfo.getImageClass();
if (imageClass.contains("CMYK")) {
op.profile(cmyk_profile);

View File

@@ -22,7 +22,7 @@ import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
-import org.apache.commons.lang3.ArrayUtils;
+import org.apache.commons.lang.ArrayUtils;
import org.dspace.app.mediafilter.factory.MediaFilterServiceFactory;
import org.dspace.app.mediafilter.service.MediaFilterService;
import org.dspace.content.Collection;

View File

@@ -220,7 +220,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
} catch (Exception e) {
String handle = myItem.getHandle();
List<Bundle> bundles = myBitstream.getBundles();
-long size = myBitstream.getSizeBytes();
+long size = myBitstream.getSize();
String checksum = myBitstream.getChecksum() + " (" + myBitstream.getChecksumAlgorithm() + ")";
int assetstore = myBitstream.getStoreNumber();
@@ -310,11 +310,12 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
// get bitstream filename, calculate destination filename
String newName = formatFilter.getFilteredName(source.getName());
-// check if destination bitstream exists
-Bundle existingBundle = null;
-Bitstream existingBitstream = null;
+Bitstream existingBitstream = null; // is there an existing rendition?
+Bundle targetBundle = null; // bundle we're modifying
List<Bundle> bundles = itemService.getBundles(item, formatFilter.getBundleName());
+// check if destination bitstream exists
if (bundles.size() > 0) {
// only finds the last match (FIXME?)
for (Bundle bundle : bundles) {
@@ -322,7 +323,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
for (Bitstream bitstream : bitstreams) {
if (bitstream.getName().trim().equals(newName.trim())) {
-existingBundle = bundle;
+targetBundle = bundle;
existingBitstream = bitstream;
}
}
@@ -344,71 +345,63 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
+ " (item: " + item.getHandle() + ")"); + " (item: " + item.getHandle() + ")");
} }
System.out.println("File: " + newName); InputStream destStream;
try {
// start filtering of the bitstream, using try with resource to close all InputStreams properly System.out.println("File: " + newName);
try ( destStream = formatFilter.getDestinationStream(item, bitstreamService.retrieve(context, source), isVerbose);
// get the source stream
InputStream srcStream = bitstreamService.retrieve(context, source);
// filter the source stream to produce the destination stream
// this is the hard work, check for OutOfMemoryErrors at the end of the try clause.
InputStream destStream = formatFilter.getDestinationStream(item, srcStream, isVerbose);
) {
if (destStream == null) { if (destStream == null) {
if (!isQuiet) { if (!isQuiet) {
System.out.println("SKIPPED: bitstream " + source.getID() System.out.println("SKIPPED: bitstream " + source.getID()
+ " (item: " + item.getHandle() + ") because filtering was unsuccessful"); + " (item: " + item.getHandle() + ") because filtering was unsuccessful");
} }
return false; return false;
} }
Bundle targetBundle; // bundle we're modifying
if (bundles.size() < 1) {
// create new bundle if needed
targetBundle = bundleService.create(context, item, formatFilter.getBundleName());
} else {
// take the first match as we already looked out for the correct bundle name
targetBundle = bundles.get(0);
}
// create bitstream to store the filter result
Bitstream b = bitstreamService.create(context, targetBundle, destStream);
// set the name, source and description of the bitstream
b.setName(context, newName);
b.setSource(context, "Written by FormatFilter " + formatFilter.getClass().getName() +
" on " + DCDate.getCurrent() + " (GMT).");
b.setDescription(context, formatFilter.getDescription());
// Set the format of the bitstream
BitstreamFormat bf = bitstreamFormatService.findByShortDescription(context,
formatFilter.getFormatString());
bitstreamService.setFormat(context, b, bf);
bitstreamService.update(context, b);
//Set permissions on the derivative bitstream
//- First remove any existing policies
authorizeService.removeAllPolicies(context, b);
//- Determine if this is a public-derivative format
if (publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) {
//- Set derivative bitstream to be publicly accessible
Group anonymous = groupService.findByName(context, Group.ANONYMOUS);
authorizeService.addPolicy(context, b, Constants.READ, anonymous);
} else {
//- Inherit policies from the source bitstream
authorizeService.inheritPolicies(context, source, b);
}
//do post-processing of the generated bitstream
formatFilter.postProcessBitstream(context, item, b);
} catch (OutOfMemoryError oome) { } catch (OutOfMemoryError oome) {
System.out.println("!!! OutOfMemoryError !!!"); System.out.println("!!! OutOfMemoryError !!!");
return false;
}
// create new bundle if needed
if (bundles.size() < 1) {
targetBundle = bundleService.create(context, item, formatFilter.getBundleName());
} else {
// take the first match
targetBundle = bundles.get(0);
}
Bitstream b = bitstreamService.create(context, targetBundle, destStream);
// Now set the format and name of the bitstream
b.setName(context, newName);
b.setSource(context, "Written by FormatFilter " + formatFilter.getClass().getName() +
" on " + DCDate.getCurrent() + " (GMT).");
b.setDescription(context, formatFilter.getDescription());
// Find the proper format
BitstreamFormat bf = bitstreamFormatService.findByShortDescription(context,
formatFilter.getFormatString());
bitstreamService.setFormat(context, b, bf);
bitstreamService.update(context, b);
//Set permissions on the derivative bitstream
//- First remove any existing policies
authorizeService.removeAllPolicies(context, b);
//- Determine if this is a public-derivative format
if (publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) {
//- Set derivative bitstream to be publicly accessible
Group anonymous = groupService.findByName(context, Group.ANONYMOUS);
authorizeService.addPolicy(context, b, Constants.READ, anonymous);
} else {
//- Inherit policies from the source bitstream
authorizeService.inheritPolicies(context, source, b);
} }
// fixme - set date? // fixme - set date?
// we are overwriting, so remove old bitstream // we are overwriting, so remove old bitstream
if (existingBitstream != null) { if (existingBitstream != null) {
bundleService.removeBitstream(context, existingBundle, existingBitstream); bundleService.removeBitstream(context, targetBundle, existingBitstream);
} }
if (!isQuiet) { if (!isQuiet) {
@@ -416,6 +409,9 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
+ " (item: " + item.getHandle() + ") and created '" + newName + "'"); + " (item: " + item.getHandle() + ") and created '" + newName + "'");
} }
//do post-processing of the generated bitstream
formatFilter.postProcessBitstream(context, item, b);
return true; return true;
} }

View File

@@ -11,9 +11,7 @@ import java.awt.image.BufferedImage;
import java.io.InputStream;
import javax.imageio.ImageIO;
-import org.apache.logging.log4j.Logger;
import org.apache.pdfbox.pdmodel.PDDocument;
-import org.apache.pdfbox.pdmodel.encryption.InvalidPasswordException;
import org.apache.pdfbox.rendering.PDFRenderer;
import org.dspace.content.Item;
@@ -28,8 +26,6 @@ import org.dspace.content.Item;
 * @author Jason Sherman jsherman@usao.edu
 */
public class PDFBoxThumbnail extends MediaFilter implements SelfRegisterInputFormats {
-    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PDFBoxThumbnail.class);
@Override
public String getFilteredName(String oldFilename) {
return oldFilename + ".jpg";
@@ -69,19 +65,12 @@ public class PDFBoxThumbnail extends MediaFilter implements SelfRegisterInputFor
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
    throws Exception {
-BufferedImage buf;
-// Render the page image.
-try ( PDDocument doc = PDDocument.load(source); ) {
-    PDFRenderer renderer = new PDFRenderer(doc);
-    buf = renderer.renderImage(0);
-} catch (InvalidPasswordException ex) {
-    log.error("PDF is encrypted. Cannot create thumbnail (item: {})",
-        () -> currentItem.getHandle());
-    return null;
-}
-// Generate thumbnail derivative and return as IO stream.
+PDDocument doc = PDDocument.load(source);
+PDFRenderer renderer = new PDFRenderer(doc);
+BufferedImage buf = renderer.renderImage(0);
+// ImageIO.write(buf, "PNG", new File("custom-render.png"));
+doc.close();
JPEGFilter jpegFilter = new JPEGFilter();
return jpegFilter.getThumb(currentItem, buf, verbose);
}

View File

@@ -16,9 +16,8 @@ import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
-import org.apache.logging.log4j.Logger;
+import org.apache.log4j.Logger;
import org.apache.pdfbox.pdmodel.PDDocument;
-import org.apache.pdfbox.pdmodel.encryption.InvalidPasswordException;
import org.apache.pdfbox.text.PDFTextStripper;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
@@ -31,7 +30,7 @@ import org.dspace.core.ConfigurationManager;
 */
public class PDFFilter extends MediaFilter {
-    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PDFFilter.class);
+    private static Logger log = Logger.getLogger(PDFFilter.class);
@Override
public String getFilteredName(String oldFilename) {
@@ -96,10 +95,6 @@ public class PDFFilter extends MediaFilter {
try {
pdfDoc = PDDocument.load(source);
pts.writeText(pdfDoc, writer);
-} catch (InvalidPasswordException ex) {
-    log.error("PDF is encrypted. Cannot extract text (item: {})",
-        () -> currentItem.getHandle());
-    return null;
} finally {
try {
if (pdfDoc != null) {

View File

@@ -10,7 +10,7 @@ package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
-import org.apache.logging.log4j.Logger;
+import org.apache.log4j.Logger;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.hslf.extractor.PowerPointExtractor;
@@ -23,7 +23,7 @@ import org.dspace.content.Item;
 */
public class PowerPointFilter extends MediaFilter {
-    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PowerPointFilter.class);
+    private static Logger log = Logger.getLogger(PowerPointFilter.class);
@Override
public String getFilteredName(String oldFilename) {

View File

@@ -0,0 +1,93 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import org.apache.log4j.Logger;
import org.dspace.content.Item;
import org.textmining.extraction.TextExtractor;
import org.textmining.extraction.word.WordTextExtractorFactory;
/*
*
* to do: helpful error messages - can't find mediafilter.cfg - can't
* instantiate filter - bitstream format doesn't exist.
*
*/
public class WordFilter extends MediaFilter {
private static Logger log = Logger.getLogger(WordFilter.class);
@Override
public String getFilteredName(String oldFilename) {
return oldFilename + ".txt";
}
/**
* @return String bundle name
*/
@Override
public String getBundleName() {
return "TEXT";
}
/**
* @return String bitstreamformat
*/
@Override
public String getFormatString() {
return "Text";
}
/**
* @return String description
*/
@Override
public String getDescription() {
return "Extracted text";
}
/**
* @param currentItem item
* @param source source input stream
* @param verbose verbose mode
* @return InputStream the resulting input stream
* @throws Exception if error
*/
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception {
// get input stream from bitstream
// pass to filter, get string back
try {
WordTextExtractorFactory factory = new WordTextExtractorFactory();
TextExtractor e = factory.textExtractor(source);
String extractedText = e.getText();
// if verbose flag is set, print out extracted text
// to STDOUT
if (verbose) {
System.out.println(extractedText);
}
// generate an input stream with the extracted text
byte[] textBytes = extractedText.getBytes();
ByteArrayInputStream bais = new ByteArrayInputStream(textBytes);
return bais; // safe: the ByteArrayInputStream keeps its own reference to the byte array
} catch (IOException ioe) {
System.out.println("Invalid Word Format");
log.error("Error detected - Word File format not recognized: "
+ ioe.getMessage(), ioe);
throw ioe;
}
}
}

View File

@@ -78,7 +78,7 @@ public class RequestItem implements ReloadableEntity<Integer> {
private Date request_date = null;
@Column(name = "accept_request")
-private boolean accept_request;
+private Boolean accept_request = null;
/**
 * Protected constructor, create object using:
@@ -88,7 +88,6 @@ public class RequestItem implements ReloadableEntity<Integer> {
protected RequestItem() {
}
-@Override
public Integer getID() {
return requestitem_id;
}

View File

@@ -9,8 +9,8 @@ package org.dspace.app.requestitem;
import java.sql.SQLException;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.logging.log4j.Logger;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
@@ -31,7 +31,7 @@ import org.springframework.beans.factory.annotation.Autowired;
 */
public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
-    private Logger log = org.apache.logging.log4j.LogManager.getLogger(RequestItemHelpdeskStrategy.class);
+    private Logger log = Logger.getLogger(RequestItemHelpdeskStrategy.class);
@Autowired(required = true)
protected EPersonService ePersonService;

View File

@@ -10,7 +10,7 @@ package org.dspace.app.requestitem;
import java.sql.SQLException;
import java.util.List;
-import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang.StringUtils;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.service.ItemService;

View File

@@ -10,7 +10,7 @@ package org.dspace.app.requestitem;
import java.sql.SQLException;
import java.util.Date;
-import org.apache.logging.log4j.Logger;
+import org.apache.log4j.Logger;
import org.dspace.app.requestitem.dao.RequestItemDAO;
import org.dspace.app.requestitem.service.RequestItemService;
import org.dspace.content.Bitstream;
@@ -28,7 +28,7 @@ import org.springframework.beans.factory.annotation.Autowired;
 */
public class RequestItemServiceImpl implements RequestItemService {
-    private final Logger log = org.apache.logging.log4j.LogManager.getLogger(RequestItemServiceImpl.class);
+    private final Logger log = Logger.getLogger(RequestItemServiceImpl.class);
@Autowired(required = true)
protected RequestItemDAO requestItemDAO;

View File

@@ -8,15 +8,13 @@
package org.dspace.app.requestitem.dao.impl;
import java.sql.SQLException;
-import javax.persistence.criteria.CriteriaBuilder;
-import javax.persistence.criteria.CriteriaQuery;
-import javax.persistence.criteria.Root;
import org.dspace.app.requestitem.RequestItem;
-import org.dspace.app.requestitem.RequestItem_;
import org.dspace.app.requestitem.dao.RequestItemDAO;
import org.dspace.core.AbstractHibernateDAO;
import org.dspace.core.Context;
+import org.hibernate.Criteria;
+import org.hibernate.criterion.Restrictions;
/**
 * Hibernate implementation of the Database Access Object interface class for the RequestItem object.
@@ -32,12 +30,9 @@ public class RequestItemDAOImpl extends AbstractHibernateDAO<RequestItem> implem
@Override
public RequestItem findByToken(Context context, String token) throws SQLException {
-CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
-CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, RequestItem.class);
-Root<RequestItem> requestItemRoot = criteriaQuery.from(RequestItem.class);
-criteriaQuery.select(requestItemRoot);
-criteriaQuery.where(criteriaBuilder.equal(requestItemRoot.get(RequestItem_.token), token));
-return uniqueResult(context, criteriaQuery, false, RequestItem.class, -1, -1);
+Criteria criteria = createCriteria(context, RequestItem.class);
+criteria.add(Restrictions.eq("token", token));
+return uniqueResult(criteria);
}

View File

@@ -15,8 +15,8 @@ import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.logging.log4j.Logger;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
import org.dspace.app.sfx.service.SFXFileReaderService;
import org.dspace.content.DCPersonName;
import org.dspace.content.Item;
@@ -58,7 +58,7 @@ public class SFXFileReaderServiceImpl implements SFXFileReaderService {
/**
 * log4j logger
 */
-private final Logger log = org.apache.logging.log4j.LogManager.getLogger(SFXFileReaderServiceImpl.class);
+private final Logger log = Logger.getLogger(SFXFileReaderServiceImpl.class);
protected SFXFileReaderServiceImpl() {
}

View File

@@ -15,9 +15,6 @@ import java.util.List;
 * @author Andrea Bollini
 */
public class SHERPAPublisher {
-    private String id;
private String name;
private String alias;
@@ -52,7 +49,7 @@ public class SHERPAPublisher {
private String dateupdated;
-public SHERPAPublisher(String id, String name, String alias, String homeurl,
+public SHERPAPublisher(String name, String alias, String homeurl,
                       String prearchiving, List<String> prerestriction,
                       String postarchiving, List<String> postrestriction,
                       String pubarchiving, List<String> pubrestriction,
@@ -60,8 +57,6 @@ public class SHERPAPublisher {
                       String paidaccessname, String paidaccessnotes,
                       List<String[]> copyright, String romeocolour, String datedded,
                       String dateupdated) {
-this.id = id;
this.name = name;
this.alias = alias;
@@ -165,11 +160,4 @@ public class SHERPAPublisher {
return dateupdated;
}
-/**
- * Generic getter for the id
- * @return the id value of this SHERPAPublisher
- */
-public String getId() {
-    return id;
-}
}

View File

@@ -13,8 +13,7 @@ import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.logging.log4j.Logger;
+import org.apache.commons.lang.StringUtils;
import org.dspace.app.util.XMLUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
@@ -25,9 +24,7 @@ import org.w3c.dom.Element;
 * @author Andrea Bollini
 */
public class SHERPAResponse {
-    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SHERPAResponse.class);
-    private int numHits;
+    private boolean error;
private String message;
@@ -60,14 +57,13 @@ public class SHERPAResponse {
Element publishersElement = XMLUtils.getSingleElement(xmlRoot, Element publishersElement = XMLUtils.getSingleElement(xmlRoot,
"publishers"); "publishers");
String numhitsString = XMLUtils.getElementValue(headersElement, "numhits");
if (StringUtils.isNotBlank(numhitsString)) {
numHits = Integer.parseInt(numhitsString);
} else {
numHits = 0;
}
message = XMLUtils.getElementValue(headersElement, "message"); message = XMLUtils.getElementValue(headersElement, "message");
if (StringUtils.isNotBlank(message)) {
error = true;
return;
}
license = XMLUtils.getElementValue(headersElement, "license"); license = XMLUtils.getElementValue(headersElement, "license");
licenseURL = XMLUtils.getElementValue(headersElement, "licenseurl"); licenseURL = XMLUtils.getElementValue(headersElement, "licenseurl");
disclaimer = XMLUtils.getElementValue(headersElement, "disclaimer"); disclaimer = XMLUtils.getElementValue(headersElement, "disclaimer");
@@ -116,8 +112,9 @@ public class SHERPAResponse {
Element copyrightlinksElement = XMLUtils.getSingleElement( Element copyrightlinksElement = XMLUtils.getSingleElement(
publisherElement, "copyrightlinks"); publisherElement, "copyrightlinks");
publishers publishers
.add(new SHERPAPublisher(publisherElement.getAttribute("id"), XMLUtils.getElementValue( .add(new SHERPAPublisher(XMLUtils.getElementValue(
publisherElement, "name"), publisherElement, "name"),
XMLUtils.getElementValue(publisherElement, XMLUtils.getElementValue(publisherElement,
"alias"), XMLUtils.getElementValue( "alias"), XMLUtils.getElementValue(
@@ -165,12 +162,17 @@ public class SHERPAResponse {
} }
} }
} catch (Exception e) { } catch (Exception e) {
log.error("Error parsing SHERPA API Response", e); error = true;
} }
} }
public SHERPAResponse(String message) { public SHERPAResponse(String message) {
this.message = message; this.message = message;
this.error = true;
}
public boolean isError() {
return error;
} }
public String getMessage() { public String getMessage() {
@@ -196,8 +198,4 @@ public class SHERPAResponse {
public List<SHERPAPublisher> getPublishers() { public List<SHERPAPublisher> getPublishers() {
return publishers; return publishers;
} }
public int getNumHits() {
return numHits;
}
} }
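
The richer side of this hunk parses a numhits header defensively (blank or missing values fall back to zero) and logs parse failures via the surrounding try/catch instead of silently setting an error flag. The parse step as a stand-alone helper, with plain JDK checks standing in for StringUtils.isNotBlank:

    static int parseNumHits(String numhitsString) {
        // Blank or absent numhits degrades to zero hits; a malformed number
        // still throws NumberFormatException for the caller's catch block.
        return (numhitsString != null && !numhitsString.trim().isEmpty())
                ? Integer.parseInt(numhitsString)
                : 0;
    }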


@@ -7,7 +7,7 @@
*/ */
package org.dspace.app.sherpa; package org.dspace.app.sherpa;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpEntity; import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse; import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus; import org.apache.http.HttpStatus;
@@ -16,7 +16,7 @@ import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.utils.URIBuilder; import org.apache.http.client.utils.URIBuilder;
import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.logging.log4j.Logger; import org.apache.log4j.Logger;
import org.dspace.core.ConfigurationManager; import org.dspace.core.ConfigurationManager;
public class SHERPAService { public class SHERPAService {
@@ -29,7 +29,7 @@ public class SHERPAService {
/** /**
* log4j category * log4j category
*/ */
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SHERPAService.class); private static final Logger log = Logger.getLogger(SHERPAService.class);
public SHERPAService() { public SHERPAService() {
HttpClientBuilder builder = HttpClientBuilder.create(); HttpClientBuilder builder = HttpClientBuilder.create();


@@ -11,8 +11,8 @@ import java.util.LinkedHashSet;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.log4j.Logger;
import org.dspace.app.sherpa.SHERPAResponse; import org.dspace.app.sherpa.SHERPAResponse;
import org.dspace.app.sherpa.SHERPAService; import org.dspace.app.sherpa.SHERPAService;
import org.dspace.content.Item; import org.dspace.content.Item;
@@ -27,7 +27,7 @@ public class SHERPASubmitService {
/** /**
* log4j logger * log4j logger
*/ */
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(SHERPASubmitService.class); private static Logger log = Logger.getLogger(SHERPASubmitService.class);
public void setConfiguration(SHERPASubmitConfigurationService configuration) { public void setConfiguration(SHERPASubmitConfigurationService configuration) {
this.configuration = configuration; this.configuration = configuration;


@@ -27,9 +27,9 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser; import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.log4j.Logger;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.Community; import org.dspace.content.Community;
import org.dspace.content.Item; import org.dspace.content.Item;
@@ -52,7 +52,7 @@ public class GenerateSitemaps {
/** /**
* Logger * Logger
*/ */
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(GenerateSitemaps.class); private static Logger log = Logger.getLogger(GenerateSitemaps.class);
private static final CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); private static final CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
private static final CollectionService collectionService = private static final CollectionService collectionService =
@@ -152,11 +152,11 @@ public class GenerateSitemaps {
*/ */
public static void generateSitemaps(boolean makeHTMLMap, public static void generateSitemaps(boolean makeHTMLMap,
boolean makeSitemapOrg) throws SQLException, IOException { boolean makeSitemapOrg) throws SQLException, IOException {
String sitemapStem = configurationService.getProperty("dspace.ui.url") String sitemapStem = configurationService.getProperty("dspace.url")
+ "/sitemap"; + "/sitemap";
String htmlMapStem = configurationService.getProperty("dspace.ui.url") String htmlMapStem = configurationService.getProperty("dspace.url")
+ "/htmlmap"; + "/htmlmap";
String handleURLStem = configurationService.getProperty("dspace.ui.url") String handleURLStem = configurationService.getProperty("dspace.url")
+ "/handle/"; + "/handle/";
File outputDir = new File(configurationService.getProperty("sitemap.dir")); File outputDir = new File(configurationService.getProperty("sitemap.dir"));
@@ -293,7 +293,7 @@ public class GenerateSitemaps {
.getProperty("http.proxy.port")); .getProperty("http.proxy.port"));
} }
String sitemapURL = configurationService.getProperty("dspace.ui.url") String sitemapURL = configurationService.getProperty("dspace.url")
+ "/sitemap"; + "/sitemap";
URL url = new URL(engineURL + URLEncoder.encode(sitemapURL, "UTF-8")); URL url = new URL(engineURL + URLEncoder.encode(sitemapURL, "UTF-8"));
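
The hunks in this file rename the base-URL property from dspace.url to dspace.ui.url. A defensive read that accepts either name (the fallback is an editorial illustration; neither side of the diff does this):

    static String sitemapStem(ConfigurationService configurationService) {
        String base = configurationService.getProperty("dspace.ui.url");
        if (base == null) {
            base = configurationService.getProperty("dspace.url"); // legacy name
        }
        return base + "/sitemap";
    }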


@@ -33,7 +33,6 @@ import org.apache.commons.lang3.StringUtils;
import org.dspace.core.ConfigurationManager; import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.LogManager; import org.dspace.core.LogManager;
import org.dspace.core.Utils;
import org.dspace.discovery.DiscoverQuery; import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.SearchServiceException; import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.SearchUtils; import org.dspace.discovery.SearchUtils;
@@ -582,9 +581,9 @@ public class LogAnalyser {
} }
// now do the host name and url lookup // now do the host name and url lookup
hostName = Utils.getHostName(ConfigurationManager.getProperty("dspace.ui.url")); hostName = ConfigurationManager.getProperty("dspace.hostname").trim();
name = ConfigurationManager.getProperty("dspace.name").trim(); name = ConfigurationManager.getProperty("dspace.name").trim();
url = ConfigurationManager.getProperty("dspace.ui.url").trim(); url = ConfigurationManager.getProperty("dspace.url").trim();
if ((url != null) && (!url.endsWith("/"))) { if ((url != null) && (!url.endsWith("/"))) {
url = url + "/"; url = url + "/";
} }
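
Here the host name stops coming from a dedicated dspace.hostname property and is instead derived from the UI URL via Utils.getHostName(...). One plausible JDK-only way to derive it (illustrative, not DSpace's Utils implementation):

    import java.net.URI;

    static String hostOf(String url) {
        // e.g. "https://demo.dspace.org/xmlui" -> "demo.dspace.org"
        return (url == null) ? null : URI.create(url).getHost();
    }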


@@ -28,7 +28,7 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataValue; import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
@@ -763,10 +763,9 @@ public class ReportGenerator {
// build the reference // build the reference
// FIXME: here we have blurred the line between content and presentation // FIXME: here we have blurred the line between content and presentation
// and it should probably be un-blurred // and it should probably be un-blurred
List<MetadataValue> title = itemService.getMetadata(item, MetadataSchemaEnum.DC.getName(), List<MetadataValue> title = itemService.getMetadata(item, MetadataSchema.DC_SCHEMA, "title", null, Item.ANY);
"title", null, Item.ANY);
List<MetadataValue> author = itemService List<MetadataValue> author = itemService
.getMetadata(item, MetadataSchemaEnum.DC.getName(), "contributor", "author", Item.ANY); .getMetadata(item, MetadataSchema.DC_SCHEMA, "contributor", "author", Item.ANY);
StringBuffer authors = new StringBuffer(); StringBuffer authors = new StringBuffer();
if (author.size() > 0) { if (author.size() > 0) {
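
This hunk trades the MetadataSchema.DC_SCHEMA string constant for MetadataSchemaEnum.DC.getName(). The enum-backed pattern in miniature (the enum below is illustrative, not DSpace's actual MetadataSchemaEnum):

    public enum SchemaName {
        DC("dc");

        private final String name;

        SchemaName(String name) {
            this.name = name;
        }

        /** The short schema name used in metadata lookups, e.g. "dc". */
        public String getName() {
            return name;
        }
    }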


@@ -15,7 +15,7 @@ package org.dspace.app.statistics;
* *
* @author Richard Jones * @author Richard Jones
*/ */
public class Stat implements Comparable<Stat> { public class Stat implements Comparable {
// FIXME: this class is functional but a bit messy, and should be neatened // FIXME: this class is functional but a bit messy, and should be neatened
// up and completed // up and completed
@@ -132,17 +132,17 @@ public class Stat implements Comparable<Stat> {
/** /**
* Compare the current Stat to the given Stat returning -1 if o is less * compare the current object to the given object returning -1 if o is less
* than the current Stat, 0 if they are the same, and +1 if o is greater * than the current object, 0 if they are the same, and +1 if o is greater
* than the current Stat. * than the current object.
* *
* @param stat the Stat object to compare to the current one * @param o the object to compare to the current one
* @return +1, 0, -1 if o is less than, equal to, or greater than the * @return +1, 0, -1 if o is less than, equal to, or greater than the
* current object value. * current object value.
*/ */
@Override @Override
public int compareTo(Stat stat) { public int compareTo(Object o) {
int objectValue = stat.getValue(); int objectValue = ((Stat) o).getValue();
if (objectValue < this.getValue()) { if (objectValue < this.getValue()) {
return -1; return -1;
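
Typing the interface as Comparable<Stat> removes the unchecked (Stat) cast and turns a potential runtime ClassCastException into a compile-time error. A parallel example preserving Stat's descending order:

    public class Score implements Comparable<Score> {
        private final int value;

        public Score(int value) {
            this.value = value;
        }

        public int getValue() {
            return value;
        }

        @Override
        public int compareTo(Score other) {
            // Like Stat above: larger values sort first (descending order).
            return Integer.compare(other.getValue(), this.getValue());
        }
    }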


@@ -21,7 +21,7 @@ import java.util.Set;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.apache.commons.lang3.time.DateUtils; import org.apache.commons.lang.time.DateUtils;
import org.dspace.core.ConfigurationManager; import org.dspace.core.ConfigurationManager;
/** /**


@@ -50,16 +50,16 @@ abstract public class AbstractDSpaceWebapp
/** /**
* Construct a particular kind of DSpace application. * Construct a particular kind of DSpace application.
* *
* @param kind what kind of application is this? * @param kind what kind of application is this? (XMLUI, JSPUI, etc.)
*/ */
public AbstractDSpaceWebapp(String kind) { public AbstractDSpaceWebapp(String kind) {
this.kind = kind; this.kind = kind;
started = new Date(); started = new Date();
url = ConfigurationManager.getProperty("dspace.ui.url"); url = ConfigurationManager.getProperty("dspace.url");
if (null == url) { if (null == url) {
throw new IllegalStateException("dspace.ui.url is undefined"); throw new IllegalStateException("dspace.url is undefined");
} }
} }


@@ -34,6 +34,12 @@ import org.dspace.core.Context;
*/ */
public class AuthorizeUtil { public class AuthorizeUtil {
private static final AuthorizeService authorizeService =
AuthorizeServiceFactory.getInstance().getAuthorizeService();
private static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
private static final CollectionService collectionService =
ContentServiceFactory.getInstance().getCollectionService();
/** /**
* Default constructor * Default constructor
*/ */
@@ -89,9 +95,8 @@ public class AuthorizeUtil {
*/ */
public static void authorizeManageItemPolicy(Context context, Item item) public static void authorizeManageItemPolicy(Context context, Item item)
throws AuthorizeException, SQLException { throws AuthorizeException, SQLException {
AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
if (AuthorizeConfiguration.canItemAdminManagePolicies()) { if (AuthorizeConfiguration.canItemAdminManagePolicies()) {
AuthorizeServiceFactory.getInstance().getAuthorizeService().authorizeAction(context, item, Constants.ADMIN); authorizeService.authorizeAction(context, item, Constants.ADMIN);
} else if (AuthorizeConfiguration.canCollectionAdminManageItemPolicies()) { } else if (AuthorizeConfiguration.canCollectionAdminManageItemPolicies()) {
authorizeService.authorizeAction(context, item authorizeService.authorizeAction(context, item
.getOwningCollection(), Constants.ADMIN); .getOwningCollection(), Constants.ADMIN);
@@ -119,7 +124,6 @@ public class AuthorizeUtil {
*/ */
public static void authorizeManageCollectionPolicy(Context context, public static void authorizeManageCollectionPolicy(Context context,
Collection collection) throws AuthorizeException, SQLException { Collection collection) throws AuthorizeException, SQLException {
AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
if (AuthorizeConfiguration.canCollectionAdminManagePolicies()) { if (AuthorizeConfiguration.canCollectionAdminManagePolicies()) {
authorizeService.authorizeAction(context, collection, authorizeService.authorizeAction(context, collection,
Constants.ADMIN); Constants.ADMIN);
@@ -147,7 +151,6 @@ public class AuthorizeUtil {
*/ */
public static void authorizeManageCommunityPolicy(Context context, public static void authorizeManageCommunityPolicy(Context context,
Community community) throws AuthorizeException, SQLException { Community community) throws AuthorizeException, SQLException {
AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
if (AuthorizeConfiguration.canCommunityAdminManagePolicies()) { if (AuthorizeConfiguration.canCommunityAdminManagePolicies()) {
authorizeService.authorizeAction(context, community, authorizeService.authorizeAction(context, community,
Constants.ADMIN); Constants.ADMIN);
@@ -168,7 +171,6 @@ public class AuthorizeUtil {
*/ */
public static void requireAdminRole(Context context) public static void requireAdminRole(Context context)
throws AuthorizeException, SQLException { throws AuthorizeException, SQLException {
AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
if (!authorizeService.isAdmin(context)) { if (!authorizeService.isAdmin(context)) {
throw new AuthorizeException( throw new AuthorizeException(
"Only system admin are allowed to perform this action"); "Only system admin are allowed to perform this action");
@@ -189,8 +191,6 @@ public class AuthorizeUtil {
*/ */
public static void authorizeManageCCLicense(Context context, Item item) public static void authorizeManageCCLicense(Context context, Item item)
throws AuthorizeException, SQLException { throws AuthorizeException, SQLException {
AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
ItemService itemService = ContentServiceFactory.getInstance().getItemService();
try { try {
authorizeService.authorizeAction(context, item, Constants.ADD); authorizeService.authorizeAction(context, item, Constants.ADD);
authorizeService.authorizeAction(context, item, Constants.REMOVE); authorizeService.authorizeAction(context, item, Constants.REMOVE);
@@ -224,8 +224,6 @@ public class AuthorizeUtil {
*/ */
public static void authorizeManageTemplateItem(Context context, public static void authorizeManageTemplateItem(Context context,
Collection collection) throws AuthorizeException, SQLException { Collection collection) throws AuthorizeException, SQLException {
AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
boolean isAuthorized = collectionService.canEditBoolean(context, collection, false); boolean isAuthorized = collectionService.canEditBoolean(context, collection, false);
if (!isAuthorized if (!isAuthorized
@@ -260,7 +258,6 @@ public class AuthorizeUtil {
*/ */
public static void authorizeManageSubmittersGroup(Context context, public static void authorizeManageSubmittersGroup(Context context,
Collection collection) throws AuthorizeException, SQLException { Collection collection) throws AuthorizeException, SQLException {
AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
if (AuthorizeConfiguration.canCollectionAdminManageSubmitters()) { if (AuthorizeConfiguration.canCollectionAdminManageSubmitters()) {
authorizeService.authorizeAction(context, collection, authorizeService.authorizeAction(context, collection,
Constants.ADMIN); Constants.ADMIN);
@@ -288,7 +285,6 @@ public class AuthorizeUtil {
*/ */
public static void authorizeManageWorkflowsGroup(Context context, public static void authorizeManageWorkflowsGroup(Context context,
Collection collection) throws AuthorizeException, SQLException { Collection collection) throws AuthorizeException, SQLException {
AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
if (AuthorizeConfiguration.canCollectionAdminManageWorkflows()) { if (AuthorizeConfiguration.canCollectionAdminManageWorkflows()) {
authorizeService.authorizeAction(context, collection, authorizeService.authorizeAction(context, collection,
Constants.ADMIN); Constants.ADMIN);
@@ -317,7 +313,6 @@ public class AuthorizeUtil {
*/ */
public static void authorizeManageAdminGroup(Context context, public static void authorizeManageAdminGroup(Context context,
Collection collection) throws AuthorizeException, SQLException { Collection collection) throws AuthorizeException, SQLException {
AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
if (AuthorizeConfiguration.canCollectionAdminManageAdminGroup()) { if (AuthorizeConfiguration.canCollectionAdminManageAdminGroup()) {
authorizeService.authorizeAction(context, collection, authorizeService.authorizeAction(context, collection,
Constants.ADMIN); Constants.ADMIN);
@@ -346,7 +341,6 @@ public class AuthorizeUtil {
*/ */
public static void authorizeRemoveAdminGroup(Context context, public static void authorizeRemoveAdminGroup(Context context,
Collection collection) throws AuthorizeException, SQLException { Collection collection) throws AuthorizeException, SQLException {
AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
List<Community> parentCommunities = collection.getCommunities(); List<Community> parentCommunities = collection.getCommunities();
if (AuthorizeConfiguration if (AuthorizeConfiguration
.canCommunityAdminManageCollectionAdminGroup() .canCommunityAdminManageCollectionAdminGroup()
@@ -374,7 +368,6 @@ public class AuthorizeUtil {
*/ */
public static void authorizeManageAdminGroup(Context context, public static void authorizeManageAdminGroup(Context context,
Community community) throws AuthorizeException, SQLException { Community community) throws AuthorizeException, SQLException {
AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
if (AuthorizeConfiguration.canCommunityAdminManageAdminGroup()) { if (AuthorizeConfiguration.canCommunityAdminManageAdminGroup()) {
authorizeService.authorizeAction(context, community, authorizeService.authorizeAction(context, community,
Constants.ADMIN); Constants.ADMIN);
@@ -399,7 +392,6 @@ public class AuthorizeUtil {
*/ */
public static void authorizeRemoveAdminGroup(Context context, public static void authorizeRemoveAdminGroup(Context context,
Community community) throws SQLException, AuthorizeException { Community community) throws SQLException, AuthorizeException {
AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
List<Community> parentCommunities = community.getParentCommunities(); List<Community> parentCommunities = community.getParentCommunities();
Community parentCommunity = null; Community parentCommunity = null;
if (0 < parentCommunities.size()) { if (0 < parentCommunities.size()) {
@@ -466,7 +458,6 @@ public class AuthorizeUtil {
public static void authorizeWithdrawItem(Context context, Item item) public static void authorizeWithdrawItem(Context context, Item item)
throws SQLException, AuthorizeException { throws SQLException, AuthorizeException {
boolean authorized = false; boolean authorized = false;
AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
if (AuthorizeConfiguration.canCollectionAdminPerformItemWithdrawn()) { if (AuthorizeConfiguration.canCollectionAdminPerformItemWithdrawn()) {
authorized = authorizeService.authorizeActionBoolean(context, item authorized = authorizeService.authorizeActionBoolean(context, item
.getOwningCollection(), Constants.ADMIN); .getOwningCollection(), Constants.ADMIN);
@@ -501,7 +492,6 @@ public class AuthorizeUtil {
*/ */
public static void authorizeReinstateItem(Context context, Item item) public static void authorizeReinstateItem(Context context, Item item)
throws SQLException, AuthorizeException { throws SQLException, AuthorizeException {
AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
List<Collection> colls = item.getCollections(); List<Collection> colls = item.getCollections();
for (Collection coll : colls) { for (Collection coll : colls) {
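
One side of AuthorizeUtil caches the shared service singletons in static fields; the other fetches them through the factory inside every method. The cached form in a self-contained sketch (class name illustrative; the factory and service calls are the ones used throughout this file):

    import java.sql.SQLException;
    import org.dspace.authorize.AuthorizeException;
    import org.dspace.authorize.factory.AuthorizeServiceFactory;
    import org.dspace.authorize.service.AuthorizeService;
    import org.dspace.core.Context;

    public class PolicyGuard {
        // Looked up once; the service is a managed singleton, so caching the
        // reference (as the static fields above do) avoids repeated factory calls.
        private static final AuthorizeService authorizeService =
                AuthorizeServiceFactory.getInstance().getAuthorizeService();

        static void requireAdmin(Context context) throws SQLException, AuthorizeException {
            if (!authorizeService.isAdmin(context)) {
                throw new AuthorizeException("Only system admins are allowed to perform this action");
            }
        }
    }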


@@ -1,58 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.util;
import net.sf.ehcache.Cache;
import net.sf.ehcache.CacheManager;
import org.dspace.core.Context;
import org.dspace.servicemanager.DSpaceKernelImpl;
import org.dspace.servicemanager.DSpaceKernelInit;
import org.dspace.services.CachingService;
/**
* List all EhCache CacheManager and Cache instances.
*
* <p>This is a debugging tool, not used in the daily operation of DSpace.
* Just run it from the installed instance using
* {@code bin/dspace dsrun org.dspace.app.util.CacheSnooper}
* to check that the cache configuration is what you expect it to be,
* given your configuration.
*
* <p>This was created to prove a specific cache configuration patch,
* but I leave it here in the hope that it may be useful to others.
*
* @author Mark H. Wood <mwood@iupui.edu>
*/
public class CacheSnooper {
private CacheSnooper() { }
public static void main(String[] argv) {
// Ensure that the DSpace kernel is started.
DSpaceKernelImpl kernel = DSpaceKernelInit.getKernel(null);
// Ensure that the services cache manager is started.
CachingService serviceCaches = kernel.getServiceManager()
.getServiceByName(null, CachingService.class);
// Ensure that the database layer is started.
Context ctx = new Context();
// List those caches!
for (CacheManager manager : CacheManager.ALL_CACHE_MANAGERS) {
System.out.format("CacheManager: %s%n", manager);
for (String cacheName : manager.getCacheNames()) {
Cache cache = manager.getCache(cacheName);
System.out.format(" Cache: '%s'; maxHeap: %d; maxDisk: %d%n",
cacheName,
cache.getCacheConfiguration().getMaxEntriesLocalHeap(),
cache.getCacheConfiguration().getMaxEntriesLocalDisk());
}
}
}
}


@@ -13,8 +13,8 @@ import java.util.Map;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException; import java.util.regex.PatternSyntaxException;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang.StringUtils;
import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.MetadataSchema;
import org.dspace.core.Utils; import org.dspace.core.Utils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -63,12 +63,6 @@ public class DCInput {
*/ */
private String label = null; private String label = null;
/**
* a style instruction to apply to the input. The exact way to use the style value is UI depending that receive the
* value from the REST API as is
*/
private String style = null;
/** /**
* the input type * the input type
*/ */
@@ -89,11 +83,6 @@ public class DCInput {
*/ */
private boolean repeatable = false; private boolean repeatable = false;
/**
* should name-variants be used?
*/
private boolean nameVariants = false;
/** /**
* 'hint' text to display * 'hint' text to display
*/ */
@@ -139,12 +128,6 @@ public class DCInput {
*/ */
private List<String> typeBind = null; private List<String> typeBind = null;
private boolean isRelationshipField = false;
private boolean isMetadataField = false;
private String relationshipType = null;
private String searchConfiguration = null;
private String filter;
/** /**
* The scope of the input sets, this restricts hidden metadata fields from * The scope of the input sets, this restricts hidden metadata fields from
* view during workflow processing. * view during workflow processing.
@@ -171,7 +154,7 @@ public class DCInput {
// Default the schema to dublin core // Default the schema to dublin core
dcSchema = fieldMap.get("dc-schema"); dcSchema = fieldMap.get("dc-schema");
if (dcSchema == null) { if (dcSchema == null) {
dcSchema = MetadataSchemaEnum.DC.getName(); dcSchema = MetadataSchema.DC_SCHEMA;
} }
//check if the input have a language tag //check if the input have a language tag
@@ -188,9 +171,6 @@ public class DCInput {
String repStr = fieldMap.get("repeatable"); String repStr = fieldMap.get("repeatable");
repeatable = "true".equalsIgnoreCase(repStr) repeatable = "true".equalsIgnoreCase(repStr)
|| "yes".equalsIgnoreCase(repStr); || "yes".equalsIgnoreCase(repStr);
String nameVariantsString = fieldMap.get("name-variants");
nameVariants = (StringUtils.isNotBlank(nameVariantsString)) ?
nameVariantsString.equalsIgnoreCase("true") : false;
label = fieldMap.get("label"); label = fieldMap.get("label");
inputType = fieldMap.get("input-type"); inputType = fieldMap.get("input-type");
// these types are list-controlled // these types are list-controlled
@@ -219,12 +199,7 @@ public class DCInput {
typeBind.add(type.trim()); typeBind.add(type.trim());
} }
} }
style = fieldMap.get("style");
isRelationshipField = fieldMap.containsKey("relationship-type");
isMetadataField = fieldMap.containsKey("dc-schema");
relationshipType = fieldMap.get("relationship-type");
searchConfiguration = fieldMap.get("search-configuration");
filter = fieldMap.get("filter");
} }
/** /**
@@ -277,15 +252,6 @@ public class DCInput {
return isRepeatable(); return isRepeatable();
} }
/**
* Get the nameVariants flag for this row
*
* @return the nameVariants flag
*/
public boolean areNameVariantsAllowed() {
return nameVariants;
}
/** /**
* Get the input type for this row * Get the input type for this row
* *
@@ -296,7 +262,7 @@ public class DCInput {
} }
/** /**
* Get the DC element for this form field. * Get the DC element for this form row.
* *
* @return the DC element * @return the DC element
*/ */
@@ -305,7 +271,7 @@ public class DCInput {
} }
/** /**
* Get the DC namespace prefix for this form field. * Get the DC namespace prefix for this form row.
* *
* @return the DC namespace prefix * @return the DC namespace prefix
*/ */
@@ -324,7 +290,7 @@ public class DCInput {
} }
/** /**
* Is there a required string for this form field? * Is there a required string for this form row?
* *
* @return true if a required string is set * @return true if a required string is set
*/ */
@@ -333,7 +299,7 @@ public class DCInput {
} }
/** /**
* Get the DC qualifier for this form field. * Get the DC qualifier for this form row.
* *
* @return the DC qualifier * @return the DC qualifier
*/ */
@@ -342,7 +308,7 @@ public class DCInput {
} }
/** /**
* Get the language for this form field. * Get the language for this form row.
* *
* @return the language state * @return the language state
*/ */
@@ -351,7 +317,7 @@ public class DCInput {
} }
/** /**
* Get the hint for this form field * Get the hint for this form row, formatted for an HTML table
* *
* @return the hints * @return the hints
*/ */
@@ -360,7 +326,7 @@ public class DCInput {
} }
/** /**
* Get the label for this form field. * Get the label for this form row.
* *
* @return the label * @return the label
*/ */
@@ -368,15 +334,6 @@ public class DCInput {
return label; return label;
} }
/**
* Get the style for this form field
*
* @return the style
*/
public String getStyle() {
return style;
}
/** /**
* Get the name of the pairs type * Get the name of the pairs type
* *
@@ -509,18 +466,6 @@ public class DCInput {
return Utils.standardize(this.getSchema(), this.getElement(), this.getQualifier(), "."); return Utils.standardize(this.getSchema(), this.getElement(), this.getQualifier(), ".");
} }
public String getRelationshipType() {
return relationshipType;
}
public String getSearchConfiguration() {
return searchConfiguration;
}
public String getFilter() {
return filter;
}
public boolean isQualdropValue() { public boolean isQualdropValue() {
if ("qualdrop_value".equals(getInputType())) { if ("qualdrop_value".equals(getInputType())) {
return true; return true;
@@ -545,22 +490,4 @@ public class DCInput {
return true; return true;
} }
/**
* Verify whether the current field contains an entity relationship
* This also implies a relationship type is defined for this field
* The field can contain both an entity relationship and a metadata field simultaneously
*/
public boolean isRelationshipField() {
return isRelationshipField;
}
/**
* Verify whether the current field contains a metadata field
* This also implies a field type is defined for this field
* The field can contain both an entity relationship and a metadata field simultaneously
*/
public boolean isMetadataField() {
return isMetadataField;
}
} }
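
The longer side of this hunk reads several additional keys from the field map (style, name-variants, relationship-type, search-configuration, filter). An illustrative map exercising them; every value below is made up for the sketch, not taken from a real submission form, and java.util.Map/HashMap imports are assumed:

    Map<String, String> fieldMap = new HashMap<>();
    fieldMap.put("dc-schema", "dc");
    fieldMap.put("dc-element", "contributor");
    fieldMap.put("dc-qualifier", "author");
    fieldMap.put("label", "Author");
    fieldMap.put("input-type", "name");
    fieldMap.put("style", "col-sm-6");             // passed through to the UI as-is
    fieldMap.put("name-variants", "true");         // enables areNameVariantsAllowed()
    fieldMap.put("relationship-type", "isAuthorOfPublication"); // sets isRelationshipField()
    DCInput input = new DCInput(fieldMap, new HashMap<>());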


@@ -25,26 +25,25 @@ public class DCInputSet {
/** /**
* the inputs ordered by row position * the inputs ordered by row position
*/ */
private DCInput[][] inputs = null; private DCInput[] inputs = null;
/** /**
* constructor * constructor
* *
* @param formName form name * @param formName form name
* @param headings
* @param mandatoryFlags * @param mandatoryFlags
* @param rows the rows * @param fields fields
* @param listMap map * @param listMap map
*/ */
public DCInputSet(String formName, List<List<Map<String, String>>> rows, Map<String, List<String>> listMap) { public DCInputSet(String formName,
List<Map<String, String>> fields, Map<String, List<String>> listMap) {
this.formName = formName; this.formName = formName;
this.inputs = new DCInput[rows.size()][]; this.inputs = new DCInput[fields.size()];
for (int i = 0; i < inputs.length; i++) { for (int i = 0; i < inputs.length; i++) {
List<Map<String, String>> fields = rows.get(i); Map<String, String> field = fields.get(i);
inputs[i] = new DCInput[fields.size()]; inputs[i] = new DCInput(field, listMap);
for (int j = 0; j < inputs[i].length; j++) {
Map<String, String> field = rows.get(i).get(j);
inputs[i][j] = new DCInput(field, listMap);
}
} }
} }
@@ -72,7 +71,7 @@ public class DCInputSet {
* @return an array containing the fields * @return an array containing the fields
*/ */
public DCInput[][] getFields() { public DCInput[] getFields() {
return inputs; return inputs;
} }
@@ -105,12 +104,10 @@ public class DCInputSet {
*/ */
public boolean isFieldPresent(String fieldName) { public boolean isFieldPresent(String fieldName) {
for (int i = 0; i < inputs.length; i++) { for (int i = 0; i < inputs.length; i++) {
for (int j = 0; j < inputs[i].length; j++) { DCInput field = inputs[i];
DCInput field = inputs[i][j]; String fullName = field.getFieldName();
String fullName = field.getFieldName(); if (fullName.equals(fieldName)) {
if (fullName.equals(fieldName)) { return true;
return true;
}
} }
} }
return false; return false;
@@ -130,13 +127,11 @@ public class DCInputSet {
documentType = ""; documentType = "";
} }
for (int i = 0; i < inputs.length; i++) { for (int i = 0; i < inputs.length; i++) {
for (int j = 0; j < inputs[i].length; j++) { DCInput field = inputs[i];
DCInput field = inputs[i][j]; String fullName = field.getFieldName();
String fullName = field.getFieldName(); if (fullName.equals(fieldName)) {
if (fullName.equals(fieldName)) { if (field.isAllowedFor(documentType)) {
if (field.isAllowedFor(documentType)) { return true;
return true;
}
} }
} }
} }
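
The grid-shaped side of this hunk stores inputs as DCInput[][] (rows of fields) where the flat side used DCInput[]. Traversal then becomes a nested loop; a sketch using only methods visible in the diff, with inputSet assumed to be a DCInputSet:

    for (DCInput[] row : inputSet.getFields()) {
        for (DCInput field : row) {
            System.out.println(field.getFieldName());
        }
    }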


@@ -21,7 +21,7 @@ import javax.xml.parsers.FactoryConfigurationError;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.MetadataSchema;
import org.dspace.core.Utils; import org.dspace.core.Utils;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.w3c.dom.Document; import org.w3c.dom.Document;
@@ -74,7 +74,7 @@ public class DCInputsReader {
* Reference to the forms definitions map, computed from the forms * Reference to the forms definitions map, computed from the forms
* definition file * definition file
*/ */
private Map<String, List<List<Map<String, String>>>> formDefns = null; private Map<String, List<Map<String, String>>> formDefns = null;
/** /**
* Reference to the value-pairs map, computed from the forms definition file * Reference to the value-pairs map, computed from the forms definition file
@@ -115,7 +115,7 @@ public class DCInputsReader {
private void buildInputs(String fileName) private void buildInputs(String fileName)
throws DCInputsReaderException { throws DCInputsReaderException {
formDefns = new HashMap<String, List<List<Map<String, String>>>>(); formDefns = new HashMap<String, List<Map<String, String>>>();
valuePairs = new HashMap<String, List<String>>(); valuePairs = new HashMap<String, List<String>>();
String uri = "file:" + new File(fileName).getAbsolutePath(); String uri = "file:" + new File(fileName).getAbsolutePath();
@@ -212,7 +212,7 @@ public class DCInputsReader {
return lastInputSet; return lastInputSet;
} }
// cache miss - construct new DCInputSet // cache miss - construct new DCInputSet
List<List<Map<String, String>>> pages = formDefns.get(formName); List<Map<String, String>> pages = formDefns.get(formName);
if (pages == null) { if (pages == null) {
throw new DCInputsReaderException("Missing the " + formName + " form"); throw new DCInputsReaderException("Missing the " + formName + " form");
} }
@@ -292,8 +292,8 @@ public class DCInputsReader {
/** /**
* Process the form-definitions section of the XML file. Each element is * Process the form-definitions section of the XML file. Each element is
* formed thusly: <form name="formname">...row...</form> Each rows * formed thusly: <form name="formname">...pages...</form> Each pages
* subsection is formed: <row> ...fields... </row> Each field * subsection is formed: <page number="#"> ...fields... </page> Each field
* is formed from: dc-element, dc-qualifier, label, hint, input-type name, * is formed from: dc-element, dc-qualifier, label, hint, input-type name,
* required text, and repeatable flag. * required text, and repeatable flag.
*/ */
@@ -311,24 +311,26 @@ public class DCInputsReader {
if (formName == null) { if (formName == null) {
throw new SAXException("form element has no name attribute"); throw new SAXException("form element has no name attribute");
} }
List<List<Map<String, String>>> rows = new ArrayList<List<Map<String, String>>>(); // the form List<Map<String, String>> fields = new ArrayList<Map<String, String>>(); // the form contains fields
// contains rows of fields formDefns.put(formName, fields);
formDefns.put(formName, rows);
NodeList pl = nd.getChildNodes(); NodeList pl = nd.getChildNodes();
int lenpg = pl.getLength(); int lenpg = pl.getLength();
for (int j = 0; j < lenpg; j++) { for (int j = 0; j < lenpg; j++) {
Node npg = pl.item(j); Node npg = pl.item(j);
if (npg.getNodeName().equals("row")) {
List<Map<String, String>> fields = new ArrayList<Map<String, String>>(); // the fields in the if (npg.getNodeName().equals("field")) {
// row // process each field definition
// process each row definition Map<String, String> field = new HashMap<String, String>();
processRow(formName, j, npg, fields); processField(formName, npg, field);
rows.add(fields); fields.add(field);
// we omit the duplicate validation, allowing multiple
// fields definition for
// the same metadata and different visibility/type-bind
} }
} }
// sanity check number of fields // sanity check number of fields
if (rows.size() < 1) { if (fields.size() < 1) {
throw new DCInputsReaderException("Form " + formName + " has no rows"); throw new DCInputsReaderException("Form " + formName + " has no fields");
} }
} }
} }
@@ -337,52 +339,6 @@ public class DCInputsReader {
} }
} }
/**
* Process parts of a row
*/
private void processRow(String formName, int rowIdx, Node n, List<Map<String, String>> fields)
throws SAXException, DCInputsReaderException {
NodeList pl = n.getChildNodes();
int lenpg = pl.getLength();
for (int j = 0; j < lenpg; j++) {
Node npg = pl.item(j);
if (npg.getNodeName().equals("field")) {
// process each field definition
Map<String, String> field = new HashMap<String, String>();
processField(formName, npg, field);
fields.add(field);
String key = field.get(PAIR_TYPE_NAME);
if (StringUtils
.isNotBlank(key)) {
String schema = field.get("dc-schema");
String element = field.get("dc-element");
String qualifier = field
.get("dc-qualifier");
String metadataField = schema + "."
+ element;
if (StringUtils.isNotBlank(qualifier)) {
metadataField += "." + qualifier;
}
}
// we omit the duplicate validation, allowing multiple
// fields definition for
// the same metadata and different visibility/type-bind
} else if (StringUtils.equalsIgnoreCase(npg.getNodeName(), "relation-field")) {
Map<String, String> relationField = new HashMap<>();
processField(formName, npg, relationField);
fields.add(relationField);
}
}
// sanity check number of fields
if (fields.size() < 1) {
throw new DCInputsReaderException("Form " + formName + "row " + rowIdx + " has no fields");
}
}
/** /**
* Process parts of a field * Process parts of a field
* At the end, make sure that input-types 'qualdrop_value' and * At the end, make sure that input-types 'qualdrop_value' and
@@ -400,7 +356,19 @@ public class DCInputsReader {
String value = getValue(nd); String value = getValue(nd);
field.put(tagName, value); field.put(tagName, value);
if (tagName.equals("input-type")) { if (tagName.equals("input-type")) {
handleInputTypeTagName(formName, field, nd, value); if (value.equals("dropdown")
|| value.equals("qualdrop_value")
|| value.equals("list")) {
String pairTypeName = getAttribute(nd, PAIR_TYPE_NAME);
if (pairTypeName == null) {
throw new SAXException("Form " + formName + ", field " +
field.get("dc-element") +
"." + field.get("dc-qualifier") +
" has no name attribute");
} else {
field.put(PAIR_TYPE_NAME, pairTypeName);
}
}
} else if (tagName.equals("vocabulary")) { } else if (tagName.equals("vocabulary")) {
String closedVocabularyString = getAttribute(nd, "closed"); String closedVocabularyString = getAttribute(nd, "closed");
field.put("closedVocabulary", closedVocabularyString); field.put("closedVocabulary", closedVocabularyString);
@@ -416,30 +384,17 @@ public class DCInputsReader {
field.put(PAIR_TYPE_NAME, pairTypeName); field.put(PAIR_TYPE_NAME, pairTypeName);
} }
} }
} else if (StringUtils.equalsIgnoreCase(tagName, "linked-metadata-field")) {
for (int j = 0; j < nd.getChildNodes().getLength(); j ++) {
Node nestedNode = nd.getChildNodes().item(j);
String nestedTagName = nestedNode.getNodeName();
String nestedValue = getValue(nestedNode);
field.put(nestedTagName, nestedValue);
if (nestedTagName.equals("input-type")) {
handleInputTypeTagName(formName, field, nestedNode, nestedValue);
}
}
} }
} }
} }
String missing = null; String missing = null;
String nodeName = n.getNodeName(); if (field.get("dc-element") == null) {
if (field.get("dc-element") == null &&
(nodeName.equals("field") || field.containsKey("linked-metadata-field"))) {
missing = "dc-element"; missing = "dc-element";
} }
if (field.get("label") == null) { if (field.get("label") == null) {
missing = "label"; missing = "label";
} }
if (field.get("input-type") == null && if (field.get("input-type") == null) {
(nodeName.equals("field") || field.containsKey("linked-metadata-field"))) {
missing = "input-type"; missing = "input-type";
} }
if (missing != null) { if (missing != null) {
@@ -447,7 +402,7 @@ public class DCInputsReader {
throw new SAXException(msg); throw new SAXException(msg);
} }
String type = field.get("input-type"); String type = field.get("input-type");
if (StringUtils.isNotBlank(type) && (type.equals("twobox") || type.equals("qualdrop_value"))) { if (type.equals("twobox") || type.equals("qualdrop_value")) {
String rpt = field.get("repeatable"); String rpt = field.get("repeatable");
if ((rpt == null) || if ((rpt == null) ||
((!rpt.equalsIgnoreCase("yes")) && ((!rpt.equalsIgnoreCase("yes")) &&
@@ -458,23 +413,6 @@ public class DCInputsReader {
} }
} }
private void handleInputTypeTagName(String formName, Map<String, String> field, Node nd, String value)
throws SAXException {
if (value.equals("dropdown")
|| value.equals("qualdrop_value")
|| value.equals("list")) {
String pairTypeName = getAttribute(nd, PAIR_TYPE_NAME);
if (pairTypeName == null) {
throw new SAXException("Form " + formName + ", field " +
field.get("dc-element") +
"." + field.get("dc-qualifier") +
" has no name attribute");
} else {
field.put(PAIR_TYPE_NAME, pairTypeName);
}
}
}
/** /**
* Check that this is the only field with the name dc-element.dc-qualifier * Check that this is the only field with the name dc-element.dc-qualifier
* If there is a duplicate, return an error message, else return null; * If there is a duplicate, return an error message, else return null;
@@ -486,7 +424,7 @@ public class DCInputsReader {
String elem = field.get("dc-element"); String elem = field.get("dc-element");
String qual = field.get("dc-qualifier"); String qual = field.get("dc-qualifier");
if ((schema == null) || (schema.equals(""))) { if ((schema == null) || (schema.equals(""))) {
schema = MetadataSchemaEnum.DC.getName(); schema = MetadataSchema.DC_SCHEMA;
} }
String schemaTest; String schemaTest;
@@ -496,7 +434,7 @@ public class DCInputsReader {
Map<String, String> fld = pg.get(j); Map<String, String> fld = pg.get(j);
if ((fld.get("dc-schema") == null) || if ((fld.get("dc-schema") == null) ||
((fld.get("dc-schema")).equals(""))) { ((fld.get("dc-schema")).equals(""))) {
schemaTest = MetadataSchemaEnum.DC.getName(); schemaTest = MetadataSchema.DC_SCHEMA;
} else { } else {
schemaTest = fld.get("dc-schema"); schemaTest = fld.get("dc-schema");
} }
@@ -599,29 +537,26 @@ public class DCInputsReader {
Iterator<String> ki = formDefns.keySet().iterator(); Iterator<String> ki = formDefns.keySet().iterator();
while (ki.hasNext()) { while (ki.hasNext()) {
String idName = ki.next(); String idName = ki.next();
List<List<Map<String, String>>> rows = formDefns.get(idName); List<Map<String, String>> fields = formDefns.get(idName);
for (int j = 0; j < rows.size(); j++) { for (int i = 0; i < fields.size(); i++) {
List<Map<String, String>> fields = rows.get(j); Map<String, String> fld = fields.get(i);
for (int i = 0; i < fields.size(); i++) { // verify reference in certain input types
Map<String, String> fld = fields.get(i); String type = fld.get("input-type");
// verify reference in certain input types if (type.equals("dropdown")
String type = fld.get("input-type"); || type.equals("qualdrop_value")
if (StringUtils.isNotBlank(type) && (type.equals("dropdown") || type.equals("list")) {
|| type.equals("qualdrop_value") String pairsName = fld.get(PAIR_TYPE_NAME);
|| type.equals("list"))) { List<String> v = valuePairs.get(pairsName);
String pairsName = fld.get(PAIR_TYPE_NAME); if (v == null) {
List<String> v = valuePairs.get(pairsName); String errString = "Cannot find value pairs for " + pairsName;
if (v == null) { throw new DCInputsReaderException(errString);
String errString = "Cannot find value pairs for " + pairsName;
throw new DCInputsReaderException(errString);
}
} }
// we omit the "required" and "visibility" validation, provided this must be checked in the
// processing class
// only when it makes sense (if the field isn't visible means that it is not applicable,
// therefore it can't be required)
} }
// we omit the "required" and "visibility" validation, provided this must be checked in the
// processing class
// only when it makes sense (if the field isn't visible means that it is not applicable, therefore it
// can't be required)
} }
} }
} }
@@ -704,5 +639,4 @@ public class DCInputsReader {
} }
throw new DCInputsReaderException("No field configuration found!"); throw new DCInputsReaderException("No field configuration found!");
} }
} }
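
The reworked reader changes both the XML shape (a <form> now contains <row> elements, each holding <field> definitions) and the in-memory shape of formDefns to match. The two container types side by side, populated with one illustrative field map (form name and field values are made up; java.util imports assumed):

    // flat shape:  form name -> fields
    Map<String, List<Map<String, String>>> flat = new HashMap<>();
    // row shape:   form name -> rows -> fields
    Map<String, List<List<Map<String, String>>>> byRow = new HashMap<>();

    Map<String, String> field = new HashMap<>();
    field.put("dc-element", "title");

    flat.computeIfAbsent("traditional", k -> new ArrayList<>()).add(field);

    List<Map<String, String>> row = new ArrayList<>();
    row.add(field);
    byRow.computeIfAbsent("traditional", k -> new ArrayList<>()).add(row);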


@@ -16,14 +16,14 @@ import java.util.Enumeration;
import javax.servlet.ServletContextEvent; import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener; import javax.servlet.ServletContextListener;
import org.apache.logging.log4j.Logger; import org.apache.log4j.Logger;
/** /**
* Class to initialize / cleanup resources used by DSpace when the web application * Class to initialize / cleanup resources used by DSpace when the web application
* is started or stopped. * is started or stopped.
*/ */
public class DSpaceContextListener implements ServletContextListener { public class DSpaceContextListener implements ServletContextListener {
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(DSpaceContextListener.class); private static Logger log = Logger.getLogger(DSpaceContextListener.class);
/** /**
* Initialize any resources required by the application. * Initialize any resources required by the application.


@@ -0,0 +1,299 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.util;
import java.io.File;
import java.io.IOException;
import java.net.UnknownHostException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import org.apache.commons.lang.time.DateUtils;
import org.apache.log4j.FileAppender;
import org.apache.log4j.helpers.LogLog;
import org.apache.log4j.spi.LoggingEvent;
/**
* Special log appender for log4j. Adds the current date (i.e. year-month) to
* the end of the file name, so that rolling on to the next log is simply
* a case of starting a new one - no renaming of old logs.
*
* This is advisable if you are using Windows, and have multiple applications
* (ie. dspace, dspace-oai, dspace-sword) that all want to write to the same log file,
* as each would otherwise try to rename the old files during rollover.
*
* An example log4j.properties (one log per month, retains three months of logs)
*
* log4j.rootCategory=INFO, A1
* log4j.appender.A1=org.dspace.app.util.DailyFileAppender
* log4j.appender.A1.File=@@log.dir@@/dspace.log
* log4j.appender.A1.DatePattern=yyyy-MM
* log4j.appender.A1.MaxLogs=3
* log4j.appender.A1.layout=org.apache.log4j.PatternLayout
* log4j.appender.A1.layout.ConversionPattern=%d %-5p %c @ %m%n
*/
public class DailyFileAppender extends FileAppender {
/**
* The fixed date pattern to be used if one is not specified.
*/
private static final String DATE_PATTERN = "yyyy-MM-dd";
/**
* The folder under which daily folders are created. This can be an absolute
* or a relative path.
* e.g. JavaLogs/CPRILog or F:/LogFiles/CPRILog
*/
private String mstrFileName;
/**
* Used internally and contains the name of the date derived from current system date.
*/
private Date mstrDate = new Date(System.currentTimeMillis());
/**
* Holds the user specified DatePattern,
*/
private String mstrDatePattern = DATE_PATTERN;
private boolean mMonthOnly = false;
/**
* The date formatter object used for parsing the user specified DatePattern.
*/
private SimpleDateFormat mobjSDF;
private boolean mWithHostName = false;
private int mMaxLogs = 0;
/**
* Default constructor. This is required as the appender class is dynamically
* loaded.
*/
public DailyFileAppender() {
super();
}
/* (non-Javadoc)
* @see org.apache.log4j.FileAppender#activateOptions()
*/
@Override
public void activateOptions() {
setFileName();
cleanupOldFiles();
super.activateOptions();
}
/*------------------------------------------------------------------------------
* Getters
*----------------------------------------------------------------------------*/
public String getDatePattern() {
return this.mstrDatePattern;
}
@Override
public String getFile() {
return this.mstrFileName;
}
public boolean getWithHost() {
return mWithHostName;
}
public int getMaxLogs() {
return mMaxLogs;
}
/*------------------------------------------------------------------------------
* Setters
*----------------------------------------------------------------------------*/
public void setDatePattern(String pstrPattern) {
this.mstrDatePattern = checkPattern(pstrPattern);
if (mstrDatePattern.contains("dd") || mstrDatePattern.contains("DD")) {
mMonthOnly = false;
} else {
mMonthOnly = true;
}
}
@Override
public void setFile(String file) {
// Trim spaces from both ends. The user probably does not want
// trailing spaces in file names.
String val = file.trim();
mstrFileName = val;
}
public void setWithHost(boolean wh) {
mWithHostName = wh;
}
public void setMaxLogs(int ml) {
mMaxLogs = ml;
}
/*------------------------------------------------------------------------------
* Methods
*----------------------------------------------------------------------------*/
/* (non-Javadoc)
* @see org.apache.log4j.WriterAppender#subAppend(org.apache.log4j.spi.LoggingEvent)
*/
@Override
protected void subAppend(LoggingEvent pobjEvent) {
Date dtNow = new Date(System.currentTimeMillis());
boolean rollover = false;
if (mMonthOnly) {
Calendar now = Calendar.getInstance();
Calendar cur = Calendar.getInstance();
now.setTime(dtNow);
cur.setTime(mstrDate);
rollover = !(now.get(Calendar.YEAR) == cur.get(Calendar.YEAR) && now.get(Calendar.MONTH) == cur
.get(Calendar.MONTH));
} else {
rollover = !(DateUtils.isSameDay(dtNow, mstrDate));
}
if (rollover) {
try {
rollOver(dtNow);
} catch (IOException IOEx) {
LogLog.error("rollOver() failed!", IOEx);
}
}
super.subAppend(pobjEvent);
}
/*------------------------------------------------------------------------------
* Helpers
*----------------------------------------------------------------------------*/
/**
* The helper function to validate the DatePattern.
*
* @param pstrPattern The DatePattern to be validated.
* @return The validated date pattern or the default DATE_PATTERN
*/
private String checkPattern(String pstrPattern) {
String strRet = null;
SimpleDateFormat objFmt = new SimpleDateFormat(DATE_PATTERN);
try {
this.mobjSDF = new SimpleDateFormat(pstrPattern);
strRet = pstrPattern;
} catch (NullPointerException NPExIgnore) {
LogLog.error("Invalid DatePattern " + pstrPattern, NPExIgnore);
this.mobjSDF = objFmt;
strRet = DATE_PATTERN;
} catch (IllegalArgumentException IlArgExIgnore) {
LogLog.error("Invalid DatePattern " + pstrPattern, IlArgExIgnore);
this.mobjSDF = objFmt;
strRet = DATE_PATTERN;
} finally {
objFmt = null;
}
return strRet;
}
/**
* This function is responsible for performing the actual file rollover.
*
* @param pstrName The name of the new folder based on current system date.
* @throws IOException if IO error
*/
private static boolean deletingFiles = false;
private void cleanupOldFiles() {
// If we need to delete log files
if (mMaxLogs > 0 && !deletingFiles) {
deletingFiles = true;
// Determine the final file extension with the hostname
String hostFileExt = null;
try {
hostFileExt = "." + java.net.InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
LogLog.error("Unable to retrieve host name");
}
try {
// Array to hold the logs we are going to keep
File[] logsToKeep = new File[mMaxLogs];
// Get a 'master' file handle, and the parent directory from it
File logMaster = new File(mstrFileName);
File logDir = logMaster.getParentFile();
if (logDir.isDirectory()) {
// Iterate all the files in that directory
File[] logArr = logDir.listFiles();
for (File curLog : logArr) {
LogLog.debug("Comparing '" + curLog.getAbsolutePath() + "' to '" + mstrFileName + "'");
String name = curLog.getAbsolutePath();
// First, see if we are not using hostname, or the log file ends with this host
if (!mWithHostName || (hostFileExt != null && name.endsWith(hostFileExt))) {
// Check that the file is indeed one we want (contains the master file name)
if (name.contains(mstrFileName)) {
// Iterate through the array of logs we are keeping
for (int i = 0; curLog != null && i < logsToKeep.length; i++) {
// Have we exhausted the 'to keep' array?
if (logsToKeep[i] == null) {
// Empty space, retain this log file
logsToKeep[i] = curLog;
curLog = null;
} else if (logsToKeep[i].getName().compareTo(curLog.getName()) < 0) {
// If the 'kept' file is older than the current one
// Replace tested entry with current file
File temp = logsToKeep[i];
logsToKeep[i] = curLog;
curLog = temp;
}
}
// If we have a 'current' entry at this point, it's a log we don't want
if (curLog != null) {
LogLog.debug("Deleting log " + curLog.getName());
if (!curLog.delete()) {
LogLog.error("Unable to delete log file");
}
}
}
}
}
}
} catch (Exception e) {
// Don't worry about exceptions
} finally {
deletingFiles = false;
}
}
}
private void rollOver(Date dtNow) throws IOException {
mstrDate = dtNow;
setFileName();
this.setFile(fileName, true, bufferedIO, bufferSize);
cleanupOldFiles();
}
private void setFileName() {
fileName = mstrFileName + "." + mobjSDF.format(mstrDate);
if (mWithHostName) {
try {
fileName += "." + java.net.InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
LogLog.error("Unable to retrieve host name");
}
}
}
}


@@ -12,7 +12,7 @@ import java.util.Comparator;
 import java.util.HashMap;
 import java.util.Map;
-import org.apache.logging.log4j.Logger;
+import org.apache.log4j.Logger;
 import org.dspace.content.Bitstream;
 import org.dspace.content.BitstreamFormat;
 import org.dspace.content.factory.ContentServiceFactory;
@@ -24,7 +24,7 @@ import org.dspace.core.Context;
  */
 public class GoogleBitstreamComparator implements Comparator<Bitstream> {
-    private final static Logger log = org.apache.logging.log4j.LogManager.getLogger(GoogleBitstreamComparator.class);
+    private final static Logger log = Logger.getLogger(GoogleBitstreamComparator.class);
     HashMap<String, Integer> priorityMap = new HashMap<>();
@@ -86,7 +86,7 @@ public class GoogleBitstreamComparator implements Comparator<Bitstream> {
         if (priority1 > priority2) {
             return 1;
         } else if (priority1 == priority2) {
-            if (b1.getSizeBytes() <= b2.getSizeBytes()) {
+            if (b1.getSize() <= b2.getSize()) {
                 return 1;
             } else {
                 return -1;
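The logger swap in this hunk recurs through most of the files below: the '-' side uses Log4j 2, the '+' side log4j 1.x. A minimal sketch of the two acquisition styles (the class name is made up):

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class Example {
    // Log4j 2 style ('-' side of these hunks)
    private static final Logger log = LogManager.getLogger(Example.class);

    // log4j 1.x style ('+' side), for comparison:
    // private static final org.apache.log4j.Logger oldLog =
    //     org.apache.log4j.Logger.getLogger(Example.class);
}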

View File

@@ -26,7 +26,7 @@ import java.util.Properties;
 import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.ListMultimap;
-import org.apache.logging.log4j.Logger;
+import org.apache.log4j.Logger;
 import org.dspace.authorize.factory.AuthorizeServiceFactory;
 import org.dspace.content.Bitstream;
 import org.dspace.content.Bundle;
@@ -51,7 +51,7 @@ import org.jdom.Element;
 @SuppressWarnings("deprecation")
 public class GoogleMetadata {
-    private final static Logger log = org.apache.logging.log4j.LogManager.getLogger(GoogleMetadata.class);
+    private final static Logger log = Logger.getLogger(GoogleMetadata.class);
     protected static final String GOOGLE_PREFIX = "google.";
@@ -889,7 +889,7 @@ public class GoogleMetadata {
         Bitstream bitstream = findLinkableFulltext(item);
         if (bitstream != null) {
             StringBuilder path = new StringBuilder();
-            path.append(ConfigurationManager.getProperty("dspace.ui.url"));
+            path.append(ConfigurationManager.getProperty("dspace.url"));
             if (item.getHandle() != null) {
                 path.append("/bitstream/");

View File

@@ -121,24 +121,20 @@ public class IndexVersion {
         }
         // Open this index directory in Lucene
-        Directory indexDir = FSDirectory.open(dir.toPath());
+        Directory indexDir = FSDirectory.open(dir);
         // Get info on the Lucene segment file(s) in index directory
-        SegmentInfos sis;
+        SegmentInfos sis = new SegmentInfos();
         try {
-            sis = SegmentInfos.readLatestCommit(indexDir);
+            sis.read(indexDir);
         } catch (IOException ie) {
             // Wrap default IOException, providing more info about which directory cannot be read
             throw new IOException("Could not read Lucene segments files in " + dir.getAbsolutePath(), ie);
         }
-        if (null == sis) {
-            throw new IOException("Could not read Lucene segments files in " + dir.getAbsolutePath());
-        }
         // If we have a valid Solr index dir, but it has no existing segments
         // then just return an empty string. It's a valid but empty index.
-        if (sis.size() == 0) {
+        if (sis != null && sis.size() == 0) {
             return "";
         }
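For reference, a minimal standalone sketch of the newer Lucene call on the '-' side (available in Lucene 5 and later; the index path is hypothetical):

import java.io.IOException;
import java.nio.file.Paths;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class SegmentsSketch {
    public static void main(String[] args) throws IOException {
        Directory indexDir = FSDirectory.open(Paths.get("/dspace/solr/search/data/index"));
        // Replaces the removed "new SegmentInfos(); sis.read(indexDir)" pattern.
        SegmentInfos sis = SegmentInfos.readLatestCommit(indexDir);
        System.out.println("segment count: " + sis.size());
    }
}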

View File

@@ -1,245 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.util;
import java.io.File;
import java.io.IOException;
import java.sql.SQLException;
import java.util.LinkedList;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.EntityType;
import org.dspace.content.RelationshipType;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.EntityTypeService;
import org.dspace.content.service.RelationshipService;
import org.dspace.content.service.RelationshipTypeService;
import org.dspace.core.Context;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
* This script initializes the database with a set of relationship types read
* from an XML file supplied on the command line.
* The XML file must be well-formed and must define the fields of each
* RelationshipType object.
*/
public class InitializeEntities {
private final static Logger log = LogManager.getLogger();
private RelationshipTypeService relationshipTypeService;
private RelationshipService relationshipService;
private EntityTypeService entityTypeService;
private InitializeEntities() {
relationshipTypeService = ContentServiceFactory.getInstance().getRelationshipTypeService();
relationshipService = ContentServiceFactory.getInstance().getRelationshipService();
entityTypeService = ContentServiceFactory.getInstance().getEntityTypeService();
}
/**
* The main method for this script
*
* @param argv The commandline arguments given with this command
* @throws SQLException If something goes wrong with the database
* @throws AuthorizeException If something goes wrong with permissions
* @throws ParseException If something goes wrong with the parsing
*/
public static void main(String[] argv) throws SQLException, AuthorizeException, ParseException {
InitializeEntities initializeEntities = new InitializeEntities();
CommandLineParser parser = new PosixParser();
Options options = createCommandLineOptions();
CommandLine line = parser.parse(options,argv);
String fileLocation = getFileLocationFromCommandLine(line);
checkHelpEntered(options, line);
initializeEntities.run(fileLocation);
}
private static void checkHelpEntered(Options options, CommandLine line) {
if (line.hasOption("h")) {
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp("Initialize Entities", options);
System.exit(0);
}
}
private static String getFileLocationFromCommandLine(CommandLine line) {
String query = line.getOptionValue("f");
if (StringUtils.isEmpty(query)) {
System.out.println("No file location was entered");
log.info("No file location was entered");
System.exit(1);
}
return query;
}
protected static Options createCommandLineOptions() {
Options options = new Options();
options.addOption("f", "file", true, "the location for the file containing the xml data");
return options;
}
private void run(String fileLocation) throws SQLException, AuthorizeException {
Context context = new Context();
context.turnOffAuthorisationSystem();
this.parseXMLToRelations(context, fileLocation);
context.complete();
}
private void parseXMLToRelations(Context context, String fileLocation) throws AuthorizeException {
try {
File fXmlFile = new File(fileLocation);
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
Document doc = dBuilder.parse(fXmlFile);
doc.getDocumentElement().normalize();
NodeList nList = doc.getElementsByTagName("type");
List<RelationshipType> relationshipTypes = new LinkedList<>();
for (int i = 0; i < nList.getLength(); i++) {
Node nNode = nList.item(i);
if (nNode.getNodeType() == Node.ELEMENT_NODE) {
Element eElement = (Element) nNode;
String leftType = eElement.getElementsByTagName("leftType").item(0).getTextContent();
String rightType = eElement.getElementsByTagName("rightType").item(0).getTextContent();
String leftwardType = eElement.getElementsByTagName("leftwardType").item(0).getTextContent();
String rightwardType = eElement.getElementsByTagName("rightwardType").item(0).getTextContent();
Node copyToLeftNode = eElement.getElementsByTagName("copyToLeft").item(0);
Boolean copyToLeft;
if (copyToLeftNode == null) {
copyToLeft = false;
} else {
copyToLeft = Boolean.valueOf(copyToLeftNode.getTextContent());
}
Node copyToRightNode = eElement.getElementsByTagName("copyToRight").item(0);
Boolean copyToRight;
if (copyToRightNode == null) {
copyToRight = false;
} else {
copyToRight = Boolean.valueOf(copyToRightNode.getTextContent());
}
NodeList leftCardinalityList = eElement.getElementsByTagName("leftCardinality");
NodeList rightCardinalityList = eElement.getElementsByTagName("rightCardinality");
String leftCardinalityMin = "";
String leftCardinalityMax = "";
String rightCardinalityMin = "";
String rightCardinalityMax = "";
for (int j = 0; j < leftCardinalityList.getLength(); j++) {
Node node = leftCardinalityList.item(j);
leftCardinalityMin = getString(leftCardinalityMin,(Element) node, "min");
leftCardinalityMax = getString(leftCardinalityMax,(Element) node, "max");
}
for (int j = 0; j < rightCardinalityList.getLength(); j++) {
Node node = rightCardinalityList.item(j);
rightCardinalityMin = getString(rightCardinalityMin,(Element) node, "min");
rightCardinalityMax = getString(rightCardinalityMax,(Element) node, "max");
}
populateRelationshipType(context, leftType, rightType, leftwardType, rightwardType,
leftCardinalityMin, leftCardinalityMax,
rightCardinalityMin, rightCardinalityMax, copyToLeft, copyToRight);
}
}
} catch (ParserConfigurationException | SAXException | IOException | SQLException e) {
log.error("An error occurred while parsing the XML file to relations", e);
}
}
private String getString(String leftCardinalityMin,Element node, String minOrMax) {
if (node.getElementsByTagName(minOrMax).getLength() > 0) {
leftCardinalityMin = node.getElementsByTagName(minOrMax).item(0).getTextContent();
}
return leftCardinalityMin;
}
private void populateRelationshipType(Context context, String leftType, String rightType, String leftwardType,
String rightwardType, String leftCardinalityMin, String leftCardinalityMax,
String rightCardinalityMin, String rightCardinalityMax,
Boolean copyToLeft, Boolean copyToRight)
throws SQLException, AuthorizeException {
EntityType leftEntityType = entityTypeService.findByEntityType(context,leftType);
if (leftEntityType == null) {
leftEntityType = entityTypeService.create(context, leftType);
}
EntityType rightEntityType = entityTypeService.findByEntityType(context, rightType);
if (rightEntityType == null) {
rightEntityType = entityTypeService.create(context, rightType);
}
Integer leftCardinalityMinInteger;
Integer leftCardinalityMaxInteger;
Integer rightCardinalityMinInteger;
Integer rightCardinalityMaxInteger;
if (StringUtils.isNotBlank(leftCardinalityMin)) {
leftCardinalityMinInteger = Integer.parseInt(leftCardinalityMin);
} else {
leftCardinalityMinInteger = null;
}
if (StringUtils.isNotBlank(leftCardinalityMax)) {
leftCardinalityMaxInteger = Integer.parseInt(leftCardinalityMax);
} else {
leftCardinalityMaxInteger = null;
}
if (StringUtils.isNotBlank(rightCardinalityMin)) {
rightCardinalityMinInteger = Integer.parseInt(rightCardinalityMin);
} else {
rightCardinalityMinInteger = null;
}
if (StringUtils.isNotBlank(rightCardinalityMax)) {
rightCardinalityMaxInteger = Integer.parseInt(rightCardinalityMax);
} else {
rightCardinalityMaxInteger = null;
}
RelationshipType relationshipType = relationshipTypeService
.findbyTypesAndTypeName(context, leftEntityType, rightEntityType, leftwardType, rightwardType);
if (relationshipType == null) {
relationshipTypeService.create(context, leftEntityType, rightEntityType, leftwardType, rightwardType,
leftCardinalityMinInteger, leftCardinalityMaxInteger,
rightCardinalityMinInteger, rightCardinalityMaxInteger,
copyToLeft, copyToRight);
} else {
relationshipType.setCopyToLeft(copyToLeft);
relationshipType.setCopyToRight(copyToRight);
relationshipType.setLeftMinCardinality(leftCardinalityMinInteger);
relationshipType.setLeftMaxCardinality(leftCardinalityMaxInteger);
relationshipType.setRightMinCardinality(rightCardinalityMinInteger);
relationshipType.setRightMaxCardinality(rightCardinalityMaxInteger);
relationshipTypeService.update(context, relationshipType);
}
}
}
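From the parsing code above (the getElementsByTagName calls), the relationship-types file looks roughly like the sketch below. The root element name and the example values are assumptions; copyToLeft, copyToRight and the cardinality elements are optional, defaulting as the code shows:

<relationshipTypes> <!-- root element name is an assumption -->
    <type>
        <leftType>Publication</leftType>
        <rightType>Person</rightType>
        <leftwardType>isAuthorOfPublication</leftwardType>
        <rightwardType>isPublicationOfAuthor</rightwardType>
        <copyToLeft>false</copyToLeft>
        <copyToRight>false</copyToRight>
        <leftCardinality><min>0</min></leftCardinality>
        <rightCardinality><min>0</min></rightCardinality>
    </type>
</relationshipTypes>

It would presumably be run through the DSpace launcher along the lines of dsrun org.dspace.app.util.InitializeEntities -f relationship-types.xml (the file name here is hypothetical).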

View File

@@ -14,7 +14,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import org.apache.logging.log4j.Logger;
+import org.apache.log4j.Logger;
 import org.dspace.app.util.service.MetadataExposureService;
 import org.dspace.authorize.service.AuthorizeService;
 import org.dspace.core.Context;
@@ -58,7 +58,7 @@ import org.springframework.beans.factory.annotation.Autowired;
  * @version $Revision: 3734 $
  */
 public class MetadataExposureServiceImpl implements MetadataExposureService {
-    protected Logger log = org.apache.logging.log4j.LogManager.getLogger(MetadataExposureServiceImpl.class);
+    protected Logger log = Logger.getLogger(MetadataExposureServiceImpl.class);
     protected Map<String, Set<String>> hiddenElementSets = null;
     protected Map<String, Map<String, Set<String>>> hiddenElementMaps = null;

View File

@@ -20,13 +20,11 @@ import com.sun.syndication.feed.module.opensearch.OpenSearchModule;
 import com.sun.syndication.feed.module.opensearch.entity.OSQuery;
 import com.sun.syndication.feed.module.opensearch.impl.OpenSearchModuleImpl;
 import com.sun.syndication.io.FeedException;
-import org.apache.logging.log4j.Logger;
+import org.apache.log4j.Logger;
 import org.dspace.app.util.service.OpenSearchService;
 import org.dspace.content.DSpaceObject;
 import org.dspace.core.Constants;
 import org.dspace.core.Context;
-import org.dspace.discovery.IndexableObject;
 import org.dspace.handle.service.HandleService;
 import org.dspace.services.ConfigurationService;
 import org.dspace.services.factory.DSpaceServicesFactory;
@@ -35,6 +33,7 @@ import org.jdom.JDOMException;
 import org.jdom.Namespace;
 import org.jdom.output.DOMOutputter;
 import org.jdom.output.XMLOutputter;
+import org.springframework.beans.factory.InitializingBean;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.w3c.dom.Document;
@@ -54,14 +53,20 @@ import org.w3c.dom.Document;
  *
  * @author Richard Rodgers
  */
-public class OpenSearchServiceImpl implements OpenSearchService {
-    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(OpenSearchServiceImpl.class);
+public class OpenSearchServiceImpl implements OpenSearchService, InitializingBean {
+    private static final Logger log = Logger.getLogger(OpenSearchServiceImpl.class);
+    // are open search queries enabled?
+    protected boolean enabled = false;
+    // supported results formats
+    protected List<String> formats = null;
     // Namespaces used
     protected final String osNs = "http://a9.com/-/spec/opensearch/1.1/";
-    @Autowired(required = true)
-    protected ConfigurationService configurationService;
+    // base search UI URL
+    protected String uiUrl = null;
+    // base search service URL
+    protected String svcUrl = null;
     @Autowired(required = true)
     protected HandleService handleService;
@@ -70,35 +75,25 @@ public class OpenSearchServiceImpl implements OpenSearchService {
     }
     @Override
-    public List<String> getFormats() {
-        List<String> formats = new ArrayList<>();
-        // read formats only if enabled
-        if (isEnabled()) {
-            String[] fmts = configurationService.getArrayProperty("websvc.opensearch.formats");
+    public void afterPropertiesSet() throws Exception {
+        ConfigurationService config = DSpaceServicesFactory.getInstance().getConfigurationService();
+        enabled = config.getBooleanProperty("websvc.opensearch.enable");
+        svcUrl = config.getProperty("dspace.url") + "/" +
+            config.getProperty("websvc.opensearch.svccontext");
+        uiUrl = config.getProperty("dspace.url") + "/" +
+            config.getProperty("websvc.opensearch.uicontext");
+        // read rest of config info if enabled
+        formats = new ArrayList<String>();
+        if (enabled) {
+            String[] fmts = config.getArrayProperty("websvc.opensearch.formats");
             formats = Arrays.asList(fmts);
         }
-        return formats;
     }
     @Override
-    public boolean isEnabled() {
-        return configurationService.getBooleanProperty("websvc.opensearch.enable");
-    }
-    /**
-     * Get base search service URL (websvc.opensearch.svccontext)
-     */
-    protected String getBaseSearchServiceURL() {
-        return configurationService.getProperty("dspace.server.url") + "/" +
-            configurationService.getProperty("websvc.opensearch.svccontext");
-    }
-    /**
-     * Get base search UI URL (websvc.opensearch.uicontext)
-     */
-    protected String getBaseSearchUIURL() {
-        return configurationService.getProperty("dspace.server.url") + "/" +
-            configurationService.getProperty("websvc.opensearch.uicontext");
+    public List<String> getFormats() {
+        return formats;
     }
     @Override
@@ -120,7 +115,7 @@
     @Override
     public String getResultsString(Context context, String format, String query, int totalResults, int start,
                                    int pageSize,
-                                   IndexableObject scope, List<IndexableObject> results,
+                                   DSpaceObject scope, List<DSpaceObject> results,
                                    Map<String, String> labels) throws IOException {
         try {
             return getResults(context, format, query, totalResults, start, pageSize, scope, results, labels)
@@ -134,7 +129,7 @@
     @Override
     public Document getResultsDoc(Context context, String format, String query, int totalResults, int start,
                                   int pageSize,
-                                  IndexableObject scope, List<IndexableObject> results, Map<String, String> labels)
+                                  DSpaceObject scope, List<DSpaceObject> results, Map<String, String> labels)
         throws IOException {
         try {
             return getResults(context, format, query, totalResults, start, pageSize, scope, results, labels)
@@ -146,8 +141,8 @@
     }
     protected SyndicationFeed getResults(Context context, String format, String query, int totalResults, int start,
-                                         int pageSize, IndexableObject scope,
-                                         List<IndexableObject> results, Map<String, String> labels) {
+                                         int pageSize,
+                                         DSpaceObject scope, List<DSpaceObject> results, Map<String, String> labels) {
         // Encode results in requested format
         if ("rss".equals(format)) {
             format = "rss_2.0";
@@ -226,13 +221,13 @@
             root.addContent(fav);
         }
         // service URLs
-        for (String format : getFormats()) {
+        for (String format : formats) {
             Element url = new Element("Url", ns).setAttribute("type", getContentType(format));
             StringBuilder template = new StringBuilder();
             if ("html".equals(format)) {
-                template.append(getBaseSearchUIURL());
+                template.append(uiUrl);
             } else {
-                template.append(getBaseSearchServiceURL());
+                template.append(svcUrl);
             }
             template.append("?query={searchTerms}");
             if (!"html".equals(format)) {

View File

@@ -11,7 +11,7 @@ import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.List;
-import org.apache.logging.log4j.Logger;
+import org.apache.log4j.Logger;
 import org.dspace.content.Collection;
 import org.dspace.content.factory.ContentServiceFactory;
 import org.dspace.content.service.CollectionService;
@@ -28,7 +28,7 @@ import org.springframework.util.StopWatch;
  * Invocation: dsrun org.dspace.app.util.OptimizeSelectCollection
  */
 public class OptimizeSelectCollection {
-    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(OptimizeSelectCollection.class);
+    private static final Logger log = Logger.getLogger(OptimizeSelectCollection.class);
     private static Context context;
     private static ArrayList<EPerson> brokenPeople;

View File

@@ -12,7 +12,7 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
-import org.apache.logging.log4j.Logger;
+import org.apache.log4j.Logger;
 /**
  * Class representing a single Item Submission config definition, organized into
@@ -44,7 +44,7 @@ public class SubmissionConfig implements Serializable {
     /**
      * log4j logger
      */
-    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(SubmissionConfig.class);
+    private static Logger log = Logger.getLogger(SubmissionConfig.class);
     /**
      * Constructs a new Submission Configuration object, based on the XML

View File

@@ -19,7 +19,7 @@ import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.FactoryConfigurationError;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.logging.log4j.Logger;
+import org.apache.log4j.Logger;
 import org.dspace.content.Collection;
 import org.dspace.content.DSpaceObject;
 import org.dspace.core.Context;
@@ -71,7 +71,7 @@ public class SubmissionConfigReader {
     /**
      * log4j logger
     */
-    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(SubmissionConfigReader.class);
+    private static Logger log = Logger.getLogger(SubmissionConfigReader.class);
     /**
      * The fully qualified pathname of the directory containing the Item Submission Configuration file

View File

@@ -34,9 +34,9 @@ import com.sun.syndication.feed.synd.SyndPerson;
 import com.sun.syndication.feed.synd.SyndPersonImpl;
 import com.sun.syndication.io.FeedException;
 import com.sun.syndication.io.SyndFeedOutput;
-import org.apache.commons.lang3.ArrayUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.logging.log4j.Logger;
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
 import org.dspace.content.Bitstream;
 import org.dspace.content.Bundle;
 import org.dspace.content.Collection;
@@ -50,11 +50,8 @@ import org.dspace.content.service.CollectionService;
 import org.dspace.content.service.CommunityService;
 import org.dspace.content.service.ItemService;
 import org.dspace.core.ConfigurationManager;
+import org.dspace.core.Constants;
 import org.dspace.core.Context;
-import org.dspace.discovery.IndexableObject;
-import org.dspace.discovery.indexobject.IndexableCollection;
-import org.dspace.discovery.indexobject.IndexableCommunity;
-import org.dspace.discovery.indexobject.IndexableItem;
 import org.dspace.handle.factory.HandleServiceFactory;
 import org.dspace.services.ConfigurationService;
 import org.dspace.services.factory.DSpaceServicesFactory;
@@ -72,7 +69,7 @@ import org.w3c.dom.Document;
  * @author Larry Stone
  */
 public class SyndicationFeed {
-    protected final Logger log = org.apache.logging.log4j.LogManager.getLogger(SyndicationFeed.class);
+    protected final Logger log = Logger.getLogger(SyndicationFeed.class);
     /**
@@ -182,12 +179,12 @@ public class SyndicationFeed {
      *
      * @param request request
      * @param context context
-     * @param dso     the scope
+     * @param dso     DSpaceObject
      * @param items   array of objects
      * @param labels  label map
      */
-    public void populate(HttpServletRequest request, Context context, IndexableObject dso,
-                         List<IndexableObject> items, Map<String, String> labels) {
+    public void populate(HttpServletRequest request, Context context, DSpaceObject dso,
+                         List<? extends DSpaceObject> items, Map<String, String> labels) {
         String logoURL = null;
         String objectURL = null;
         String defaultTitle = null;
@@ -202,8 +199,8 @@ public class SyndicationFeed {
             logoURL = ConfigurationManager.getProperty("webui.feed.logo.url");
         } else {
             Bitstream logo = null;
-            if (dso instanceof IndexableCollection) {
-                Collection col = ((IndexableCollection) dso).getIndexedObject();
+            if (dso.getType() == Constants.COLLECTION) {
+                Collection col = (Collection) dso;
                 defaultTitle = col.getName();
                 feed.setDescription(collectionService.getMetadata(col, "short_description"));
                 logo = col.getLogo();
@@ -211,9 +208,8 @@ public class SyndicationFeed {
                 if (cols != null && cols.length() > 1 && cols.contains(col.getHandle())) {
                     podcastFeed = true;
                 }
-                objectURL = resolveURL(request, col);
-            } else if (dso instanceof IndexableCommunity) {
-                Community comm = ((IndexableCommunity) dso).getIndexedObject();
+            } else if (dso.getType() == Constants.COMMUNITY) {
+                Community comm = (Community) dso;
                 defaultTitle = comm.getName();
                 feed.setDescription(communityService.getMetadata(comm, "short_description"));
                 logo = comm.getLogo();
@@ -221,9 +217,8 @@ public class SyndicationFeed {
                 if (comms != null && comms.length() > 1 && comms.contains(comm.getHandle())) {
                     podcastFeed = true;
                 }
-                objectURL = resolveURL(request, comm);
             }
+            objectURL = resolveURL(request, dso);
             if (logo != null) {
                 logoURL = urlOfBitstream(request, logo);
             }
@@ -252,11 +247,11 @@ public class SyndicationFeed {
         // add entries for items
         if (items != null) {
             List<SyndEntry> entries = new ArrayList<SyndEntry>();
-            for (IndexableObject idxObj : items) {
-                if (!(idxObj instanceof IndexableItem)) {
+            for (DSpaceObject itemDSO : items) {
+                if (itemDSO.getType() != Constants.ITEM) {
                     continue;
                 }
-                Item item = ((IndexableItem) idxObj).getIndexedObject();
+                Item item = (Item) itemDSO;
                 boolean hasDate = false;
                 SyndEntry entry = new SyndEntryImpl();
                 entries.add(entry);
@@ -371,7 +366,7 @@ public class SyndicationFeed {
                 if (ArrayUtils.contains(podcastableMIMETypes, mime)) {
                     SyndEnclosure enc = new SyndEnclosureImpl();
                     enc.setType(bit.getFormat(context).getMIMEType());
-                    enc.setLength(bit.getSizeBytes());
+                    enc.setLength(bit.getSize());
                     enc.setUrl(urlOfBitstream(request, bit));
                     enclosures.add(enc);
                 } else {
@@ -531,9 +526,11 @@ public class SyndicationFeed {
         if (dso == null) {
             if (baseURL == null) {
                 if (request == null) {
-                    baseURL = ConfigurationManager.getProperty("dspace.ui.url");
+                    baseURL = ConfigurationManager.getProperty("dspace.url");
                 } else {
-                    baseURL = ConfigurationManager.getProperty("dspace.ui.url");
+                    baseURL = (request.isSecure()) ? "https://" : "http://";
+                    baseURL += ConfigurationManager.getProperty("dspace.hostname");
+                    baseURL += ":" + request.getServerPort();
                     baseURL += request.getContextPath();
                 }
             }
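The last hunk replaces hand-built base URLs with the configured UI URL. A standalone sketch of what the removed request-based assembly on the '+' side produced (all values hypothetical):

public class BaseUrlSketch {
    public static void main(String[] args) {
        boolean secure = true;                  // request.isSecure()
        String hostname = "demo.dspace.org";    // dspace.hostname
        int port = 443;                         // request.getServerPort()
        String contextPath = "/xmlui";          // request.getContextPath()
        String baseURL = (secure ? "https://" : "http://") + hostname + ":" + port + contextPath;
        System.out.println(baseURL);            // https://demo.dspace.org:443/xmlui
    }
}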

View File

@@ -22,9 +22,9 @@ import java.util.Set;
 import java.util.UUID;
 import javax.servlet.http.HttpServletRequest;
-import org.apache.commons.collections4.ListUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.logging.log4j.Logger;
+import org.apache.commons.collections.ListUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
 import org.dspace.content.Collection;
 import org.dspace.content.Item;
 import org.dspace.content.MetadataValue;
@@ -44,7 +44,7 @@ public class Util {
     // cache for source version result
     private static String sourceVersion = null;
-    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(Util.class);
+    private static Logger log = Logger.getLogger(Util.class);
     /**
      * Default constructor. Must be protected as org.dspace.xmlworkflow.WorkflowUtils extends it
@@ -360,13 +360,9 @@ public class Util {
         InputStream cis = null;
         try {
             cis = Util.class.getResourceAsStream("/META-INF/maven/org.dspace/dspace-api/pom.properties");
-            if (cis == null) {
-                // pom.properties will not exist when running tests
-                return "unknown";
-            }
             constants.load(cis);
         } catch (Exception e) {
-            log.error("Could not open dspace-api's pom.properties", e);
+            log.error(e.getMessage(), e);
         } finally {
             if (cis != null) {
                 try {
@@ -422,12 +418,11 @@ public class Util {
         List<DCInputSet> inputSets = inputsReader.getInputsByCollectionHandle(col_handle);
-        // Replace the values of Metadatum[] with the correct ones in case
-        // of
-        // controlled vocabularies
-        String currentField = Utils.standardize(schema, element, qualifier, ".");
         for (DCInputSet inputSet : inputSets) {
+            // Replace the values of Metadatum[] with the correct ones in case
+            // of
+            // controlled vocabularies
+            String currentField = Utils.standardize(schema, element, qualifier, ".");
             if (inputSet != null) {
@@ -435,20 +430,19 @@
             for (int p = 0; p < fieldsNums; p++) {
-                DCInput[][] inputs = inputSet.getFields();
+                DCInput[] inputs = inputSet.getFields();
                 if (inputs != null) {
                     for (int i = 0; i < inputs.length; i++) {
-                        for (int j = 0; j < inputs[i].length; j++) {
-                            String inputField = Utils
-                                .standardize(inputs[i][j].getSchema(), inputs[i][j].getElement(),
-                                    inputs[i][j].getQualifier(), ".");
-                            if (currentField.equals(inputField)) {
-                                myInputs = inputs[i][j];
-                                myInputsFound = true;
-                                break;
-                            }
+                        String inputField = Utils.standardize(inputs[i].getSchema(), inputs[i].getElement(),
+                                                              inputs[i].getQualifier(), ".");
+                        if (currentField.equals(inputField)) {
+                            myInputs = inputs[i];
+                            myInputsFound = true;
+                            break;
                         }
                     }
                 }
@@ -477,26 +471,6 @@
         return toReturn;
     }
-    /**
-     * Split a list in an array of i sub-lists uniformly sized
-     *
-     * @param idsList the list to split
-     * @param i       the number of sublists to return
-     *
-     * @return an array of sub-lists of fixed size
-     */
-    public static <T> List<T>[] splitList(List<T> idsList, int i) {
-        int setmin = idsList.size() / i;
-        List<T>[] result = new List[i];
-        int offset = 0;
-        for (int idx = 0; idx < i - 1; idx++) {
-            result[idx] = idsList.subList(offset, offset + setmin);
-            offset += setmin;
-        }
-        result[i - 1] = idsList.subList(offset, idsList.size());
-        return result;
-    }
     public static List<String> differenceInSubmissionFields(Collection fromCollection, Collection toCollection)
         throws DCInputsReaderException {
         DCInputsReader reader = new DCInputsReader();
@@ -506,17 +480,13 @@
         Set<String> fromFieldName = new HashSet<>();
         Set<String> toFieldName = new HashSet<>();
         for (DCInputSet ff : from) {
-            for (DCInput[] fdcrow : ff.getFields()) {
-                for (DCInput fdc : fdcrow) {
-                    fromFieldName.add(fdc.getFieldName());
-                }
+            for (DCInput fdc : ff.getFields()) {
+                fromFieldName.add(fdc.getFieldName());
             }
         }
         for (DCInputSet tt : to) {
-            for (DCInput[] tdcrow : tt.getFields()) {
-                for (DCInput tdc : tdcrow) {
-                    toFieldName.add(tdc.getFieldName());
-                }
+            for (DCInput tdc : tt.getFields()) {
+                toFieldName.add(tdc.getFieldName());
             }
         }
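The splitList helper on the '-' side above hands size / i elements to each of the first i - 1 sub-lists and the remainder to the last. A runnable copy of the same arithmetic with a worked example:

import java.util.Arrays;
import java.util.List;

public class SplitListSketch {
    // Same contract as the removed Util.splitList.
    @SuppressWarnings("unchecked")
    public static <T> List<T>[] splitList(List<T> idsList, int i) {
        int setmin = idsList.size() / i;
        List<T>[] result = new List[i];
        int offset = 0;
        for (int idx = 0; idx < i - 1; idx++) {
            result[idx] = idsList.subList(offset, offset + setmin);
            offset += setmin;
        }
        result[i - 1] = idsList.subList(offset, idsList.size());
        return result;
    }

    public static void main(String[] args) {
        List<Integer>[] parts = splitList(Arrays.asList(1, 2, 3, 4, 5, 6, 7), 3);
        System.out.println(Arrays.toString(parts)); // [[1, 2], [3, 4], [5, 6, 7]]
    }
}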

View File

@@ -18,7 +18,7 @@ import org.apache.http.HttpStatus;
 import org.apache.http.client.HttpClient;
 import org.apache.http.client.methods.HttpHead;
 import org.apache.http.impl.client.DefaultHttpClient;
-import org.apache.logging.log4j.Logger;
+import org.apache.log4j.Logger;
 import org.dspace.app.util.dao.WebAppDAO;
 import org.dspace.app.util.service.WebAppService;
 import org.dspace.core.Context;
@@ -33,7 +33,7 @@ import org.springframework.beans.factory.annotation.Autowired;
  */
 public class WebAppServiceImpl implements WebAppService {
-    private final Logger log = org.apache.logging.log4j.LogManager.getLogger(WebAppServiceImpl.class);
+    private final Logger log = Logger.getLogger(WebAppServiceImpl.class);
     @Autowired(required = true)
     protected WebAppDAO webAppDAO;

View File

@@ -11,7 +11,7 @@ import java.util.ArrayList;
 import java.util.LinkedList;
 import java.util.List;
-import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang.StringUtils;
 import org.w3c.dom.Element;
 import org.w3c.dom.NodeList;

View File

@@ -14,7 +14,6 @@ import java.util.Map;
 import org.dspace.content.DSpaceObject;
 import org.dspace.core.Context;
-import org.dspace.discovery.IndexableObject;
 import org.w3c.dom.Document;
 /**
@@ -42,13 +41,6 @@ public interface OpenSearchService {
      */
     public List<String> getFormats();
-    /**
-     * Determine if the module is active
-     *
-     * @return boolean indicator if the OpenSearch module is enabled or not
-     */
-    public boolean isEnabled();
     /**
      * Returns a mime-type associated with passed format
      *
@@ -84,7 +76,7 @@
      * @param totalResults - the hit count
      * @param start        - start result index
      * @param pageSize     - page size
-     * @param scope        - search scope, null or the community/collection
+     * @param scope        - search scope, null or community/collection handle
      * @param results      the retreived DSpace objects satisfying search
      * @param labels       labels to apply - format specific
      * @return formatted search results
@@ -92,7 +84,7 @@
      */
     public String getResultsString(Context context, String format, String query, int totalResults, int start,
                                    int pageSize,
-                                   IndexableObject scope, List<IndexableObject> results,
+                                   DSpaceObject scope, List<DSpaceObject> results,
                                    Map<String, String> labels) throws IOException;
     /**
@@ -104,7 +96,7 @@
      * @param totalResults - the hit count
      * @param start        - start result index
      * @param pageSize     - page size
-     * @param scope        - search scope, null or the community/collection
+     * @param scope        - search scope, null or community/collection handle
      * @param results      the retreived DSpace objects satisfying search
      * @param labels       labels to apply - format specific
      * @return formatted search results
@@ -112,7 +104,7 @@
      */
     public Document getResultsDoc(Context context, String format, String query, int totalResults, int start,
                                   int pageSize,
-                                  IndexableObject scope, List<IndexableObject> results, Map<String, String> labels)
+                                  DSpaceObject scope, List<DSpaceObject> results, Map<String, String> labels)
         throws IOException;
     public DSpaceObject resolveScope(Context context, String scope) throws SQLException;

View File

@@ -210,10 +210,4 @@ public interface AuthenticationMethod {
     public String loginPageURL(Context context,
                                HttpServletRequest request,
                                HttpServletResponse response);
-    /**
-     * Returns a short name that uniquely identifies this authentication method
-     * @return The authentication method name
-     */
-    public String getName();
 }

View File

@@ -9,7 +9,6 @@ package org.dspace.authenticate;
 import java.sql.SQLException;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -17,15 +16,15 @@ import java.util.UUID;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-import org.apache.logging.log4j.Logger;
+import org.apache.commons.collections.ListUtils;
+import org.apache.log4j.Logger;
+import org.dspace.core.ConfigurationManager;
 import org.dspace.core.Context;
 import org.dspace.core.LogManager;
-import org.dspace.core.factory.CoreServiceFactory;
 import org.dspace.eperson.EPerson;
 import org.dspace.eperson.Group;
 import org.dspace.eperson.factory.EPersonServiceFactory;
 import org.dspace.eperson.service.GroupService;
-import org.dspace.service.ClientInfoService;
 import org.dspace.services.factory.DSpaceServicesFactory;
@@ -50,7 +49,7 @@ public class IPAuthentication implements AuthenticationMethod {
     /**
      * Our logger
      */
-    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(IPAuthentication.class);
+    private static Logger log = Logger.getLogger(IPAuthentication.class);
     /**
      * Whether to look for x-forwarded headers for logging IP addresses
@@ -68,7 +67,6 @@ public class IPAuthentication implements AuthenticationMethod {
     protected List<IPMatcher> ipNegativeMatchers;
     protected GroupService groupService;
-    protected ClientInfoService clientInfoService;
     /**
@@ -93,7 +91,6 @@
         ipMatcherGroupIDs = new HashMap<>();
         ipMatcherGroupNames = new HashMap<>();
         groupService = EPersonServiceFactory.getInstance().getGroupService();
-        clientInfoService = CoreServiceFactory.getInstance().getClientInfoService();
         List<String> propNames = DSpaceServicesFactory.getInstance().getConfigurationService()
                                                       .getPropertyKeys("authentication-ip");
@@ -167,12 +164,23 @@
     public List<Group> getSpecialGroups(Context context, HttpServletRequest request)
         throws SQLException {
         if (request == null) {
-            return Collections.EMPTY_LIST;
+            return ListUtils.EMPTY_LIST;
         }
         List<Group> groups = new ArrayList<Group>();
         // Get the user's IP address
-        String addr = clientInfoService.getClientIp(request);
+        String addr = request.getRemoteAddr();
+        if (useProxies == null) {
+            useProxies = ConfigurationManager.getBooleanProperty("useProxies", false);
+        }
+        if (useProxies && request.getHeader("X-Forwarded-For") != null) {
+            /* This header is a comma delimited list */
+            for (String xfip : request.getHeader("X-Forwarded-For").split(",")) {
+                if (!request.getHeader("X-Forwarded-For").contains(addr)) {
+                    addr = xfip.trim();
+                }
+            }
+        }
         for (IPMatcher ipm : ipMatchers) {
             try {
@@ -268,9 +276,4 @@
                                HttpServletResponse response) {
         return null;
     }
-    @Override
-    public String getName() {
-        return "ip";
-    }
 }
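The '+' side above parses X-Forwarded-For by hand, and its contains() test makes the result order-dependent; the '-' side delegates that to ClientInfoService instead. A standalone sketch of the underlying idea, not DSpace's implementation (addresses are hypothetical):

public class ForwardedForSketch {
    // X-Forwarded-For is a comma-delimited list of hops, client first;
    // return the first entry that differs from the address we connected from.
    static String clientIp(String remoteAddr, String xForwardedFor) {
        if (xForwardedFor == null) {
            return remoteAddr;
        }
        for (String hop : xForwardedFor.split(",")) {
            String candidate = hop.trim();
            if (!candidate.isEmpty() && !candidate.equals(remoteAddr)) {
                return candidate;
            }
        }
        return remoteAddr;
    }

    public static void main(String[] args) {
        System.out.println(clientIp("10.0.0.5", "203.0.113.7, 10.0.0.5")); // 203.0.113.7
    }
}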

View File

@@ -10,7 +10,7 @@ package org.dspace.authenticate;
 import java.net.Inet6Address;
 import java.net.UnknownHostException;
-import org.apache.logging.log4j.Logger;
+import org.apache.log4j.Logger;
 /**
  * <p>
@@ -38,7 +38,7 @@ import org.apache.logging.log4j.Logger;
  * @version $Revision$
  */
 public class IPMatcher {
-    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(IPMatcher.class);
+    private static Logger log = Logger.getLogger(IPMatcher.class);
     /**
      * Network to match

View File

@@ -10,7 +10,6 @@ package org.dspace.authenticate;
 import java.io.IOException;
 import java.sql.SQLException;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.Hashtable;
 import java.util.List;
 import javax.naming.NamingEnumeration;
@@ -28,8 +27,9 @@ import javax.naming.ldap.StartTlsResponse;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.logging.log4j.Logger;
+import org.apache.commons.collections.ListUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
 import org.dspace.authenticate.factory.AuthenticateServiceFactory;
 import org.dspace.authenticate.service.AuthenticationService;
 import org.dspace.authorize.AuthorizeException;
@@ -61,7 +61,7 @@ public class LDAPAuthentication
     /**
      * log4j category
     */
-    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(LDAPAuthentication.class);
+    private static Logger log = Logger.getLogger(LDAPAuthentication.class);
     protected AuthenticationService authenticationService = AuthenticateServiceFactory.getInstance()
                                                                                       .getAuthenticationService();
@@ -136,7 +136,7 @@ public class LDAPAuthentication
                 log.warn(LogManager.getHeader(context,
                     "ldap_specialgroup",
                     "Group defined in login.specialgroup does not exist"));
-                return Collections.EMPTY_LIST;
+                return ListUtils.EMPTY_LIST;
             } else {
                 return Arrays.asList(ldapGroup);
             }
@@ -145,7 +145,7 @@ public class LDAPAuthentication
         } catch (Exception npe) {
             // The user is not an LDAP user, so we don't need to worry about them
         }
-        return Collections.EMPTY_LIST;
+        return ListUtils.EMPTY_LIST;
     }
     /*
@@ -639,11 +639,6 @@ public class LDAPAuthentication
         return null;
     }
-    @Override
-    public String getName() {
-        return "ldap";
-    }
     /*
      * Add authenticated users to the group defined in dspace.cfg by
      * the authentication-ldap.login.groupmap.* key.

View File

@@ -9,13 +9,13 @@ package org.dspace.authenticate;
 import java.sql.SQLException;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.List;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
+import org.apache.commons.collections.ListUtils;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.logging.log4j.Logger;
+import org.apache.log4j.Logger;
 import org.dspace.core.Context;
 import org.dspace.core.LogManager;
 import org.dspace.eperson.EPerson;
@@ -49,7 +49,7 @@ public class PasswordAuthentication
     /**
      * log4j category
     */
-    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PasswordAuthentication.class);
+    private static Logger log = Logger.getLogger(PasswordAuthentication.class);
     /**
@@ -151,7 +151,7 @@ public class PasswordAuthentication
                     "password_specialgroup",
                     "Group defined in modules/authentication-password.cfg login" +
                         ".specialgroup does not exist"));
-                return Collections.EMPTY_LIST;
+                return ListUtils.EMPTY_LIST;
             } else {
                 return Arrays.asList(specialGroup);
             }
@@ -160,7 +160,7 @@
         } catch (Exception e) {
             log.error(LogManager.getHeader(context, "getSpecialGroups", ""), e);
         }
-        return Collections.EMPTY_LIST;
+        return ListUtils.EMPTY_LIST;
     }
     /**
@@ -242,9 +242,4 @@
                                HttpServletResponse response) {
         return null;
     }
-    @Override
-    public String getName() {
-        return "password";
-    }
 }
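These hunks keep trading Collections.EMPTY_LIST for ListUtils.EMPTY_LIST; both are raw, untyped constants. For what it's worth, the type-safe standard-library idiom is Collections.emptyList():

import java.util.Collections;
import java.util.List;

public class EmptyListSketch {
    public static void main(String[] args) {
        List<String> none = Collections.emptyList(); // typed, immutable, no unchecked warning
        System.out.println(none.size());             // 0
    }
}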

View File

@@ -12,7 +12,6 @@ import java.net.URLEncoder;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -23,8 +22,9 @@ import java.util.UUID;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.logging.log4j.Logger;
+import org.apache.commons.collections.ListUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
 import org.dspace.authenticate.factory.AuthenticateServiceFactory;
 import org.dspace.authorize.AuthorizeException;
 import org.dspace.content.MetadataField;
@@ -70,7 +70,7 @@ public class ShibAuthentication implements AuthenticationMethod {
     /**
      * log4j category
     */
-    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(ShibAuthentication.class);
+    private static Logger log = Logger.getLogger(ShibAuthentication.class);
     /**
      * Additional metadata mappings
@@ -288,7 +288,7 @@
         if (request == null ||
             context.getCurrentUser() == null ||
             request.getSession().getAttribute("shib.authenticated") == null) {
-            return Collections.EMPTY_LIST;
+            return ListUtils.EMPTY_LIST;
         }
         // If we have already calculated the special groups then return them.
@@ -404,7 +404,7 @@
             return new ArrayList<>(groups);
         } catch (Throwable t) {
             log.error("Unable to validate any sepcial groups this user may belong too because of an exception.", t);
-            return Collections.EMPTY_LIST;
+            return ListUtils.EMPTY_LIST;
         }
     }
@@ -538,11 +538,6 @@
         }
     }
-    @Override
-    public String getName() {
-        return "shibboleth";
-    }
     /**
      * Identify an existing EPerson based upon the shibboleth attributes provided on
      * the request object. There are three cases where this can occurr, each as

View File

@@ -21,7 +21,6 @@ import java.security.cert.CertificateFactory;
 import java.security.cert.X509Certificate;
 import java.sql.SQLException;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.Enumeration;
 import java.util.List;
 import java.util.StringTokenizer;
@@ -29,8 +28,9 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import javax.servlet.http.HttpSession;
-import org.apache.commons.lang3.ArrayUtils;
-import org.apache.logging.log4j.Logger;
+import org.apache.commons.collections.ListUtils;
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.log4j.Logger;
 import org.dspace.authenticate.factory.AuthenticateServiceFactory;
 import org.dspace.authenticate.service.AuthenticationService;
 import org.dspace.authorize.AuthorizeException;
@@ -105,7 +105,7 @@ public class X509Authentication implements AuthenticationMethod {
     /**
      * log4j category
     */
-    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(X509Authentication.class);
+    private static Logger log = Logger.getLogger(X509Authentication.class);
     /**
      * public key of CA to check client certs against.
@@ -442,7 +442,7 @@
     public List<Group> getSpecialGroups(Context context, HttpServletRequest request)
         throws SQLException {
         if (request == null) {
-            return Collections.EMPTY_LIST;
+            return ListUtils.EMPTY_LIST;
         }
         Boolean authenticated = false;
@@ -472,7 +472,7 @@
             return groups;
         }
-        return Collections.EMPTY_LIST;
+        return ListUtils.EMPTY_LIST;
     }
     /**
@@ -589,9 +589,4 @@
                                HttpServletResponse response) {
         return loginPageURL;
     }
-    @Override
-    public String getName() {
-        return "x509";
-    }
 }

View File

@@ -7,7 +7,6 @@
  */
 package org.dspace.authority;
-import java.io.IOException;
 import java.net.MalformedURLException;
 import java.util.List;
@@ -23,8 +22,7 @@ import org.apache.solr.client.solrj.response.QueryResponse;
  */
 public interface AuthoritySearchService {
-   public QueryResponse search(SolrQuery query)
-       throws SolrServerException, MalformedURLException, IOException;
+   public QueryResponse search(SolrQuery query) throws SolrServerException, MalformedURLException;
    public List<String> getAllIndexedMetadataFields() throws Exception;
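
Editor's note: the left-hand signature declares IOException because later SolrJ releases have SolrClient.query throw it alongside SolrServerException, and the interface simply propagates that. A caller-side sketch; searchService stands in for a wired AuthoritySearchService bean and is not part of this diff:

SolrQuery query = new SolrQuery("field:dc_contributor_author");
try {
    QueryResponse response = searchService.search(query);
    System.out.println(response.getResults().getNumFound());
} catch (SolrServerException | IOException e) {
    // IOException is only possible against the newer (left-hand) signature.
    log.error("Authority search failed", e);
}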

View File

@@ -46,10 +46,16 @@ public class AuthorityServiceImpl implements AuthorityService {
        }
        for (AuthorityIndexerInterface indexerInterface : indexers) {
-           List<AuthorityValue> authorityValues = indexerInterface.getAuthorityValues(context , item);
-           for (AuthorityValue authorityValue : authorityValues) {
-               indexingService.indexContent(authorityValue);
+           indexerInterface.init(context, item);
+           while (indexerInterface.hasMore()) {
+               AuthorityValue authorityValue = indexerInterface.nextValue();
+               if (authorityValue != null) {
+                   indexingService.indexContent(authorityValue, true);
+               }
            }
+           //Close up
+           indexerInterface.close();
        }
        //Commit to our server
        indexingService.commit();

View File

@@ -12,11 +12,10 @@ import java.net.MalformedURLException;
 import java.util.ArrayList;
 import java.util.List;
-import org.apache.logging.log4j.Logger;
-import org.apache.solr.client.solrj.SolrClient;
+import org.apache.log4j.Logger;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.impl.HttpSolrServer;
 import org.apache.solr.client.solrj.response.FacetField;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrInputDocument;
@@ -31,7 +30,7 @@ import org.dspace.core.ConfigurationManager;
  */
 public class AuthoritySolrServiceImpl implements AuthorityIndexingService, AuthoritySearchService {
-   private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthoritySolrServiceImpl.class);
+   private static final Logger log = Logger.getLogger(AuthoritySolrServiceImpl.class);
    protected AuthoritySolrServiceImpl() {
@@ -40,31 +39,28 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho
    /**
     * Non-Static CommonsHttpSolrServer for processing indexing events.
     */
-   protected SolrClient solr = null;
+   protected HttpSolrServer solr = null;
-   protected SolrClient getSolr()
-       throws MalformedURLException, SolrServerException, IOException {
+   protected HttpSolrServer getSolr() throws MalformedURLException, SolrServerException {
        if (solr == null) {
            String solrService = ConfigurationManager.getProperty("solr.authority.server");
            log.debug("Solr authority URL: " + solrService);
-           HttpSolrClient solrServer = new HttpSolrClient.Builder(solrService).build();
-           solrServer.setBaseURL(solrService);
+           solr = new HttpSolrServer(solrService);
+           solr.setBaseURL(solrService);
            SolrQuery solrQuery = new SolrQuery().setQuery("*:*");
-           solrServer.query(solrQuery);
-           solr = solrServer;
+           solr.query(solrQuery);
        }
        return solr;
    }
    @Override
-   public void indexContent(AuthorityValue value) {
+   public void indexContent(AuthorityValue value, boolean force) {
        SolrInputDocument doc = value.getSolrInputDocument();
        try {
@@ -133,8 +129,7 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho
    }
    @Override
-   public QueryResponse search(SolrQuery query)
-       throws SolrServerException, MalformedURLException, IOException {
+   public QueryResponse search(SolrQuery query) throws SolrServerException, MalformedURLException {
        return getSolr().query(query);
    }

View File

@@ -12,7 +12,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import org.apache.logging.log4j.Logger;
+import org.apache.log4j.Logger;
 /**
  * This class contains a list of active authority types.
@@ -32,7 +32,7 @@ public class AuthorityTypes {
    /**
     * log4j logger
     */
-   private static Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthorityTypes.class);
+   private static Logger log = Logger.getLogger(AuthorityTypes.class);
    protected List<AuthorityValue> types = new ArrayList<AuthorityValue>();

View File

@@ -8,15 +8,14 @@
 package org.dspace.authority;
 import java.sql.SQLException;
-import java.text.DateFormat;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.logging.log4j.Logger;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrInputDocument;
 import org.dspace.authorize.AuthorizeException;
@@ -24,7 +23,6 @@ import org.dspace.content.Item;
 import org.dspace.content.MetadataValue;
 import org.dspace.content.factory.ContentServiceFactory;
 import org.dspace.core.Context;
-import org.dspace.util.SolrUtils;
 import org.joda.time.DateTime;
 import org.joda.time.format.DateTimeFormatter;
 import org.joda.time.format.ISODateTimeFormat;
@@ -36,6 +34,8 @@ import org.joda.time.format.ISODateTimeFormat;
  * @author Mark Diggory (markd at atmire dot com)
  */
 public class AuthorityValue {
    /**
     * The id of the record in solr
     */
@@ -150,13 +150,12 @@ public class AuthorityValue {
    public SolrInputDocument getSolrInputDocument() {
        SolrInputDocument doc = new SolrInputDocument();
-       DateFormat solrDateFormatter = SolrUtils.getDateFormatter();
        doc.addField("id", getId());
        doc.addField("field", getField());
        doc.addField("value", getValue());
        doc.addField("deleted", isDeleted());
-       doc.addField("creation_date", solrDateFormatter.format(getCreationDate()));
-       doc.addField("last_modified_date", solrDateFormatter.format(getLastModified()));
+       doc.addField("creation_date", getCreationDate());
+       doc.addField("last_modified_date", getLastModified());
        doc.addField("authority_type", getAuthorityType());
        return doc;
    }
@@ -197,12 +196,12 @@ public class AuthorityValue {
     * @return map
     */
    public Map<String, String> choiceSelectMap() {
-       return new HashMap<>();
+       return new HashMap<String, String>();
    }
    public List<DateTimeFormatter> getDateFormatters() {
-       List<DateTimeFormatter> list = new ArrayList<>();
+       List<DateTimeFormatter> list = new ArrayList<DateTimeFormatter>();
        list.add(ISODateTimeFormat.dateTime());
        list.add(ISODateTimeFormat.dateTimeNoMillis());
        return list;
@@ -235,7 +234,7 @@ public class AuthorityValue {
    /**
     * log4j logger
     */
-   private static Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthorityValue.class);
+   private static Logger log = Logger.getLogger(AuthorityValue.class);
    @Override
    public String toString() {
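
Editor's note: the left-hand getSolrInputDocument() formats dates before writing them because Solr stores dates as ISO-8601 strings in UTC. This diff does not show SolrUtils, so the pattern below is an assumption about what its formatter plausibly looks like, not a quote of it:

import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class SolrDateNote {
    public static void main(String[] args) {
        // Assumed ISO-8601/UTC pattern, matching Solr's documented date form.
        DateFormat solrDateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
        solrDateFormatter.setTimeZone(TimeZone.getTimeZone("UTC"));
        System.out.println(solrDateFormatter.format(new Date())); // e.g. 2018-06-22T21:33:35Z
    }
}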

View File

@@ -13,8 +13,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.UUID;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.logging.log4j.Logger;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrDocument;
@@ -34,7 +34,7 @@ import org.springframework.beans.factory.annotation.Autowired;
  */
 public class AuthorityValueServiceImpl implements AuthorityValueService {
-   private final Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthorityValueServiceImpl.class);
+   private final Logger log = Logger.getLogger(AuthorityValueServiceImpl.class);
    @Autowired(required = true)
    protected AuthorityTypes authorityTypes;

View File

@@ -11,9 +11,9 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
-import java.util.Objects;
-import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang.ObjectUtils;
+import org.apache.commons.lang.StringUtils;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrInputDocument;
@@ -140,8 +140,8 @@ public class PersonAuthorityValue extends AuthorityValue {
    @Override
    public void setValues(SolrDocument document) {
        super.setValues(document);
-       this.firstName = Objects.toString(document.getFieldValue("first_name"), "");
-       this.lastName = Objects.toString(document.getFieldValue("last_name"), "");
+       this.firstName = ObjectUtils.toString(document.getFieldValue("first_name"));
+       this.lastName = ObjectUtils.toString(document.getFieldValue("last_name"));
        nameVariants = new ArrayList<String>();
        Collection<Object> document_name_variant = document.getFieldValues("name_variant");
        if (document_name_variant != null) {
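
Editor's note: the two calls are equivalent here: both map a null Solr field value to an empty string. A self-contained check (commons-lang 2.x on the classpath for the right-hand variant):

import java.util.Objects;
import org.apache.commons.lang.ObjectUtils;

public class ToStringNote {
    public static void main(String[] args) {
        System.out.println(Objects.toString(null, "").isEmpty());  // true (JDK 7+)
        System.out.println(ObjectUtils.toString(null).isEmpty());  // true (commons-lang 2.x)
        System.out.println(Objects.toString("Snyers", ""));        // Snyers
    }
}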

View File

@@ -1,20 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority;
import java.util.List;
/**
* @author Jonas Van Goolen (jonas at atmire dot com)
*/
public interface SolrAuthorityInterface {
List<AuthorityValue> queryAuthorities(String text, int max);
AuthorityValue queryAuthorityID(String id);
}

View File

@@ -19,7 +19,7 @@ import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.cli.PosixParser;
-import org.apache.logging.log4j.Logger;
+import org.apache.log4j.Logger;
 import org.dspace.authority.factory.AuthorityServiceFactory;
 import org.dspace.authority.service.AuthorityValueService;
 import org.dspace.content.Item;
@@ -40,7 +40,7 @@ public class UpdateAuthorities {
    /**
     * log4j logger
     */
-   private static Logger log = org.apache.logging.log4j.LogManager.getLogger(UpdateAuthorities.class);
+   private static Logger log = Logger.getLogger(UpdateAuthorities.class);
    protected PrintWriter print = null;

View File

@@ -35,7 +35,6 @@ public abstract class AuthorityServiceFactory {
    public abstract AuthorityService getAuthorityService();
    public abstract List<AuthorityIndexerInterface> getAuthorityIndexers();
    public static AuthorityServiceFactory getInstance() {

View File

@@ -12,7 +12,7 @@ import java.util.HashSet;
 import java.util.Set;
 import java.util.UUID;
-import org.apache.logging.log4j.Logger;
+import org.apache.log4j.Logger;
 import org.dspace.authority.factory.AuthorityServiceFactory;
 import org.dspace.authority.service.AuthorityService;
 import org.dspace.content.DSpaceObject;
@@ -33,7 +33,7 @@ import org.dspace.event.Event;
  */
 public class AuthorityConsumer implements Consumer {
-   private final Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthorityConsumer.class);
+   private final Logger log = Logger.getLogger(AuthorityConsumer.class);
    /**
     * A set of all item IDs installed which need their authority updated

View File

@@ -8,17 +8,13 @@
 package org.dspace.authority.indexer;
 import java.util.HashMap;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import org.apache.logging.log4j.Logger;
+import org.apache.log4j.Logger;
 import org.dspace.authority.AuthorityValue;
 import org.dspace.authority.factory.AuthorityServiceFactory;
 import org.dspace.authority.service.AuthorityService;
-import org.dspace.content.Item;
-import org.dspace.content.factory.ContentServiceFactory;
-import org.dspace.content.service.ItemService;
 import org.dspace.core.Context;
 /**
@@ -29,7 +25,7 @@ import org.dspace.core.Context;
  */
 public class AuthorityIndexClient {
-   private static Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthorityIndexClient.class);
+   private static Logger log = Logger.getLogger(AuthorityIndexClient.class);
    protected static final AuthorityService authorityService =
        AuthorityServiceFactory.getInstance().getAuthorityService();
@@ -37,8 +33,6 @@ public class AuthorityIndexClient {
        AuthorityServiceFactory.getInstance().getAuthorityIndexingService();
    protected static final List<AuthorityIndexerInterface> indexers =
        AuthorityServiceFactory.getInstance().getAuthorityIndexers();
-   protected static final ItemService itemService =
-       ContentServiceFactory.getInstance().getItemService();
    /**
     * Default constructor
@@ -70,17 +64,15 @@ public class AuthorityIndexClient {
        for (AuthorityIndexerInterface indexerInterface : indexers) {
            log.info("Initialize " + indexerInterface.getClass().getName());
            System.out.println("Initialize " + indexerInterface.getClass().getName());
-           Iterator<Item> allItems = itemService.findAll(context);
-           Map<String, AuthorityValue> authorityCache = new HashMap<>();
-           while (allItems.hasNext()) {
-               Item item = allItems.next();
-               List<AuthorityValue> authorityValues = indexerInterface.getAuthorityValues(
-                   context, item, authorityCache);
-               for (AuthorityValue authorityValue : authorityValues) {
+           indexerInterface.init(context, true);
+           while (indexerInterface.hasMore()) {
+               AuthorityValue authorityValue = indexerInterface.nextValue();
+               if (authorityValue != null) {
                    toIndexValues.put(authorityValue.getId(), authorityValue);
                }
-               context.uncacheEntity(item);
            }
+           //Close up
+           indexerInterface.close();
        }
@@ -90,7 +82,7 @@ public class AuthorityIndexClient {
        log.info("Writing new data");
        System.out.println("Writing new data");
        for (String id : toIndexValues.keySet()) {
-           indexingService.indexContent(toIndexValues.get(id));
+           indexingService.indexContent(toIndexValues.get(id), true);
            indexingService.commit();
        }

View File

@@ -9,8 +9,6 @@
 package org.dspace.authority.indexer;
 import java.sql.SQLException;
-import java.util.List;
-import java.util.Map;
 import org.dspace.authority.AuthorityValue;
 import org.dspace.authorize.AuthorizeException;
@@ -25,10 +23,17 @@ import org.dspace.core.Context;
  */
 public interface AuthorityIndexerInterface {
-   public List<AuthorityValue> getAuthorityValues(Context context, Item item)
-       throws SQLException, AuthorizeException;
-   public List<AuthorityValue> getAuthorityValues(Context context, Item item, Map<String, AuthorityValue> cache)
-       throws SQLException, AuthorizeException;
+   public void init(Context context, Item item);
+   public void init(Context context, boolean useCache);
+   public void init(Context context);
+   public AuthorityValue nextValue();
+   public boolean hasMore() throws SQLException, AuthorizeException;
+   public void close();
    public boolean isConfiguredProperly();
 }
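
Editor's note: the right-hand contract is pull-based, and hasMore() both tests for and prepares the next value, as the loops in AuthorityServiceImpl and AuthorityIndexClient above show. A minimal consumption sketch; indexer and indexingService stand in for the Spring-wired beans:

indexer.init(context, true);              // or init(context) / init(context, item)
while (indexer.hasMore()) {               // advances and prepares nextValue()
    AuthorityValue value = indexer.nextValue();
    if (value != null) {
        indexingService.indexContent(value, true);
    }
}
indexer.close();                          // releases the item iterator and cache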

View File

@@ -19,7 +19,7 @@ import org.dspace.authority.AuthorityValue;
 public interface AuthorityIndexingService {
-   public void indexContent(AuthorityValue value);
+   public void indexContent(AuthorityValue value, boolean force);
    public void cleanIndex() throws Exception;

View File

@@ -9,12 +9,14 @@ package org.dspace.authority.indexer;
 import java.sql.SQLException;
 import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import org.apache.commons.collections4.CollectionUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.logging.log4j.Logger;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
 import org.dspace.authority.AuthorityValue;
 import org.dspace.authority.service.AuthorityValueService;
 import org.dspace.authorize.AuthorizeException;
@@ -29,13 +31,12 @@ import org.springframework.beans.factory.annotation.Autowired;
 /**
  * DSpaceAuthorityIndexer is used in IndexClient, which is called by the AuthorityConsumer and the indexing-script.
  * <p>
- * The DSpaceAuthorityIndexer will return a list of all authority values for a
- * given item. It will return an authority value for all metadata fields defined
- * in dspace.conf with 'authority.author.indexer.field'.
+ * An instance of DSpaceAuthorityIndexer is bound to a list of items.
+ * This can be one item or all items too depending on the init() method.
  * <p>
- * You have to call getAuthorityValues for every Item you want to index. But you
- * can supply an optional cache, to save the mapping from the metadata value to
- * the new authority values for metadata fields without an authority key.
+ * DSpaceAuthorityIndexer lets you iterate over each metadata value
+ * for each metadata field defined in dspace.cfg with 'authority.author.indexer.field'
+ * for each item in the list.
  * <p>
  *
  * @author Antoine Snyers (antoine at atmire.com)
@@ -45,16 +46,25 @@ import org.springframework.beans.factory.annotation.Autowired;
  */
 public class DSpaceAuthorityIndexer implements AuthorityIndexerInterface, InitializingBean {
-   private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(DSpaceAuthorityIndexer.class);
+   private static final Logger log = Logger.getLogger(DSpaceAuthorityIndexer.class);
+   protected Iterator<Item> itemIterator;
+   protected Item currentItem;
    /**
     * The list of metadata fields which are to be indexed *
     */
    protected List<String> metadataFields;
+   protected int currentFieldIndex;
+   protected int currentMetadataIndex;
+   protected AuthorityValue nextValue;
+   protected Context context;
    @Autowired(required = true)
    protected AuthorityValueService authorityValueService;
    @Autowired(required = true)
    protected ItemService itemService;
+   protected boolean useCache;
+   protected Map<String, AuthorityValue> cache;
    @Autowired(required = true)
    protected ConfigurationService configurationService;
@@ -70,83 +80,146 @@ public class DSpaceAuthorityIndexer implements AuthorityIndexerInterface, Initia
        }
    }
    @Override
-   public List<AuthorityValue> getAuthorityValues(Context context, Item item)
-       throws SQLException, AuthorizeException {
-       return getAuthorityValues(context, item, null);
-   }
-
-   public List<AuthorityValue> getAuthorityValues(Context context, Item item, Map<String, AuthorityValue> cache)
-       throws SQLException, AuthorizeException {
-       List<AuthorityValue> values = new ArrayList<>();
-       for (String metadataField : metadataFields) {
-           List<MetadataValue> metadataValues = itemService.getMetadataByMetadataString(item, metadataField);
-           for (MetadataValue metadataValue : metadataValues) {
-               String content = metadataValue.getValue();
-               String authorityKey = metadataValue.getAuthority();
-               // We only want to update our item IF our UUID is not present
-               // or if we need to generate one.
-               boolean requiresItemUpdate = StringUtils.isBlank(authorityKey) ||
-                   StringUtils.startsWith(authorityKey, AuthorityValueService.GENERATE);
-               AuthorityValue value = null;
-               if (StringUtils.isBlank(authorityKey) && cache != null) {
-                   // This is a value currently without an authority. So query
-                   // the cache, if an authority is found for the exact value.
-                   value = cache.get(content);
-               }
-               if (value == null) {
-                   value = getAuthorityValue(context, metadataField, content, authorityKey);
-               }
-               if (value != null) {
-                   if (requiresItemUpdate) {
-                       value.updateItem(context, item, metadataValue);
-                       try {
-                           itemService.update(context, item);
-                       } catch (Exception e) {
-                           log.error("Error creating a metadatavalue's authority", e);
-                       }
-                   }
-                   if (cache != null) {
-                       cache.put(content, value);
-                   }
-                   values.add(value);
-               } else {
-                   log.error("Error getting an authority value for " +
-                       "the metadata value \"" + content + "\" " +
-                       "in the field \"" + metadataField + "\" " +
-                       "of the item " + item.getHandle());
-               }
-           }
-       }
-       return values;
-   }
+   public void init(Context context, Item item) {
+       ArrayList<Item> itemList = new ArrayList<>();
+       itemList.add(item);
+       this.itemIterator = itemList.iterator();
+       currentItem = this.itemIterator.next();
+       initialize(context);
+   }
+
+   @Override
+   public void init(Context context) {
+       init(context, false);
+   }
+
+   @Override
+   public void init(Context context, boolean useCache) {
+       try {
+           this.itemIterator = itemService.findAll(context);
+           currentItem = this.itemIterator.next();
+       } catch (SQLException e) {
+           log.error("Error while retrieving all items in the metadata indexer");
+       }
+       initialize(context);
+       this.useCache = useCache;
+   }
+
+   protected void initialize(Context context) {
+       this.context = context;
+       currentFieldIndex = 0;
+       currentMetadataIndex = 0;
+       useCache = false;
+       cache = new HashMap<>();
+   }
+
+   @Override
+   public AuthorityValue nextValue() {
+       return nextValue;
+   }
+
+   @Override
+   public boolean hasMore() throws SQLException, AuthorizeException {
+       if (currentItem == null) {
+           return false;
+       }
+       // 1. iterate over the metadata values
+       String metadataField = metadataFields.get(currentFieldIndex);
+       List<MetadataValue> values = itemService.getMetadataByMetadataString(currentItem, metadataField);
+       if (currentMetadataIndex < values.size()) {
+           prepareNextValue(metadataField, values.get(currentMetadataIndex));
+           currentMetadataIndex++;
+           return true;
+       } else {
+           // 2. iterate over the metadata fields
+           if ((currentFieldIndex + 1) < metadataFields.size()) {
+               currentFieldIndex++;
+               //Reset our current metadata index since we are moving to another field
+               currentMetadataIndex = 0;
+               return hasMore();
+           } else {
+               // 3. iterate over the items
+               if (itemIterator.hasNext()) {
+                   currentItem = itemIterator.next();
+                   //Reset our current field index
+                   currentFieldIndex = 0;
+                   //Reset our current metadata index
+                   currentMetadataIndex = 0;
+               } else {
+                   currentItem = null;
+               }
+               return hasMore();
+           }
+       }
+   }
    /**
-    * This method looks at the authority of a metadata value.
+    * This method looks at the authority of a metadata.
     * If the authority can be found in solr, that value is reused.
     * Otherwise a new authority value will be generated that will be indexed in solr.
-    *
     * If the authority starts with AuthorityValueGenerator.GENERATE, a specific type of AuthorityValue will be
     * generated.
     * Depending on the type this may involve querying an external REST service
     *
-    * @param context Current DSpace context
     * @param metadataField Is one of the fields defined in dspace.cfg to be indexed.
-    * @param metadataContent Content of the current metadata value.
-    * @param metadataAuthorityKey Existing authority of the metadata value.
+    * @param value Is one of the values of the given metadataField in one of the items being indexed.
+    * @throws SQLException if database error
+    * @throws AuthorizeException if authorization error
     */
-   private AuthorityValue getAuthorityValue(Context context, String metadataField,
-                                            String metadataContent, String metadataAuthorityKey) {
-       if (StringUtils.isNotBlank(metadataAuthorityKey) &&
-           !metadataAuthorityKey.startsWith(AuthorityValueService.GENERATE)) {
-           // !uid.startsWith(AuthorityValueGenerator.GENERATE) is not strictly
-           // necessary here but it prevents exceptions in solr
-           AuthorityValue value = authorityValueService.findByUID(context, metadataAuthorityKey);
-           if (value != null) {
-               return value;
+   protected void prepareNextValue(String metadataField, MetadataValue value) throws SQLException, AuthorizeException {
+       nextValue = null;
+       String content = value.getValue();
+       String authorityKey = value.getAuthority();
+       //We only want to update our item IF our UUID is not present or if we need to generate one.
+       boolean requiresItemUpdate = StringUtils.isBlank(authorityKey) || StringUtils
+           .startsWith(authorityKey, AuthorityValueService.GENERATE);
+       if (StringUtils.isNotBlank(authorityKey) && !authorityKey.startsWith(AuthorityValueService.GENERATE)) {
+           // !uid.startsWith(AuthorityValueGenerator.GENERATE) is not strictly necessary here but it prevents
+           // exceptions in solr
+           nextValue = authorityValueService.findByUID(context, authorityKey);
+       }
+       if (nextValue == null && StringUtils.isBlank(authorityKey) && useCache) {
+           // A metadata without authority is being indexed
+           // If there is an exact match in the cache, reuse it rather than adding a new one.
+           AuthorityValue cachedAuthorityValue = cache.get(content);
+           if (cachedAuthorityValue != null) {
+               nextValue = cachedAuthorityValue;
            }
       }
-       return authorityValueService.generate(context, metadataAuthorityKey,
-           metadataContent, metadataField.replaceAll("\\.", "_"));
+       if (nextValue == null) {
+           nextValue = authorityValueService
+               .generate(context, authorityKey, content, metadataField.replaceAll("\\.", "_"));
+       }
+       if (nextValue != null && requiresItemUpdate) {
+           nextValue.updateItem(context, currentItem, value);
+           try {
+               itemService.update(context, currentItem);
+           } catch (Exception e) {
+               log.error("Error creating a metadatavalue's authority", e);
+           }
+       }
+       if (useCache) {
+           cache.put(content, nextValue);
+       }
+   }
+
+   @Override
+   public void close() {
+       itemIterator = null;
+       cache.clear();
    }

    @Override
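
Editor's note: on the left-hand (list-returning) side the cache lives with the caller rather than inside the indexer; a condensed sketch of the full re-index loop, as wired up in AuthorityIndexClient above (all bean references assumed):

Map<String, AuthorityValue> cache = new HashMap<>();
Iterator<Item> items = itemService.findAll(context);
while (items.hasNext()) {
    Item item = items.next();
    for (AuthorityValue value : indexer.getAuthorityValues(context, item, cache)) {
        indexingService.indexContent(value);
    }
    context.uncacheEntity(item);  // keeps the Hibernate session from growing per item
}
indexingService.commit();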

View File

@@ -0,0 +1,87 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.orcid.model.Bio;
import org.dspace.authority.orcid.model.Work;
import org.dspace.authority.orcid.xml.XMLtoBio;
import org.dspace.authority.orcid.xml.XMLtoWork;
import org.dspace.authority.rest.RestSource;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.w3c.dom.Document;
/**
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class Orcid extends RestSource {
/**
* log4j logger
*/
private static Logger log = Logger.getLogger(Orcid.class);
private static Orcid orcid;
public static Orcid getOrcid() {
if (orcid == null) {
orcid = DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName("OrcidSource", Orcid.class);
}
return orcid;
}
private Orcid(String url) {
super(url);
}
public Bio getBio(String id) {
Document bioDocument = restConnector.get(id + "/orcid-bio");
XMLtoBio converter = new XMLtoBio();
Bio bio = converter.convert(bioDocument).get(0);
bio.setOrcid(id);
return bio;
}
public List<Work> getWorks(String id) {
Document document = restConnector.get(id + "/orcid-works");
XMLtoWork converter = new XMLtoWork();
return converter.convert(document);
}
public List<Bio> queryBio(String name, int start, int rows) {
Document bioDocument = restConnector
.get("search/orcid-bio?q=" + URLEncoder.encode("\"" + name + "\"") + "&start=" + start + "&rows=" + rows);
XMLtoBio converter = new XMLtoBio();
return converter.convert(bioDocument);
}
@Override
public List<AuthorityValue> queryAuthorities(String text, int max) {
List<Bio> bios = queryBio(text, 0, max);
List<AuthorityValue> authorities = new ArrayList<AuthorityValue>();
for (Bio bio : bios) {
authorities.add(OrcidAuthorityValue.create(bio));
}
return authorities;
}
@Override
public AuthorityValue queryAuthorityID(String id) {
Bio bio = getBio(id);
return OrcidAuthorityValue.create(bio);
}
}
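
Editor's note: a short usage sketch of the class added above; the "OrcidSource" bean name comes from this file, while the ORCID iD and query string are illustrative:

Orcid orcid = Orcid.getOrcid();                          // resolved via the DSpace service manager
Bio bio = orcid.getBio("0000-0002-1825-0097");           // fetches and parses <id>/orcid-bio
List<AuthorityValue> matches = orcid.queryAuthorities("Antoine Snyers", 10);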

View File

@@ -0,0 +1,328 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrInputDocument;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.AuthorityValueServiceImpl;
import org.dspace.authority.PersonAuthorityValue;
import org.dspace.authority.orcid.model.Bio;
import org.dspace.authority.orcid.model.BioExternalIdentifier;
import org.dspace.authority.orcid.model.BioName;
import org.dspace.authority.orcid.model.BioResearcherUrl;
/**
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class OrcidAuthorityValue extends PersonAuthorityValue {
/**
* log4j logger
*/
private static Logger log = Logger.getLogger(OrcidAuthorityValue.class);
private String orcid_id;
private Map<String, List<String>> otherMetadata = new HashMap<String, List<String>>();
private boolean update; // used in setValues(Bio bio)
/**
* Creates an instance of OrcidAuthorityValue with only uninitialized fields.
* This is meant to be filled in with values from an existing record.
* To create a brand new OrcidAuthorityValue, use create()
*/
public OrcidAuthorityValue() {
}
public OrcidAuthorityValue(SolrDocument document) {
super(document);
}
public String getOrcid_id() {
return orcid_id;
}
public void setOrcid_id(String orcid_id) {
this.orcid_id = orcid_id;
}
public Map<String, List<String>> getOtherMetadata() {
return otherMetadata;
}
public void addOtherMetadata(String label, String data) {
List<String> strings = otherMetadata.get(label);
if (strings == null) {
strings = new ArrayList<String>();
}
strings.add(data);
otherMetadata.put(label, strings);
}
@Override
public SolrInputDocument getSolrInputDocument() {
SolrInputDocument doc = super.getSolrInputDocument();
if (StringUtils.isNotBlank(getOrcid_id())) {
doc.addField("orcid_id", getOrcid_id());
}
for (String t : otherMetadata.keySet()) {
List<String> data = otherMetadata.get(t);
for (String data_entry : data) {
doc.addField("label_" + t, data_entry);
}
}
return doc;
}
@Override
public void setValues(SolrDocument document) {
super.setValues(document);
this.orcid_id = String.valueOf(document.getFieldValue("orcid_id"));
otherMetadata = new HashMap<String, List<String>>();
for (String fieldName : document.getFieldNames()) {
String labelPrefix = "label_";
if (fieldName.startsWith(labelPrefix)) {
String label = fieldName.substring(labelPrefix.length());
List<String> list = new ArrayList<String>();
Collection<Object> fieldValues = document.getFieldValues(fieldName);
for (Object o : fieldValues) {
list.add(String.valueOf(o));
}
otherMetadata.put(label, list);
}
}
}
public static OrcidAuthorityValue create() {
OrcidAuthorityValue orcidAuthorityValue = new OrcidAuthorityValue();
orcidAuthorityValue.setId(UUID.randomUUID().toString());
orcidAuthorityValue.updateLastModifiedDate();
orcidAuthorityValue.setCreationDate(new Date());
return orcidAuthorityValue;
}
/**
* Create an authority based on a given orcid bio
*
* @param bio Bio
* @return OrcidAuthorityValue
*/
public static OrcidAuthorityValue create(Bio bio) {
OrcidAuthorityValue authority = OrcidAuthorityValue.create();
authority.setValues(bio);
return authority;
}
public boolean setValues(Bio bio) {
BioName name = bio.getName();
if (updateValue(bio.getOrcid(), getOrcid_id())) {
setOrcid_id(bio.getOrcid());
}
if (updateValue(name.getFamilyName(), getLastName())) {
setLastName(name.getFamilyName());
}
if (updateValue(name.getGivenNames(), getFirstName())) {
setFirstName(name.getGivenNames());
}
if (StringUtils.isNotBlank(name.getCreditName())) {
if (!getNameVariants().contains(name.getCreditName())) {
addNameVariant(name.getCreditName());
update = true;
}
}
for (String otherName : name.getOtherNames()) {
if (!getNameVariants().contains(otherName)) {
addNameVariant(otherName);
update = true;
}
}
if (updateOtherMetadata("country", bio.getCountry())) {
addOtherMetadata("country", bio.getCountry());
}
for (String keyword : bio.getKeywords()) {
if (updateOtherMetadata("keyword", keyword)) {
addOtherMetadata("keyword", keyword);
}
}
for (BioExternalIdentifier externalIdentifier : bio.getBioExternalIdentifiers()) {
if (updateOtherMetadata("external_identifier", externalIdentifier.toString())) {
addOtherMetadata("external_identifier", externalIdentifier.toString());
}
}
for (BioResearcherUrl researcherUrl : bio.getResearcherUrls()) {
if (updateOtherMetadata("researcher_url", researcherUrl.toString())) {
addOtherMetadata("researcher_url", researcherUrl.toString());
}
}
if (updateOtherMetadata("biography", bio.getBiography())) {
addOtherMetadata("biography", bio.getBiography());
}
setValue(getName());
if (update) {
update();
}
boolean result = update;
update = false;
return result;
}
private boolean updateOtherMetadata(String label, String data) {
List<String> strings = getOtherMetadata().get(label);
boolean update;
if (strings == null) {
update = StringUtils.isNotBlank(data);
} else {
update = !strings.contains(data);
}
if (update) {
this.update = true;
}
return update;
}
private boolean updateValue(String incoming, String resident) {
boolean update = StringUtils.isNotBlank(incoming) && !incoming.equals(resident);
if (update) {
this.update = true;
}
return update;
}
@Override
public Map<String, String> choiceSelectMap() {
Map<String, String> map = super.choiceSelectMap();
map.put("orcid", getOrcid_id());
return map;
}
@Override
public String getAuthorityType() {
return "orcid";
}
@Override
public String generateString() {
String generateString = AuthorityValueServiceImpl.GENERATE + getAuthorityType() + AuthorityValueServiceImpl
.SPLIT;
if (StringUtils.isNotBlank(getOrcid_id())) {
generateString += getOrcid_id();
}
return generateString;
}
@Override
public AuthorityValue newInstance(String info) {
AuthorityValue authorityValue = null;
if (StringUtils.isNotBlank(info)) {
Orcid orcid = Orcid.getOrcid();
authorityValue = orcid.queryAuthorityID(info);
} else {
authorityValue = OrcidAuthorityValue.create();
}
return authorityValue;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
OrcidAuthorityValue that = (OrcidAuthorityValue) o;
if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
return orcid_id != null ? orcid_id.hashCode() : 0;
}
@Override
public boolean hasTheSameInformationAs(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.hasTheSameInformationAs(o)) {
return false;
}
OrcidAuthorityValue that = (OrcidAuthorityValue) o;
if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
return false;
}
for (String key : otherMetadata.keySet()) {
if (otherMetadata.get(key) != null) {
List<String> metadata = otherMetadata.get(key);
List<String> otherMetadata = that.otherMetadata.get(key);
if (otherMetadata == null) {
return false;
} else {
HashSet<String> metadataSet = new HashSet<String>(metadata);
HashSet<String> otherMetadataSet = new HashSet<String>(otherMetadata);
if (!metadataSet.equals(otherMetadataSet)) {
return false;
}
}
} else {
if (that.otherMetadata.get(key) != null) {
return false;
}
}
}
return true;
}
}
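
Editor's note: setValues(Bio) doubles as a dirty check: it returns true only when the incoming bio actually changed the record, so a caller can skip re-indexing unchanged authorities. Sketch only; authority is an OrcidAuthorityValue loaded elsewhere and indexingService is a wired bean:

Bio freshBio = Orcid.getOrcid().getBio(authority.getOrcid_id());
if (authority.setValues(freshBio)) {      // true => at least one field changed
    indexingService.indexContent(authority, true);
    indexingService.commit();
}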

View File

@@ -0,0 +1,112 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
import java.util.LinkedHashSet;
import java.util.Set;
/**
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class Bio {
protected String orcid;
protected BioName name;
protected String country;
protected Set<String> keywords;
protected Set<BioExternalIdentifier> bioExternalIdentifiers;
protected Set<BioResearcherUrl> researcherUrls;
protected String biography;
public Bio() {
this.name = new BioName();
keywords = new LinkedHashSet<String>();
bioExternalIdentifiers = new LinkedHashSet<BioExternalIdentifier>();
researcherUrls = new LinkedHashSet<BioResearcherUrl>();
}
public String getOrcid() {
return orcid;
}
public void setOrcid(String orcid) {
this.orcid = orcid;
}
public BioName getName() {
return name;
}
public void setName(BioName name) {
this.name = name;
}
public String getCountry() {
return country;
}
public void setCountry(String country) {
this.country = country;
}
public Set<String> getKeywords() {
return keywords;
}
public void addKeyword(String keyword) {
this.keywords.add(keyword);
}
public Set<BioExternalIdentifier> getBioExternalIdentifiers() {
return bioExternalIdentifiers;
}
public void addExternalIdentifier(BioExternalIdentifier externalReference) {
bioExternalIdentifiers.add(externalReference);
}
public Set<BioResearcherUrl> getResearcherUrls() {
return researcherUrls;
}
public void addResearcherUrl(BioResearcherUrl researcherUrl) {
researcherUrls.add(researcherUrl);
}
public String getBiography() {
return biography;
}
public void setBiography(String biography) {
this.biography = biography;
}
@Override
public String toString() {
return "Bio{" +
"orcid='" + orcid + '\'' +
", name=" + name +
", country='" + country + '\'' +
", keywords=" + keywords +
", bioExternalIdentifiers=" + bioExternalIdentifiers +
", researcherUrls=" + researcherUrls +
", biography='" + biography + '\'' +
'}';
}
}

View File

@@ -0,0 +1,108 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class BioExternalIdentifier {
protected String id_orcid;
protected String id_common_name;
protected String id_reference;
protected String id_url;
public BioExternalIdentifier(String id_orcid, String id_common_name, String id_reference, String id_url) {
this.id_orcid = id_orcid;
this.id_common_name = id_common_name;
this.id_reference = id_reference;
this.id_url = id_url;
}
public String getId_orcid() {
return id_orcid;
}
public void setId_orcid(String id_orcid) {
this.id_orcid = id_orcid;
}
public String getId_common_name() {
return id_common_name;
}
public void setId_common_name(String id_common_name) {
this.id_common_name = id_common_name;
}
public String getId_reference() {
return id_reference;
}
public void setId_reference(String id_reference) {
this.id_reference = id_reference;
}
public String getId_url() {
return id_url;
}
public void setId_url(String id_url) {
this.id_url = id_url;
}
@Override
public String toString() {
return "BioExternalIdentifier{" +
"id_orcid='" + id_orcid + '\'' +
", id_common_name='" + id_common_name + '\'' +
", id_reference='" + id_reference + '\'' +
", id_url='" + id_url + '\'' +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
BioExternalIdentifier that = (BioExternalIdentifier) o;
if (id_common_name != null ? !id_common_name.equals(that.id_common_name) : that.id_common_name != null) {
return false;
}
if (id_orcid != null ? !id_orcid.equals(that.id_orcid) : that.id_orcid != null) {
return false;
}
if (id_reference != null ? !id_reference.equals(that.id_reference) : that.id_reference != null) {
return false;
}
if (id_url != null ? !id_url.equals(that.id_url) : that.id_url != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = id_orcid != null ? id_orcid.hashCode() : 0;
result = 31 * result + (id_common_name != null ? id_common_name.hashCode() : 0);
result = 31 * result + (id_reference != null ? id_reference.hashCode() : 0);
result = 31 * result + (id_url != null ? id_url.hashCode() : 0);
return result;
}
}

Some files were not shown because too many files have changed in this diff.