mirror of
https://github.com/DSpace/DSpace.git
synced 2025-10-07 01:54:22 +00:00
Merge remote-tracking branch 'upstream/main' into DS-4410
This commit is contained in:
35
.codecov.yml
Normal file
35
.codecov.yml
Normal file
@@ -0,0 +1,35 @@
|
||||
# DSpace configuration for Codecov.io coverage reports
|
||||
# These override the default YAML settings at
|
||||
# https://docs.codecov.io/docs/codecov-yaml#section-default-yaml
|
||||
# Can be validated via instructions at:
|
||||
# https://docs.codecov.io/docs/codecov-yaml#validate-your-repository-yaml
|
||||
|
||||
# Tell Codecov not to send a coverage notification until (at least) 2 builds are completed
|
||||
# Since we run Unit & Integration tests in parallel, this lets Codecov know that coverage
|
||||
# needs to be merged across those builds
|
||||
codecov:
|
||||
notify:
|
||||
after_n_builds: 2
|
||||
|
||||
# Settings related to code coverage analysis
|
||||
coverage:
|
||||
status:
|
||||
# Configuration for project-level checks. This checks how the PR changes overall coverage.
|
||||
project:
|
||||
default:
|
||||
# For each PR, auto compare coverage to previous commit.
|
||||
# Require that overall (project) coverage does NOT drop more than 0.5%
|
||||
target: auto
|
||||
threshold: 0.5%
|
||||
# Configuration for patch-level checks. This checks the relative coverage of the new PR code ONLY.
|
||||
patch:
|
||||
default:
|
||||
# Enable informational mode, which just provides info to reviewers & always passes
|
||||
# https://docs.codecov.io/docs/commit-status#section-informational
|
||||
informational: true
|
||||
|
||||
# Turn PR comments "off". This feature adds the code coverage summary as a
|
||||
# comment on each PR. See https://docs.codecov.io/docs/pull-request-comments
|
||||
# However, this same info is available from the Codecov checks in the PR's
|
||||
# "Checks" tab in GitHub. So, the comment is unnecessary.
|
||||
comment: false
|
4
.github/pull_request_template.md
vendored
4
.github/pull_request_template.md
vendored
@@ -1,7 +1,7 @@
|
||||
## References
|
||||
_Add references/links to any related issues or PRs. These may include:_
|
||||
* Related to [REST Contract](https://github.com/DSpace/Rest7Contract) or an open REST Contract PR, if any
|
||||
* Fixes [GitHub issue](https://github.com/DSpace/DSpace/issues), if any
|
||||
* Fixes #[issue-number]
|
||||
* Related to [REST Contract](https://github.com/DSpace/Rest7Contract)
|
||||
|
||||
## Description
|
||||
Short summary of changes (1-2 sentences).
|
||||
|
65
.github/workflows/build.yml
vendored
Normal file
65
.github/workflows/build.yml
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
# DSpace Continuous Integration/Build via GitHub Actions
|
||||
# Concepts borrowed from
|
||||
# https://docs.github.com/en/free-pro-team@latest/actions/guides/building-and-testing-java-with-maven
|
||||
name: Build
|
||||
|
||||
# Run this Build for all pushes / PRs to current branch
|
||||
on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
tests:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
# Give Maven 1GB of memory to work with
|
||||
# Suppress all Maven "downloading" messages in Travis logs (see https://stackoverflow.com/a/35653426)
|
||||
# This also slightly speeds builds, as there is less logging
|
||||
MAVEN_OPTS: "-Xmx1024M -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn"
|
||||
strategy:
|
||||
# Create a matrix of two separate configurations for Unit vs Integration Tests
|
||||
# This will ensure those tasks are run in parallel
|
||||
matrix:
|
||||
include:
|
||||
# NOTE: Unit Tests include deprecated REST API v6 (as it has unit tests)
|
||||
- type: "Unit Tests"
|
||||
mvnflags: "-DskipUnitTests=false -Pdspace-rest"
|
||||
# NOTE: ITs skip all code validation checks, as they are already done by Unit Test job.
|
||||
# - enforcer.skip => Skip maven-enforcer-plugin rules
|
||||
# - checkstyle.skip => Skip all checkstyle checks by maven-checkstyle-plugin
|
||||
# - license.skip => Skip all license header checks by license-maven-plugin
|
||||
# - xml.skip => Skip all XML/XSLT validation by xml-maven-plugin
|
||||
- type: "Integration Tests"
|
||||
mvnflags: "-DskipIntegrationTests=false -Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxml.skip=true"
|
||||
# Do NOT exit immediately if one matrix job fails
|
||||
# This ensures ITs continue running even if Unit Tests fail, or visa versa
|
||||
fail-fast: false
|
||||
# These are the actual CI steps to perform per job
|
||||
steps:
|
||||
# https://github.com/actions/checkout
|
||||
- name: Checkout codebase
|
||||
uses: actions/checkout@v1
|
||||
|
||||
# https://github.com/actions/setup-java
|
||||
- name: Install JDK 11
|
||||
uses: actions/setup-java@v1
|
||||
with:
|
||||
java-version: 11
|
||||
|
||||
# https://github.com/actions/cache
|
||||
- name: Cache Maven dependencies
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
# Cache entire ~/.m2/repository
|
||||
path: ~/.m2/repository
|
||||
# Cache key is hash of all pom.xml files. Therefore any changes to POMs will invalidate cache
|
||||
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
|
||||
restore-keys: ${{ runner.os }}-maven-
|
||||
|
||||
# Run parallel Maven builds based on the above 'strategy.matrix'
|
||||
- name: Run Maven ${{ matrix.type }}
|
||||
env:
|
||||
TEST_FLAGS: ${{ matrix.mvnflags }}
|
||||
run: mvn install -B -V -P-assembly -Pcoverage-report $TEST_FLAGS
|
||||
|
||||
# https://github.com/codecov/codecov-action
|
||||
- name: Upload coverage to Codecov.io
|
||||
uses: codecov/codecov-action@v1
|
25
.github/workflows/label_merge_conflicts.yml
vendored
Normal file
25
.github/workflows/label_merge_conflicts.yml
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
# This workflow checks open PRs for merge conflicts and labels them when conflicts are found
|
||||
name: Check for merge conflicts
|
||||
|
||||
# Run whenever the "main" branch is updated
|
||||
# NOTE: This means merge conflicts are only checked for when a PR is merged to main.
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
triage:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
# See: https://github.com/mschilde/auto-label-merge-conflicts/
|
||||
- name: Auto-label PRs with merge conflicts
|
||||
uses: mschilde/auto-label-merge-conflicts@v2.0
|
||||
# Add "merge conflict" label if a merge conflict is detected. Remove it when resolved.
|
||||
# Note, the authentication token is created automatically
|
||||
# See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token
|
||||
with:
|
||||
CONFLICT_LABEL_NAME: 'merge conflict'
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
# Ignore errors
|
||||
continue-on-error: true
|
4
.gitignore
vendored
4
.gitignore
vendored
@@ -19,7 +19,7 @@ tags
|
||||
overlays/
|
||||
|
||||
## Ignore project files created by NetBeans
|
||||
nbproject/private/
|
||||
nbproject/
|
||||
build/
|
||||
nbbuild/
|
||||
dist/
|
||||
@@ -41,4 +41,4 @@ nb-configuration.xml
|
||||
.DS_Store
|
||||
|
||||
##Ignore JRebel project configuration
|
||||
rebel.xml
|
||||
rebel.xml
|
||||
|
46
.travis.yml
46
.travis.yml
@@ -1,46 +0,0 @@
|
||||
language: java
|
||||
sudo: false
|
||||
dist: trusty
|
||||
|
||||
env:
|
||||
# Give Maven 1GB of memory to work with
|
||||
- MAVEN_OPTS=-Xmx1024M
|
||||
|
||||
jdk:
|
||||
# DS-3384 Oracle JDK has DocLint enabled by default.
|
||||
# Let's use this to catch any newly introduced DocLint issues.
|
||||
- oraclejdk11
|
||||
|
||||
## Should we run into any problems with oraclejdk8 on Travis, we may try the following workaround.
|
||||
## https://docs.travis-ci.com/user/languages/java#Testing-Against-Multiple-JDKs
|
||||
## https://github.com/travis-ci/travis-ci/issues/3259#issuecomment-130860338
|
||||
#addons:
|
||||
# apt:
|
||||
# packages:
|
||||
# - oracle-java8-installer
|
||||
|
||||
before_install:
|
||||
# Remove outdated settings.xml from Travis builds. Workaround for https://github.com/travis-ci/travis-ci/issues/4629
|
||||
- rm ~/.m2/settings.xml
|
||||
|
||||
# Skip install stage, as we'll do it below
|
||||
install: "echo 'Skipping install stage, dependencies will be downloaded during build and test stages.'"
|
||||
|
||||
# Build DSpace and run both Unit and Integration Tests
|
||||
script:
|
||||
# Summary of flags used (below):
|
||||
# license:check => Validate all source code license headers
|
||||
# -Dmaven.test.skip=false => Enable DSpace Unit Tests
|
||||
# -DskipITs=false => Enable DSpace Integration Tests
|
||||
# -Pdspace-rest => Enable optional dspace-rest module as part of build
|
||||
# -P !assembly => Skip assembly of "dspace-installer" directory (as it can be memory intensive)
|
||||
# -B => Maven batch/non-interactive mode (recommended for CI)
|
||||
# -V => Display Maven version info before build
|
||||
# -Dsurefire.rerunFailingTestsCount=2 => try again for flakey tests, and keep track of/report on number of retries
|
||||
- "mvn clean install license:check -Dmaven.test.skip=false -DskipITs=false -Pdspace-rest -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"
|
||||
|
||||
# After a successful build and test (see 'script'), send code coverage reports to coveralls.io
|
||||
# These code coverage reports are generated by jacoco-maven-plugin (during test process above).
|
||||
after_success:
|
||||
# Run "verify", enabling the "coveralls" profile. This sends our reports to coveralls.io (see coveralls-maven-plugin)
|
||||
- "cd dspace && mvn verify -P coveralls"
|
@@ -51,7 +51,12 @@ RUN ant init_installation update_configs update_code update_webapps
|
||||
# Create a new tomcat image that does not retain the the build directory contents
|
||||
FROM tomcat:8-jdk11
|
||||
ENV DSPACE_INSTALL=/dspace
|
||||
ENV TOMCAT_INSTALL=/usr/local/tomcat
|
||||
COPY --from=ant_build /dspace $DSPACE_INSTALL
|
||||
# Enable the AJP connector in Tomcat's server.xml
|
||||
# NOTE: secretRequired="false" should only be used when AJP is NOT accessible from an external network. But, secretRequired="true" isn't supported by mod_proxy_ajp until Apache 2.5
|
||||
RUN sed -i '/Service name="Catalina".*/a \\n <Connector protocol="AJP/1.3" port="8009" address="0.0.0.0" redirectPort="8443" URIEncoding="UTF-8" secretRequired="false" />' $TOMCAT_INSTALL/conf/server.xml
|
||||
# Expose Tomcat port and AJP port
|
||||
EXPOSE 8080 8009
|
||||
|
||||
ENV JAVA_OPTS=-Xmx2000m
|
||||
|
24
README.md
24
README.md
@@ -1,7 +1,7 @@
|
||||
|
||||
# DSpace
|
||||
|
||||
[](https://travis-ci.com/DSpace/DSpace)
|
||||
[](https://github.com/DSpace/DSpace/actions?query=workflow%3ABuild)
|
||||
|
||||
[DSpace Documentation](https://wiki.lyrasis.org/display/DSDOC/) |
|
||||
[DSpace Releases](https://github.com/DSpace/DSpace/releases) |
|
||||
@@ -86,37 +86,37 @@ DSpace uses GitHub to track issues:
|
||||
### Running Tests
|
||||
|
||||
By default, in DSpace, Unit Tests and Integration Tests are disabled. However, they are
|
||||
run automatically by [Travis CI](https://travis-ci.com/DSpace/DSpace/) for all Pull Requests and code commits.
|
||||
run automatically by [GitHub Actions](https://github.com/DSpace/DSpace/actions?query=workflow%3ABuild) for all Pull Requests and code commits.
|
||||
|
||||
* How to run both Unit Tests (via `maven-surefire-plugin`) and Integration Tests (via `maven-failsafe-plugin`):
|
||||
```
|
||||
mvn clean test -Dmaven.test.skip=false -DskipITs=false
|
||||
mvn install -DskipUnitTests=false -DskipIntegrationTests=false
|
||||
```
|
||||
* How to run just Unit Tests:
|
||||
* How to run _only_ Unit Tests:
|
||||
```
|
||||
mvn test -Dmaven.test.skip=false
|
||||
mvn test -DskipUnitTests=false
|
||||
```
|
||||
* How to run a *single* Unit Test
|
||||
```
|
||||
# Run all tests in a specific test class
|
||||
# NOTE: failIfNoTests=false is required to skip tests in other modules
|
||||
mvn test -Dmaven.test.skip=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false
|
||||
mvn test -DskipUnitTests=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false
|
||||
|
||||
# Run one test method in a specific test class
|
||||
mvn test -Dmaven.test.skip=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
|
||||
mvn test -DskipUnitTests=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
|
||||
```
|
||||
* How to run Integration Tests (requires enabling Unit tests too)
|
||||
* How to run _only_ Integration Tests
|
||||
```
|
||||
mvn verify -Dmaven.test.skip=false -DskipITs=false
|
||||
mvn install -DskipIntegrationTests=false
|
||||
```
|
||||
* How to run a *single* Integration Test (requires enabling Unit tests too)
|
||||
* How to run a *single* Integration Test
|
||||
```
|
||||
# Run all integration tests in a specific test class
|
||||
# NOTE: failIfNoTests=false is required to skip tests in other modules
|
||||
mvn test -Dmaven.test.skip=false -DskipITs=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false
|
||||
mvn install -DskipIntegrationTests=false -Dit.test=[full.package.testClassName] -DfailIfNoTests=false
|
||||
|
||||
# Run one test method in a specific test class
|
||||
mvn test -Dmaven.test.skip=false -DskipITs=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
|
||||
mvn install -DskipIntegrationTests=false -Dit.test=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
|
||||
```
|
||||
* How to run only tests of a specific DSpace module
|
||||
```
|
||||
|
@@ -2,6 +2,7 @@ version: '3.7'
|
||||
networks:
|
||||
dspacenet:
|
||||
services:
|
||||
# DSpace (backend) webapp container
|
||||
dspace:
|
||||
container_name: dspace
|
||||
image: "${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-dspace-7_x-test}"
|
||||
@@ -15,6 +16,8 @@ services:
|
||||
ports:
|
||||
- published: 8080
|
||||
target: 8080
|
||||
- published: 8009
|
||||
target: 8009
|
||||
stdin_open: true
|
||||
tty: true
|
||||
volumes:
|
||||
@@ -31,10 +34,12 @@ services:
|
||||
while (!</dev/tcp/dspacedb/5432) > /dev/null 2>&1; do sleep 1; done;
|
||||
/dspace/bin/dspace database migrate
|
||||
catalina.sh run
|
||||
# DSpace database container
|
||||
dspacedb:
|
||||
container_name: dspacedb
|
||||
environment:
|
||||
PGDATA: /pgdata
|
||||
# Uses a custom Postgres image with pgcrypto installed
|
||||
image: dspace/dspace-postgres-pgcrypto
|
||||
networks:
|
||||
dspacenet:
|
||||
@@ -45,9 +50,11 @@ services:
|
||||
tty: true
|
||||
volumes:
|
||||
- pgdata:/pgdata
|
||||
# DSpace Solr container
|
||||
dspacesolr:
|
||||
container_name: dspacesolr
|
||||
image: dspace/dspace-solr
|
||||
# Uses official Solr image at https://hub.docker.com/_/solr/
|
||||
image: solr:8.7
|
||||
networks:
|
||||
dspacenet:
|
||||
ports:
|
||||
@@ -55,15 +62,27 @@ services:
|
||||
target: 8983
|
||||
stdin_open: true
|
||||
tty: true
|
||||
working_dir: /var/solr/data
|
||||
volumes:
|
||||
- solr_authority:/opt/solr/server/solr/authority/data
|
||||
- solr_oai:/opt/solr/server/solr/oai/data
|
||||
- solr_search:/opt/solr/server/solr/search/data
|
||||
- solr_statistics:/opt/solr/server/solr/statistics/data
|
||||
# Mount our local Solr core configs so that they are available as Solr configsets on container
|
||||
- ./dspace/solr/authority:/opt/solr/server/solr/configsets/authority
|
||||
- ./dspace/solr/oai:/opt/solr/server/solr/configsets/oai
|
||||
- ./dspace/solr/search:/opt/solr/server/solr/configsets/search
|
||||
- ./dspace/solr/statistics:/opt/solr/server/solr/configsets/statistics
|
||||
# Keep Solr data directory between reboots
|
||||
- solr_data:/var/solr/data
|
||||
# Initialize all DSpace Solr cores using the mounted local configsets (see above), then start Solr
|
||||
entrypoint:
|
||||
- /bin/bash
|
||||
- '-c'
|
||||
- |
|
||||
init-var-solr
|
||||
precreate-core authority /opt/solr/server/solr/configsets/authority
|
||||
precreate-core oai /opt/solr/server/solr/configsets/oai
|
||||
precreate-core search /opt/solr/server/solr/configsets/search
|
||||
precreate-core statistics /opt/solr/server/solr/configsets/statistics
|
||||
exec solr -f
|
||||
volumes:
|
||||
assetstore:
|
||||
pgdata:
|
||||
solr_authority:
|
||||
solr_oai:
|
||||
solr_search:
|
||||
solr_statistics:
|
||||
solr_data:
|
||||
|
@@ -12,7 +12,7 @@
|
||||
<parent>
|
||||
<groupId>org.dspace</groupId>
|
||||
<artifactId>dspace-parent</artifactId>
|
||||
<version>7.0-beta4-SNAPSHOT</version>
|
||||
<version>7.0-beta5-SNAPSHOT</version>
|
||||
<relativePath>..</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -127,44 +127,82 @@
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
<!-- This plugin allows us to run a Groovy script in our Maven POM
|
||||
(see: https://groovy.github.io/gmaven/groovy-maven-plugin/execute.html )
|
||||
We are generating a OS-agnostic version (agnostic.build.dir) of
|
||||
the ${project.build.directory} property (full path of target dir).
|
||||
This is needed by the Surefire & Failsafe plugins (see below)
|
||||
to initialize the Unit Test environment's dspace.cfg file.
|
||||
Otherwise, the Unit Test Framework will not work on Windows OS.
|
||||
This Groovy code was mostly borrowed from:
|
||||
http://stackoverflow.com/questions/3872355/how-to-convert-file-separator-in-maven
|
||||
-->
|
||||
<plugin>
|
||||
<groupId>org.codehaus.gmaven</groupId>
|
||||
<artifactId>groovy-maven-plugin</artifactId>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>setproperty</id>
|
||||
<phase>initialize</phase>
|
||||
<goals>
|
||||
<goal>execute</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<source>
|
||||
project.properties['agnostic.build.dir'] = project.build.directory.replace(File.separator, '/');
|
||||
log.info("Initializing Maven property 'agnostic.build.dir' to: {}", project.properties['agnostic.build.dir']);
|
||||
</source>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
<plugin>
|
||||
<groupId>com.mycila</groupId>
|
||||
<artifactId>license-maven-plugin</artifactId>
|
||||
<configuration>
|
||||
<excludes>
|
||||
<exclude>src/test/resources/**</exclude>
|
||||
<exclude>src/test/data/**</exclude>
|
||||
<!-- Ignore license header requirements on Flyway upgrade scripts -->
|
||||
<exclude>src/main/resources/org/dspace/storage/rdbms/flywayupgrade/**</exclude>
|
||||
</excludes>
|
||||
</configuration>
|
||||
</plugin>
|
||||
|
||||
</plugins>
|
||||
</build>
|
||||
|
||||
|
||||
<profiles>
|
||||
<profile>
|
||||
<id>findbugs</id>
|
||||
<id>spotbugs</id>
|
||||
<activation>
|
||||
<activeByDefault>false</activeByDefault>
|
||||
<!-- property>
|
||||
<name>maven.test.skip</name>
|
||||
<value>false</value>
|
||||
</property -->
|
||||
</activation>
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>findbugs-maven-plugin</artifactId>
|
||||
<groupId>com.github.spotbugs</groupId>
|
||||
<artifactId>spotbugs-maven-plugin</artifactId>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</profile>
|
||||
|
||||
<!-- If Unit Testing is enabled, then setup the Unit Test Environment.
|
||||
See also the 'skiptests' profile in Parent POM. -->
|
||||
<!-- Setup the Unit Test Environment (when -DskipUnitTests=false) -->
|
||||
<profile>
|
||||
<id>test-environment</id>
|
||||
<id>unit-test-environment</id>
|
||||
<activation>
|
||||
<activeByDefault>false</activeByDefault>
|
||||
<property>
|
||||
<name>maven.test.skip</name>
|
||||
<name>skipUnitTests</name>
|
||||
<value>false</value>
|
||||
</property>
|
||||
</activation>
|
||||
<build>
|
||||
<plugins>
|
||||
<!-- Unit/Integration Testing setup: This plugin unzips the
|
||||
<!-- Unit Testing setup: This plugin unzips the
|
||||
'testEnvironment.zip' file (created by dspace-parent POM), into
|
||||
the 'target/testing/' folder, to essentially create a test
|
||||
install of DSpace, against which Tests can be run. -->
|
||||
@@ -184,53 +222,16 @@
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>setupTestEnvironment</id>
|
||||
<id>setupUnitTestEnvironment</id>
|
||||
<phase>generate-test-resources</phase>
|
||||
<goals>
|
||||
<goal>unpack</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
<execution>
|
||||
<id>setupIntegrationTestEnvironment</id>
|
||||
<phase>pre-integration-test</phase>
|
||||
<goals>
|
||||
<goal>unpack</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
<!-- This plugin allows us to run a Groovy script in our Maven POM
|
||||
(see: http://gmaven.codehaus.org/Executing+Groovy+Code )
|
||||
We are generating a OS-agnostic version (agnostic.build.dir) of
|
||||
the ${project.build.directory} property (full path of target dir).
|
||||
This is needed by the Surefire & Failsafe plugins (see below)
|
||||
to initialize the Unit Test environment's dspace.cfg file.
|
||||
Otherwise, the Unit Test Framework will not work on Windows OS.
|
||||
This Groovy code was mostly borrowed from:
|
||||
http://stackoverflow.com/questions/3872355/how-to-convert-file-separator-in-maven
|
||||
-->
|
||||
<plugin>
|
||||
<groupId>org.codehaus.gmaven</groupId>
|
||||
<artifactId>groovy-maven-plugin</artifactId>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>setproperty</id>
|
||||
<phase>initialize</phase>
|
||||
<goals>
|
||||
<goal>execute</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<source>
|
||||
project.properties['agnostic.build.dir'] = project.build.directory.replace(File.separator, '/');
|
||||
log.info("Initializing Maven property 'agnostic.build.dir' to: {}", project.properties['agnostic.build.dir']);
|
||||
</source>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
<!-- Run Unit Testing! This plugin just kicks off the tests (when enabled). -->
|
||||
<!-- Run Unit Testing! This plugin just kicks off the tests. -->
|
||||
<plugin>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
<configuration>
|
||||
@@ -241,11 +242,56 @@
|
||||
<dspace.dir>${agnostic.build.dir}/testing/dspace/</dspace.dir>
|
||||
<!-- Turn off any DSpace logging -->
|
||||
<dspace.log.init.disable>true</dspace.log.init.disable>
|
||||
<solr.install.dir>${agnostic.build.dir}/testing/dspace/solr/</solr.install.dir>
|
||||
</systemPropertyVariables>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</profile>
|
||||
|
||||
<!-- Run Integration Testing! This plugin just kicks off the tests (when enabled). -->
|
||||
<!-- Setup the Integration Test Environment (when -DskipIntegrationTests=false) -->
|
||||
<profile>
|
||||
<id>integration-test-environment</id>
|
||||
<activation>
|
||||
<activeByDefault>false</activeByDefault>
|
||||
<property>
|
||||
<name>skipIntegrationTests</name>
|
||||
<value>false</value>
|
||||
</property>
|
||||
</activation>
|
||||
<build>
|
||||
<plugins>
|
||||
<!-- Integration Testing setup: This plugin unzips the
|
||||
'testEnvironment.zip' file (created by dspace-parent POM), into
|
||||
the 'target/testing/' folder, to essentially create a test
|
||||
install of DSpace, against which Tests can be run. -->
|
||||
<plugin>
|
||||
<artifactId>maven-dependency-plugin</artifactId>
|
||||
<configuration>
|
||||
<outputDirectory>${project.build.directory}/testing</outputDirectory>
|
||||
<artifactItems>
|
||||
<artifactItem>
|
||||
<groupId>org.dspace</groupId>
|
||||
<artifactId>dspace-parent</artifactId>
|
||||
<version>${project.version}</version>
|
||||
<type>zip</type>
|
||||
<classifier>testEnvironment</classifier>
|
||||
</artifactItem>
|
||||
</artifactItems>
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>setupIntegrationTestEnvironment</id>
|
||||
<phase>pre-integration-test</phase>
|
||||
<goals>
|
||||
<goal>unpack</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
<!-- Run Integration Testing! This plugin just kicks off the tests. -->
|
||||
<plugin>
|
||||
<artifactId>maven-failsafe-plugin</artifactId>
|
||||
<configuration>
|
||||
@@ -255,12 +301,12 @@
|
||||
<dspace.dir>${agnostic.build.dir}/testing/dspace/</dspace.dir>
|
||||
<!-- Turn off any DSpace logging -->
|
||||
<dspace.log.init.disable>true</dspace.log.init.disable>
|
||||
<solr.install.dir>${agnostic.build.dir}/testing/dspace/solr/</solr.install.dir>
|
||||
</systemPropertyVariables>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
|
||||
</profile>
|
||||
</profiles>
|
||||
|
||||
@@ -269,6 +315,13 @@
|
||||
<dependency>
|
||||
<groupId>org.hibernate</groupId>
|
||||
<artifactId>hibernate-ehcache</artifactId>
|
||||
<exclusions>
|
||||
<!-- Newer version pulled in via Jersey below -->
|
||||
<exclusion>
|
||||
<groupId>org.javassist</groupId>
|
||||
<artifactId>javassist</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.hibernate</groupId>
|
||||
@@ -279,16 +332,16 @@
|
||||
<artifactId>hibernate-validator-cdi</artifactId>
|
||||
<version>${hibernate-validator.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.hibernate.javax.persistence</groupId>
|
||||
<artifactId>hibernate-jpa-2.1-api</artifactId>
|
||||
<version>1.0.0.Final</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-orm</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.glassfish</groupId>
|
||||
<artifactId>javax.el</artifactId>
|
||||
<version>3.0.1-b10</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>net.handle</groupId>
|
||||
@@ -303,6 +356,48 @@
|
||||
<groupId>org.ow2.asm</groupId>
|
||||
<artifactId>asm-commons</artifactId>
|
||||
</exclusion>
|
||||
<!-- Newer version of Bouncycastle brought in via solr-cell -->
|
||||
<exclusion>
|
||||
<groupId>org.bouncycastle</groupId>
|
||||
<artifactId>bcpkix-jdk15on</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.bouncycastle</groupId>
|
||||
<artifactId>bcprov-jdk15on</artifactId>
|
||||
</exclusion>
|
||||
<!-- Newer version of Jetty in our parent POM & via Solr -->
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-alpn-java-server</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-deploy</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-servlet</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-servlets</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-webapp</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-xml</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty.http2</groupId>
|
||||
<artifactId>http2-common</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty.http2</groupId>
|
||||
<artifactId>http2-server</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<!-- Jetty is needed to run Handle Server -->
|
||||
@@ -310,10 +405,6 @@
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-server</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.dspace</groupId>
|
||||
<artifactId>jargon</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.dspace</groupId>
|
||||
<artifactId>mets</artifactId>
|
||||
@@ -323,6 +414,10 @@
|
||||
<artifactId>apache-jena-libs</artifactId>
|
||||
<type>pom</type>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-cli</groupId>
|
||||
<artifactId>commons-cli</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-codec</groupId>
|
||||
<artifactId>commons-codec</artifactId>
|
||||
@@ -379,10 +474,6 @@
|
||||
<groupId>org.jdom</groupId>
|
||||
<artifactId>jdom</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>oro</groupId>
|
||||
<artifactId>oro</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.pdfbox</groupId>
|
||||
<artifactId>pdfbox</artifactId>
|
||||
@@ -395,14 +486,6 @@
|
||||
<groupId>org.apache.poi</groupId>
|
||||
<artifactId>poi-scratchpad</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>rome</groupId>
|
||||
<artifactId>rome</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>rome</groupId>
|
||||
<artifactId>opensearch</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>xalan</groupId>
|
||||
<artifactId>xalan</artifactId>
|
||||
@@ -411,14 +494,6 @@
|
||||
<groupId>xerces</groupId>
|
||||
<artifactId>xercesImpl</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>xml-apis</groupId>
|
||||
<artifactId>xml-apis</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>javax.activation</groupId>
|
||||
<artifactId>activation</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.ibm.icu</groupId>
|
||||
<artifactId>icu4j</artifactId>
|
||||
@@ -438,7 +513,7 @@
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.hamcrest</groupId>
|
||||
<artifactId>hamcrest-core</artifactId>
|
||||
<artifactId>hamcrest-all</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
@@ -456,6 +531,7 @@
|
||||
<artifactId>spring-test</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<!-- Used for RSS / ATOM syndication feeds -->
|
||||
<dependency>
|
||||
<groupId>org.rometools</groupId>
|
||||
<artifactId>rome-modules</artifactId>
|
||||
@@ -479,64 +555,104 @@
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.solr</groupId>
|
||||
<artifactId>solr-cell</artifactId>
|
||||
<artifactId>solr-solrj</artifactId>
|
||||
<version>${solr.client.version}</version>
|
||||
<exclusions>
|
||||
<!-- Newer versions provided in our parent POM -->
|
||||
<exclusion>
|
||||
<groupId>org.ow2.asm</groupId>
|
||||
<artifactId>asm-commons</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.bouncycastle</groupId>
|
||||
<artifactId>bcpkix-jdk15on</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.bouncycastle</groupId>
|
||||
<artifactId>bcprov-jdk15on</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-xml</artifactId>
|
||||
</exclusion>
|
||||
<!-- Newer Jetty version brought in via Parent POM -->
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-http</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-servlet</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-webapp</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-util</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-deploy</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-continuation</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-servlets</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-io</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-security</artifactId>
|
||||
<artifactId>jetty-util</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<!-- Solr Core is needed for Integration Tests (to run a MockSolrServer) -->
|
||||
<!-- The following Solr / Lucene dependencies also support integration tests -->
|
||||
<dependency>
|
||||
<groupId>org.apache.solr</groupId>
|
||||
<artifactId>solr-core</artifactId>
|
||||
<scope>test</scope>
|
||||
<version>${solr.client.version}</version>
|
||||
<exclusions>
|
||||
<!-- Newer version brought in by opencsv -->
|
||||
<exclusion>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-text</artifactId>
|
||||
</exclusion>
|
||||
<!-- Newer Jetty version brought in via Parent POM -->
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-http</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-io</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-util</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.solr</groupId>
|
||||
<artifactId>solr-cell</artifactId>
|
||||
<exclusions>
|
||||
<!-- Newer version brought in by opencsv -->
|
||||
<exclusion>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-text</artifactId>
|
||||
</exclusion>
|
||||
<!-- Newer Jetty version brought in via Parent POM -->
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-http</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-io</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-util</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.lucene</groupId>
|
||||
<artifactId>lucene-core</artifactId>
|
||||
</dependency>
|
||||
<!-- Used for full-text indexing with Solr -->
|
||||
<dependency>
|
||||
<groupId>org.apache.tika</groupId>
|
||||
<artifactId>tika-core</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.lucene</groupId>
|
||||
<artifactId>lucene-analyzers-icu</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.lucene</groupId>
|
||||
<artifactId>lucene-analyzers-smartcn</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.lucene</groupId>
|
||||
<artifactId>lucene-analyzers-stempel</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.xmlbeans</groupId>
|
||||
<artifactId>xmlbeans</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.maxmind.geoip2</groupId>
|
||||
@@ -571,7 +687,6 @@
|
||||
<artifactId>guava</artifactId>
|
||||
</dependency>
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>org.postgresql</groupId>
|
||||
<artifactId>postgresql</artifactId>
|
||||
@@ -594,7 +709,7 @@
|
||||
<dependency>
|
||||
<groupId>org.flywaydb</groupId>
|
||||
<artifactId>flyway-core</artifactId>
|
||||
<version>4.0.3</version>
|
||||
<version>6.5.5</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Google Analytics -->
|
||||
@@ -618,6 +733,7 @@
|
||||
<groupId>com.google.oauth-client</groupId>
|
||||
<artifactId>google-oauth-client</artifactId>
|
||||
</dependency>
|
||||
|
||||
<!-- FindBugs -->
|
||||
<dependency>
|
||||
<groupId>com.google.code.findbugs</groupId>
|
||||
@@ -627,6 +743,7 @@
|
||||
<groupId>com.google.code.findbugs</groupId>
|
||||
<artifactId>annotations</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>joda-time</groupId>
|
||||
<artifactId>joda-time</artifactId>
|
||||
@@ -660,6 +777,11 @@
|
||||
<groupId>org.apache.geronimo.specs</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<!-- Exclude Woodstox, as later version provided by Solr dependencies -->
|
||||
<exclusion>
|
||||
<groupId>org.codehaus.woodstox</groupId>
|
||||
<artifactId>woodstox-core-asl</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
@@ -673,14 +795,28 @@
|
||||
<groupId>org.apache.geronimo.specs</groupId>
|
||||
<artifactId>*</artifactId>
|
||||
</exclusion>
|
||||
<!-- Exclude Woodstox, as later version provided by Solr dependencies -->
|
||||
<exclusion>
|
||||
<groupId>org.codehaus.woodstox</groupId>
|
||||
<artifactId>woodstox-core-asl</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<!-- Jersey / JAX-RS client (javax.ws.rs.*) dependencies needed to integrate with external sources/services -->
|
||||
<dependency>
|
||||
<groupId>org.glassfish.jersey.core</groupId>
|
||||
<artifactId>jersey-client</artifactId>
|
||||
<version>${jersey.version}</version>
|
||||
</dependency>
|
||||
<!-- Required because Jersey no longer includes a dependency injection provider by default.
|
||||
Needed to support PubMed API call in "PubmedImportMetadataSourceServiceImpl.GetRecord" -->
|
||||
<dependency>
|
||||
<groupId>org.glassfish.jersey.inject</groupId>
|
||||
<artifactId>jersey-hk2</artifactId>
|
||||
<version>${jersey.version}</version>
|
||||
</dependency>
|
||||
|
||||
<!-- S3 -->
|
||||
<dependency>
|
||||
<groupId>com.amazonaws</groupId>
|
||||
@@ -688,12 +824,30 @@
|
||||
<version>1.10.50</version>
|
||||
</dependency>
|
||||
|
||||
<!-- For ORCID v2 integration -->
|
||||
<dependency>
|
||||
<groupId>org.dspace</groupId>
|
||||
<artifactId>orcid-jaxb-api</artifactId>
|
||||
<version>2.1.0</version>
|
||||
<groupId>org.orcid</groupId>
|
||||
<artifactId>orcid-model</artifactId>
|
||||
<version>3.0.2</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>javax.validation</groupId>
|
||||
<artifactId>validation-api</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>com.fasterxml.jackson.jaxrs</groupId>
|
||||
<artifactId>jackson-jaxrs-json-provider</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.yaml</groupId>
|
||||
<artifactId>snakeyaml</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.javassist</groupId>
|
||||
<artifactId>javassist</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.json</groupId>
|
||||
<artifactId>json</artifactId>
|
||||
@@ -704,7 +858,7 @@
|
||||
<dependency>
|
||||
<groupId>com.opencsv</groupId>
|
||||
<artifactId>opencsv</artifactId>
|
||||
<version>4.5</version>
|
||||
<version>5.2</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Email templating -->
|
||||
@@ -717,18 +871,12 @@
|
||||
|
||||
<dependency>
|
||||
<groupId>org.xmlunit</groupId>
|
||||
<artifactId>xmlunit-matchers</artifactId>
|
||||
<artifactId>xmlunit-core</artifactId>
|
||||
<version>2.6.3</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.hibernate.javax.persistence</groupId>
|
||||
<artifactId>hibernate-jpa-2.1-api</artifactId>
|
||||
<version>1.0.0.Final</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.bcel</groupId>
|
||||
<artifactId>bcel</artifactId>
|
||||
<version>6.4.0</version>
|
||||
|
@@ -14,9 +14,9 @@ import java.util.UUID;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
import org.apache.commons.collections4.CollectionUtils;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Community;
|
||||
@@ -51,7 +51,7 @@ public class CommunityFiliator {
|
||||
*/
|
||||
public static void main(String[] argv) throws Exception {
|
||||
// create an options object and populate it
|
||||
CommandLineParser parser = new PosixParser();
|
||||
CommandLineParser parser = new DefaultParser();
|
||||
|
||||
Options options = new Options();
|
||||
|
||||
|
@@ -13,10 +13,9 @@ import java.util.Locale;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.I18nUtil;
|
||||
import org.dspace.eperson.EPerson;
|
||||
@@ -24,6 +23,8 @@ import org.dspace.eperson.Group;
|
||||
import org.dspace.eperson.factory.EPersonServiceFactory;
|
||||
import org.dspace.eperson.service.EPersonService;
|
||||
import org.dspace.eperson.service.GroupService;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/**
|
||||
* A command-line tool for creating an initial administrator for setting up a
|
||||
@@ -61,7 +62,7 @@ public final class CreateAdministrator {
|
||||
*/
|
||||
public static void main(String[] argv)
|
||||
throws Exception {
|
||||
CommandLineParser parser = new PosixParser();
|
||||
CommandLineParser parser = new DefaultParser();
|
||||
Options options = new Options();
|
||||
|
||||
CreateAdministrator ca = new CreateAdministrator();
|
||||
@@ -147,9 +148,10 @@ public final class CreateAdministrator {
|
||||
lastName = lastName.trim();
|
||||
}
|
||||
|
||||
if (ConfigurationManager.getProperty("webui.supported.locales") != null) {
|
||||
System.out.println("Select one of the following languages: " + ConfigurationManager
|
||||
.getProperty("webui.supported.locales"));
|
||||
ConfigurationService cfg = DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
if (cfg.hasProperty("webui.supported.locales")) {
|
||||
System.out.println("Select one of the following languages: "
|
||||
+ cfg.getProperty("webui.supported.locales"));
|
||||
System.out.print("Language: ");
|
||||
System.out.flush();
|
||||
|
||||
|
@@ -10,6 +10,7 @@ package org.dspace.administer;
|
||||
import java.io.BufferedWriter;
|
||||
import java.io.FileWriter;
|
||||
import java.io.IOException;
|
||||
import java.io.Writer;
|
||||
import java.sql.SQLException;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
@@ -17,25 +18,28 @@ import java.util.Map;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
import org.apache.xml.serialize.Method;
|
||||
import org.apache.xml.serialize.OutputFormat;
|
||||
import org.apache.xml.serialize.XMLSerializer;
|
||||
import org.dspace.content.MetadataField;
|
||||
import org.dspace.content.MetadataSchema;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.MetadataFieldService;
|
||||
import org.dspace.content.service.MetadataSchemaService;
|
||||
import org.dspace.core.Context;
|
||||
import org.xml.sax.SAXException;
|
||||
import org.w3c.dom.DOMConfiguration;
|
||||
import org.w3c.dom.Document;
|
||||
import org.w3c.dom.Element;
|
||||
import org.w3c.dom.bootstrap.DOMImplementationRegistry;
|
||||
import org.w3c.dom.ls.DOMImplementationLS;
|
||||
import org.w3c.dom.ls.LSOutput;
|
||||
import org.w3c.dom.ls.LSSerializer;
|
||||
|
||||
|
||||
/**
|
||||
* @author Graham Triggs
|
||||
*
|
||||
* This class creates an xml document as passed in the arguments and
|
||||
* This class creates an XML document as passed in the arguments and
|
||||
* from the metadata schemas for the repository.
|
||||
*
|
||||
* The form of the XML is as follows
|
||||
@@ -61,17 +65,20 @@ public class MetadataExporter {
|
||||
private MetadataExporter() { }
|
||||
|
||||
/**
|
||||
* @param args commandline arguments
|
||||
* @param args command line arguments
|
||||
* @throws ParseException if parser error
|
||||
* @throws SAXException if XML parse error
|
||||
* @throws IOException if IO error
|
||||
* @throws SQLException if database error
|
||||
* @throws RegistryExportException if export error
|
||||
* @throws ClassNotFoundException if no suitable DOM implementation
|
||||
* @throws InstantiationException if no suitable DOM implementation
|
||||
* @throws IllegalAccessException if no suitable DOM implementation
|
||||
*/
|
||||
public static void main(String[] args)
|
||||
throws ParseException, SQLException, IOException, SAXException, RegistryExportException {
|
||||
throws ParseException, SQLException, IOException, RegistryExportException,
|
||||
ClassNotFoundException, InstantiationException, IllegalAccessException {
|
||||
// create an options object and populate it
|
||||
CommandLineParser parser = new PosixParser();
|
||||
CommandLineParser parser = new DefaultParser();
|
||||
Options options = new Options();
|
||||
options.addOption("f", "file", true, "output xml file for registry");
|
||||
options.addOption("s", "schema", true, "the name of the schema to export");
|
||||
@@ -95,32 +102,31 @@ public class MetadataExporter {
|
||||
}
|
||||
|
||||
/**
|
||||
* Save a registry to a filepath
|
||||
* Save a registry to a file path
|
||||
*
|
||||
* @param file filepath
|
||||
* @param file file path
|
||||
* @param schema schema definition to save
|
||||
* @throws SQLException if database error
|
||||
* @throws IOException if IO error
|
||||
* @throws SAXException if XML error
|
||||
* @throws RegistryExportException if export error
|
||||
* @throws ClassNotFoundException if no suitable DOM implementation
|
||||
* @throws InstantiationException if no suitable DOM implementation
|
||||
* @throws IllegalAccessException if no suitable DOM implementation
|
||||
*/
|
||||
public static void saveRegistry(String file, String schema)
|
||||
throws SQLException, IOException, SAXException, RegistryExportException {
|
||||
throws SQLException, IOException, RegistryExportException,
|
||||
ClassNotFoundException, InstantiationException, IllegalAccessException {
|
||||
// create a context
|
||||
Context context = new Context();
|
||||
context.turnOffAuthorisationSystem();
|
||||
|
||||
OutputFormat xmlFormat = new OutputFormat(Method.XML, "UTF-8", true);
|
||||
xmlFormat.setLineWidth(120);
|
||||
xmlFormat.setIndent(4);
|
||||
|
||||
XMLSerializer xmlSerializer = new XMLSerializer(new BufferedWriter(new FileWriter(file)), xmlFormat);
|
||||
// XMLSerializer xmlSerializer = new XMLSerializer(System.out, xmlFormat);
|
||||
xmlSerializer.startDocument();
|
||||
xmlSerializer.startElement("dspace-dc-types", null);
|
||||
// Initialize an XML document.
|
||||
Document document = DOMImplementationRegistry.newInstance()
|
||||
.getDOMImplementation("XML 3.0")
|
||||
.createDocument(null, "dspace-dc-types", null);
|
||||
|
||||
// Save the schema definition(s)
|
||||
saveSchema(context, xmlSerializer, schema);
|
||||
saveSchema(context, document, schema);
|
||||
|
||||
List<MetadataField> mdFields = null;
|
||||
|
||||
@@ -139,55 +145,64 @@ public class MetadataExporter {
|
||||
mdFields = metadataFieldService.findAll(context);
|
||||
}
|
||||
|
||||
// Output the metadata fields
|
||||
// Compose the metadata fields
|
||||
for (MetadataField mdField : mdFields) {
|
||||
saveType(context, xmlSerializer, mdField);
|
||||
saveType(context, document, mdField);
|
||||
}
|
||||
|
||||
xmlSerializer.endElement("dspace-dc-types");
|
||||
xmlSerializer.endDocument();
|
||||
// Serialize the completed document to the output file.
|
||||
try (Writer writer = new BufferedWriter(new FileWriter(file))) {
|
||||
DOMImplementationLS lsImplementation
|
||||
= (DOMImplementationLS) DOMImplementationRegistry.newInstance()
|
||||
.getDOMImplementation("LS");
|
||||
LSSerializer serializer = lsImplementation.createLSSerializer();
|
||||
DOMConfiguration configuration = serializer.getDomConfig();
|
||||
configuration.setParameter("format-pretty-print", true);
|
||||
LSOutput lsOutput = lsImplementation.createLSOutput();
|
||||
lsOutput.setEncoding("UTF-8");
|
||||
lsOutput.setCharacterStream(writer);
|
||||
serializer.write(document, lsOutput);
|
||||
}
|
||||
|
||||
// abort the context, as we shouldn't have changed it!!
|
||||
context.abort();
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize the schema registry. If the parameter 'schema' is null or empty, save all schemas
|
||||
* Compose the schema registry. If the parameter 'schema' is null or empty, save all schemas.
|
||||
*
|
||||
* @param context DSpace Context
|
||||
* @param xmlSerializer XML serializer
|
||||
* @param document the document being built
|
||||
* @param schema schema (may be null to save all)
|
||||
* @throws SQLException if database error
|
||||
* @throws SAXException if XML error
|
||||
* @throws RegistryExportException if export error
|
||||
*/
|
||||
public static void saveSchema(Context context, XMLSerializer xmlSerializer, String schema)
|
||||
throws SQLException, SAXException, RegistryExportException {
|
||||
public static void saveSchema(Context context, Document document, String schema)
|
||||
throws SQLException, RegistryExportException {
|
||||
if (schema != null && !"".equals(schema)) {
|
||||
// Find a single named schema
|
||||
MetadataSchema mdSchema = metadataSchemaService.find(context, schema);
|
||||
|
||||
saveSchema(xmlSerializer, mdSchema);
|
||||
saveSchema(document, mdSchema);
|
||||
} else {
|
||||
// Find all schemas
|
||||
List<MetadataSchema> mdSchemas = metadataSchemaService.findAll(context);
|
||||
|
||||
for (MetadataSchema mdSchema : mdSchemas) {
|
||||
saveSchema(xmlSerializer, mdSchema);
|
||||
saveSchema(document, mdSchema);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize a single schema (namespace) registry entry
|
||||
* Compose a single schema (namespace) registry entry
|
||||
*
|
||||
* @param xmlSerializer XML serializer
|
||||
* @param mdSchema DSpace metadata schema
|
||||
* @throws SAXException if XML error
|
||||
* @param document the output document being built.
|
||||
* @param mdSchema DSpace metadata schema
|
||||
* @throws RegistryExportException if export error
|
||||
*/
|
||||
private static void saveSchema(XMLSerializer xmlSerializer, MetadataSchema mdSchema)
|
||||
throws SAXException, RegistryExportException {
|
||||
private static void saveSchema(Document document, MetadataSchema mdSchema)
|
||||
throws RegistryExportException {
|
||||
// If we haven't got a schema, it's an error
|
||||
if (mdSchema == null) {
|
||||
throw new RegistryExportException("no schema to export");
|
||||
@@ -206,35 +221,34 @@ public class MetadataExporter {
|
||||
return;
|
||||
}
|
||||
|
||||
// Output the parent tag
|
||||
xmlSerializer.startElement("dc-schema", null);
|
||||
Element document_element = document.getDocumentElement();
|
||||
|
||||
// Output the schema name
|
||||
xmlSerializer.startElement("name", null);
|
||||
xmlSerializer.characters(name.toCharArray(), 0, name.length());
|
||||
xmlSerializer.endElement("name");
|
||||
// Compose the parent tag
|
||||
Element schema_element = document.createElement("dc-schema");
|
||||
document_element.appendChild(schema_element);
|
||||
|
||||
// Output the schema namespace
|
||||
xmlSerializer.startElement("namespace", null);
|
||||
xmlSerializer.characters(namespace.toCharArray(), 0, namespace.length());
|
||||
xmlSerializer.endElement("namespace");
|
||||
// Compose the schema name
|
||||
Element name_element = document.createElement("name");
|
||||
schema_element.appendChild(name_element);
|
||||
name_element.setTextContent(name);
|
||||
|
||||
xmlSerializer.endElement("dc-schema");
|
||||
// Compose the schema namespace
|
||||
Element namespace_element = document.createElement("namespace");
|
||||
schema_element.appendChild(namespace_element);
|
||||
namespace_element.setTextContent(namespace);
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize a single metadata field registry entry to xml
|
||||
* Compose a single metadata field registry entry to XML.
|
||||
*
|
||||
* @param context DSpace context
|
||||
* @param xmlSerializer xml serializer
|
||||
* @param document the output document being built.
|
||||
* @param mdField DSpace metadata field
|
||||
* @throws SAXException if XML error
|
||||
* @throws RegistryExportException if export error
|
||||
* @throws SQLException if database error
|
||||
* @throws IOException if IO error
|
||||
*/
|
||||
private static void saveType(Context context, XMLSerializer xmlSerializer, MetadataField mdField)
|
||||
throws SAXException, RegistryExportException, SQLException, IOException {
|
||||
private static void saveType(Context context, Document document, MetadataField mdField)
|
||||
throws RegistryExportException, SQLException {
|
||||
// If we haven't been given a field, it's an error
|
||||
if (mdField == null) {
|
||||
throw new RegistryExportException("no field to export");
|
||||
@@ -251,38 +265,39 @@ public class MetadataExporter {
|
||||
throw new RegistryExportException("incomplete field information");
|
||||
}
|
||||
|
||||
// Output the parent tag
|
||||
xmlSerializer.startElement("dc-type", null);
|
||||
Element document_element = document.getDocumentElement();
|
||||
|
||||
// Output the schema name
|
||||
xmlSerializer.startElement("schema", null);
|
||||
xmlSerializer.characters(schemaName.toCharArray(), 0, schemaName.length());
|
||||
xmlSerializer.endElement("schema");
|
||||
// Compose the parent tag
|
||||
Element dc_type = document.createElement("dc-type");
|
||||
document_element.appendChild(dc_type);
|
||||
|
||||
// Output the element
|
||||
xmlSerializer.startElement("element", null);
|
||||
xmlSerializer.characters(element.toCharArray(), 0, element.length());
|
||||
xmlSerializer.endElement("element");
|
||||
// Compose the schema name
|
||||
Element schema_element = document.createElement("schema");
|
||||
dc_type.appendChild(schema_element);
|
||||
schema_element.setTextContent(schemaName);
|
||||
|
||||
// Output the qualifier, if present
|
||||
// Compose the element
|
||||
Element element_element = document.createElement("element");
|
||||
dc_type.appendChild(element_element);
|
||||
element_element.setTextContent(element);
|
||||
|
||||
// Compose the qualifier, if present
|
||||
if (qualifier != null) {
|
||||
xmlSerializer.startElement("qualifier", null);
|
||||
xmlSerializer.characters(qualifier.toCharArray(), 0, qualifier.length());
|
||||
xmlSerializer.endElement("qualifier");
|
||||
Element qualifier_element = document.createElement("qualifier");
|
||||
dc_type.appendChild(qualifier_element);
|
||||
qualifier_element.setTextContent(qualifier);
|
||||
} else {
|
||||
xmlSerializer.comment("unqualified");
|
||||
dc_type.appendChild(document.createComment("unqualified"));
|
||||
}
|
||||
|
||||
// Output the scope note, if present
|
||||
// Compose the scope note, if present
|
||||
if (scopeNote != null) {
|
||||
xmlSerializer.startElement("scope_note", null);
|
||||
xmlSerializer.characters(scopeNote.toCharArray(), 0, scopeNote.length());
|
||||
xmlSerializer.endElement("scope_note");
|
||||
Element scope_element = document.createElement("scope_note");
|
||||
dc_type.appendChild(scope_element);
|
||||
scope_element.setTextContent(scopeNote);
|
||||
} else {
|
||||
xmlSerializer.comment("no scope note");
|
||||
dc_type.appendChild(document.createComment("no scope note"));
|
||||
}
|
||||
|
||||
xmlSerializer.endElement("dc-type");
|
||||
}
|
||||
|
||||
static Map<Integer, String> schemaMap = new HashMap<Integer, String>();
|
||||
@@ -317,7 +332,7 @@ public class MetadataExporter {
|
||||
}
|
||||
|
||||
/**
|
||||
* Print the usage message to stdout
|
||||
* Print the usage message to standard output
|
||||
*/
|
||||
public static void usage() {
|
||||
String usage = "Use this class with the following options:\n" +
|
||||
|
@@ -14,9 +14,9 @@ import javax.xml.transform.TransformerException;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
import org.apache.xpath.XPathAPI;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.MetadataField;
|
||||
@@ -94,7 +94,7 @@ public class MetadataImporter {
|
||||
boolean forceUpdate = false;
|
||||
|
||||
// create an options object and populate it
|
||||
CommandLineParser parser = new PosixParser();
|
||||
CommandLineParser parser = new DefaultParser();
|
||||
Options options = new Options();
|
||||
options.addOption("f", "file", true, "source xml file for DC fields");
|
||||
options.addOption("u", "update", false, "update an existing schema");
|
||||
|
@@ -7,6 +7,14 @@
|
||||
*/
|
||||
package org.dspace.administer;
|
||||
|
||||
import static org.dspace.content.service.DSpaceObjectService.MD_COPYRIGHT_TEXT;
|
||||
import static org.dspace.content.service.DSpaceObjectService.MD_INTRODUCTORY_TEXT;
|
||||
import static org.dspace.content.service.DSpaceObjectService.MD_LICENSE;
|
||||
import static org.dspace.content.service.DSpaceObjectService.MD_NAME;
|
||||
import static org.dspace.content.service.DSpaceObjectService.MD_PROVENANCE_DESCRIPTION;
|
||||
import static org.dspace.content.service.DSpaceObjectService.MD_SHORT_DESCRIPTION;
|
||||
import static org.dspace.content.service.DSpaceObjectService.MD_SIDEBAR_TEXT;
|
||||
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FileOutputStream;
|
||||
@@ -35,6 +43,7 @@ import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Community;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.MetadataFieldName;
|
||||
import org.dspace.content.MetadataSchemaEnum;
|
||||
import org.dspace.content.MetadataValue;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
@@ -88,12 +97,12 @@ public class StructBuilder {
|
||||
/**
|
||||
* A table to hold metadata for the collection being worked on.
|
||||
*/
|
||||
private static final Map<String, String> collectionMap = new HashMap<>();
|
||||
private static final Map<String, MetadataFieldName> collectionMap = new HashMap<>();
|
||||
|
||||
/**
|
||||
* A table to hold metadata for the community being worked on.
|
||||
*/
|
||||
private static final Map<String, String> communityMap = new HashMap<>();
|
||||
private static final Map<String, MetadataFieldName> communityMap = new HashMap<>();
|
||||
|
||||
protected static CommunityService communityService
|
||||
= ContentServiceFactory.getInstance().getCommunityService();
|
||||
@@ -261,19 +270,19 @@ public class StructBuilder {
|
||||
}
|
||||
|
||||
// load the mappings into the member variable hashmaps
|
||||
communityMap.put("name", "name");
|
||||
communityMap.put("description", "short_description");
|
||||
communityMap.put("intro", "introductory_text");
|
||||
communityMap.put("copyright", "copyright_text");
|
||||
communityMap.put("sidebar", "side_bar_text");
|
||||
communityMap.put("name", MD_NAME);
|
||||
communityMap.put("description", MD_SHORT_DESCRIPTION);
|
||||
communityMap.put("intro", MD_INTRODUCTORY_TEXT);
|
||||
communityMap.put("copyright", MD_COPYRIGHT_TEXT);
|
||||
communityMap.put("sidebar", MD_SIDEBAR_TEXT);
|
||||
|
||||
collectionMap.put("name", "name");
|
||||
collectionMap.put("description", "short_description");
|
||||
collectionMap.put("intro", "introductory_text");
|
||||
collectionMap.put("copyright", "copyright_text");
|
||||
collectionMap.put("sidebar", "side_bar_text");
|
||||
collectionMap.put("license", "license");
|
||||
collectionMap.put("provenance", "provenance_description");
|
||||
collectionMap.put("name", MD_NAME);
|
||||
collectionMap.put("description", MD_SHORT_DESCRIPTION);
|
||||
collectionMap.put("intro", MD_INTRODUCTORY_TEXT);
|
||||
collectionMap.put("copyright", MD_COPYRIGHT_TEXT);
|
||||
collectionMap.put("sidebar", MD_SIDEBAR_TEXT);
|
||||
collectionMap.put("license", MD_LICENSE);
|
||||
collectionMap.put("provenance", MD_PROVENANCE_DESCRIPTION);
|
||||
|
||||
Element[] elements = new Element[]{};
|
||||
try {
|
||||
@@ -619,14 +628,16 @@ public class StructBuilder {
|
||||
}
|
||||
|
||||
// default the short description to be an empty string
|
||||
communityService.setMetadata(context, community, "short_description", " ");
|
||||
communityService.setMetadataSingleValue(context, community,
|
||||
MD_SHORT_DESCRIPTION, null, " ");
|
||||
|
||||
// now update the metadata
|
||||
Node tn = communities.item(i);
|
||||
for (Map.Entry<String, String> entry : communityMap.entrySet()) {
|
||||
for (Map.Entry<String, MetadataFieldName> entry : communityMap.entrySet()) {
|
||||
NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey());
|
||||
if (nl.getLength() == 1) {
|
||||
communityService.setMetadata(context, community, entry.getValue(), getStringValue(nl.item(0)));
|
||||
communityService.setMetadataSingleValue(context, community,
|
||||
entry.getValue(), null, getStringValue(nl.item(0)));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -650,30 +661,41 @@ public class StructBuilder {
|
||||
element.setAttribute("identifier", community.getHandle());
|
||||
|
||||
Element nameElement = new Element("name");
|
||||
nameElement.setText(communityService.getMetadata(community, "name"));
|
||||
nameElement.setText(communityService.getMetadataFirstValue(
|
||||
community, CommunityService.MD_NAME, Item.ANY));
|
||||
element.addContent(nameElement);
|
||||
|
||||
if (communityService.getMetadata(community, "short_description") != null) {
|
||||
String fieldValue;
|
||||
|
||||
fieldValue = communityService.getMetadataFirstValue(community,
|
||||
CommunityService.MD_SHORT_DESCRIPTION, Item.ANY);
|
||||
if (fieldValue != null) {
|
||||
Element descriptionElement = new Element("description");
|
||||
descriptionElement.setText(communityService.getMetadata(community, "short_description"));
|
||||
descriptionElement.setText(fieldValue);
|
||||
element.addContent(descriptionElement);
|
||||
}
|
||||
|
||||
if (communityService.getMetadata(community, "introductory_text") != null) {
|
||||
fieldValue = communityService.getMetadataFirstValue(community,
|
||||
CommunityService.MD_INTRODUCTORY_TEXT, Item.ANY);
|
||||
if (fieldValue != null) {
|
||||
Element introElement = new Element("intro");
|
||||
introElement.setText(communityService.getMetadata(community, "introductory_text"));
|
||||
introElement.setText(fieldValue);
|
||||
element.addContent(introElement);
|
||||
}
|
||||
|
||||
if (communityService.getMetadata(community, "copyright_text") != null) {
|
||||
fieldValue = communityService.getMetadataFirstValue(community,
|
||||
CommunityService.MD_COPYRIGHT_TEXT, Item.ANY);
|
||||
if (fieldValue != null) {
|
||||
Element copyrightElement = new Element("copyright");
|
||||
copyrightElement.setText(communityService.getMetadata(community, "copyright_text"));
|
||||
copyrightElement.setText(fieldValue);
|
||||
element.addContent(copyrightElement);
|
||||
}
|
||||
|
||||
if (communityService.getMetadata(community, "side_bar_text") != null) {
|
||||
fieldValue = communityService.getMetadataFirstValue(community,
|
||||
CommunityService.MD_SIDEBAR_TEXT, Item.ANY);
|
||||
if (fieldValue != null) {
|
||||
Element sidebarElement = new Element("sidebar");
|
||||
sidebarElement.setText(communityService.getMetadata(community, "side_bar_text"));
|
||||
sidebarElement.setText(fieldValue);
|
||||
element.addContent(sidebarElement);
|
||||
}
|
||||
|
||||
@@ -717,14 +739,16 @@ public class StructBuilder {
|
||||
Collection collection = collectionService.create(context, parent);
|
||||
|
||||
// default the short description to the empty string
|
||||
collectionService.setMetadata(context, collection, "short_description", " ");
|
||||
collectionService.setMetadataSingleValue(context, collection,
|
||||
MD_SHORT_DESCRIPTION, Item.ANY, " ");
|
||||
|
||||
// import the rest of the metadata
|
||||
Node tn = collections.item(i);
|
||||
for (Map.Entry<String, String> entry : collectionMap.entrySet()) {
|
||||
for (Map.Entry<String, MetadataFieldName> entry : collectionMap.entrySet()) {
|
||||
NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey());
|
||||
if (nl.getLength() == 1) {
|
||||
collectionService.setMetadata(context, collection, entry.getValue(), getStringValue(nl.item(0)));
|
||||
collectionService.setMetadataSingleValue(context, collection,
|
||||
entry.getValue(), null, getStringValue(nl.item(0)));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -733,42 +757,57 @@ public class StructBuilder {
|
||||
element.setAttribute("identifier", collection.getHandle());
|
||||
|
||||
Element nameElement = new Element("name");
|
||||
nameElement.setText(collectionService.getMetadata(collection, "name"));
|
||||
nameElement.setText(collectionService.getMetadataFirstValue(collection,
|
||||
CollectionService.MD_NAME, Item.ANY));
|
||||
element.addContent(nameElement);
|
||||
|
||||
if (collectionService.getMetadata(collection, "short_description") != null) {
|
||||
String fieldValue;
|
||||
|
||||
fieldValue = collectionService.getMetadataFirstValue(collection,
|
||||
CollectionService.MD_SHORT_DESCRIPTION, Item.ANY);
|
||||
if (fieldValue != null) {
|
||||
Element descriptionElement = new Element("description");
|
||||
descriptionElement.setText(collectionService.getMetadata(collection, "short_description"));
|
||||
descriptionElement.setText(fieldValue);
|
||||
element.addContent(descriptionElement);
|
||||
}
|
||||
|
||||
if (collectionService.getMetadata(collection, "introductory_text") != null) {
|
||||
fieldValue = collectionService.getMetadataFirstValue(collection,
|
||||
CollectionService.MD_INTRODUCTORY_TEXT, Item.ANY);
|
||||
if (fieldValue != null) {
|
||||
Element introElement = new Element("intro");
|
||||
introElement.setText(collectionService.getMetadata(collection, "introductory_text"));
|
||||
introElement.setText(fieldValue);
|
||||
element.addContent(introElement);
|
||||
}
|
||||
|
||||
if (collectionService.getMetadata(collection, "copyright_text") != null) {
|
||||
fieldValue = collectionService.getMetadataFirstValue(collection,
|
||||
CollectionService.MD_COPYRIGHT_TEXT, Item.ANY);
|
||||
if (fieldValue != null) {
|
||||
Element copyrightElement = new Element("copyright");
|
||||
copyrightElement.setText(collectionService.getMetadata(collection, "copyright_text"));
|
||||
copyrightElement.setText(fieldValue);
|
||||
element.addContent(copyrightElement);
|
||||
}
|
||||
|
||||
if (collectionService.getMetadata(collection, "side_bar_text") != null) {
|
||||
fieldValue = collectionService.getMetadataFirstValue(collection,
|
||||
CollectionService.MD_SIDEBAR_TEXT, Item.ANY);
|
||||
if (fieldValue != null) {
|
||||
Element sidebarElement = new Element("sidebar");
|
||||
sidebarElement.setText(collectionService.getMetadata(collection, "side_bar_text"));
|
||||
sidebarElement.setText(fieldValue);
|
||||
element.addContent(sidebarElement);
|
||||
}
|
||||
|
||||
if (collectionService.getMetadata(collection, "license") != null) {
|
||||
fieldValue = collectionService.getMetadataFirstValue(collection,
|
||||
CollectionService.MD_LICENSE, Item.ANY);
|
||||
if (fieldValue != null) {
|
||||
Element sidebarElement = new Element("license");
|
||||
sidebarElement.setText(collectionService.getMetadata(collection, "license"));
|
||||
sidebarElement.setText(fieldValue);
|
||||
element.addContent(sidebarElement);
|
||||
}
|
||||
|
||||
if (collectionService.getMetadata(collection, "provenance_description") != null) {
|
||||
fieldValue = collectionService.getMetadataFirstValue(collection,
|
||||
CollectionService.MD_PROVENANCE_DESCRIPTION, Item.ANY);
|
||||
if (fieldValue != null) {
|
||||
Element sidebarElement = new Element("provenance");
|
||||
sidebarElement.setText(collectionService.getMetadata(collection, "provenance_description"));
|
||||
sidebarElement.setText(fieldValue);
|
||||
element.addContent(sidebarElement);
|
||||
}
|
||||
|
||||
@@ -777,5 +816,4 @@ public class StructBuilder {
|
||||
|
||||
return elements;
|
||||
}
|
||||
|
||||
}
|
||||
|
@@ -175,11 +175,14 @@ public class DSpaceCSV implements Serializable {
|
||||
headings.add(element);
|
||||
} else if (!"id".equals(element)) {
|
||||
String authorityPrefix = "";
|
||||
AuthorityValue authorityValueType = authorityValueService.getAuthorityValueType(element);
|
||||
if (authorityValueType != null) {
|
||||
String authorityType = authorityValueType.getAuthorityType();
|
||||
authorityPrefix = element.substring(0, authorityType.length() + 1);
|
||||
element = element.substring(authorityPrefix.length());
|
||||
if (StringUtils.startsWith(element, "[authority]")) {
|
||||
element = StringUtils.substringAfter(element, "[authority]");
|
||||
AuthorityValue authorityValueType = authorityValueService.getAuthorityValueType(element);
|
||||
if (authorityValueType != null) {
|
||||
String authorityType = authorityValueType.getAuthorityType();
|
||||
authorityPrefix = element.substring(0, authorityType.length() + 1);
|
||||
element = element.substring(authorityPrefix.length());
|
||||
}
|
||||
}
|
||||
|
||||
// Verify that the heading is valid in the metadata registry
|
||||
@@ -303,7 +306,7 @@ public class DSpaceCSV implements Serializable {
|
||||
// Specify default values
|
||||
String[] defaultValues =
|
||||
new String[] {
|
||||
"dc.date.accessioned, dc.date.available, dc.date.updated, dc.description.provenance"
|
||||
"dc.date.accessioned", "dc.date.available", "dc.date.updated", "dc.description.provenance"
|
||||
};
|
||||
String[] toIgnoreArray =
|
||||
DSpaceServicesFactory.getInstance()
|
||||
|
@@ -10,10 +10,14 @@ package org.dspace.app.bulkedit;
|
||||
import java.sql.SQLException;
|
||||
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.MetadataDSpaceCsvExportService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.eperson.factory.EPersonServiceFactory;
|
||||
import org.dspace.eperson.service.EPersonService;
|
||||
import org.dspace.handle.factory.HandleServiceFactory;
|
||||
import org.dspace.scripts.DSpaceRunnable;
|
||||
import org.dspace.utils.DSpace;
|
||||
|
||||
@@ -41,8 +45,7 @@ public class MetadataExport extends DSpaceRunnable<MetadataExportScriptConfigura
|
||||
public void internalRun() throws Exception {
|
||||
|
||||
if (help) {
|
||||
handler.logInfo("\nfull export: metadata-export -f filename");
|
||||
handler.logInfo("partial export: metadata-export -i handle -f filename");
|
||||
logHelpInfo();
|
||||
printHelp();
|
||||
return;
|
||||
}
|
||||
@@ -61,6 +64,11 @@ public class MetadataExport extends DSpaceRunnable<MetadataExportScriptConfigura
|
||||
context.complete();
|
||||
}
|
||||
|
||||
protected void logHelpInfo() {
|
||||
handler.logInfo("\nfull export: metadata-export");
|
||||
handler.logInfo("partial export: metadata-export -i handle");
|
||||
}
|
||||
|
||||
@Override
|
||||
public MetadataExportScriptConfiguration getScriptConfiguration() {
|
||||
return new DSpace().getServiceManager().getServiceByName("metadata-export",
|
||||
@@ -75,17 +83,32 @@ public class MetadataExport extends DSpaceRunnable<MetadataExportScriptConfigura
|
||||
return;
|
||||
}
|
||||
|
||||
// Check a filename is given
|
||||
if (!commandLine.hasOption('f')) {
|
||||
throw new ParseException("Required parameter -f missing!");
|
||||
}
|
||||
filename = commandLine.getOptionValue('f');
|
||||
|
||||
exportAllMetadata = commandLine.hasOption('a');
|
||||
|
||||
if (!commandLine.hasOption('i')) {
|
||||
exportAllItems = true;
|
||||
}
|
||||
handle = commandLine.getOptionValue('i');
|
||||
filename = getFileNameForExportFile();
|
||||
|
||||
exportAllMetadata = commandLine.hasOption('a');
|
||||
|
||||
}
|
||||
|
||||
protected String getFileNameForExportFile() throws ParseException {
|
||||
Context context = new Context();
|
||||
try {
|
||||
DSpaceObject dso = null;
|
||||
if (StringUtils.isNotBlank(handle)) {
|
||||
dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, handle);
|
||||
} else {
|
||||
dso = ContentServiceFactory.getInstance().getSiteService().findSite(context);
|
||||
}
|
||||
if (dso == null) {
|
||||
throw new ParseException("A handle got given that wasn't able to be parsed to a DSpaceObject");
|
||||
}
|
||||
return dso.getID().toString() + ".csv";
|
||||
} catch (SQLException e) {
|
||||
handler.handleException("Something went wrong trying to retrieve DSO for handle: " + handle, e);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
@@ -0,0 +1,33 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.bulkedit;
|
||||
|
||||
import org.apache.commons.cli.ParseException;
|
||||
|
||||
public class MetadataExportCli extends MetadataExport {
|
||||
|
||||
@Override
|
||||
protected String getFileNameForExportFile() {
|
||||
return commandLine.getOptionValue('f');
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setup() throws ParseException {
|
||||
super.setup();
|
||||
// Check a filename is given
|
||||
if (!commandLine.hasOption('f')) {
|
||||
throw new ParseException("Required parameter -f missing!");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void logHelpInfo() {
|
||||
handler.logInfo("\nfull export: metadata-export -f filename");
|
||||
handler.logInfo("partial export: metadata-export -i handle -f filename");
|
||||
}
|
||||
}
|
@@ -0,0 +1,26 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.bulkedit;
|
||||
|
||||
import java.io.OutputStream;
|
||||
|
||||
import org.apache.commons.cli.Options;
|
||||
|
||||
public class MetadataExportCliScriptConfiguration extends MetadataExportScriptConfiguration<MetadataExportCli> {
|
||||
|
||||
|
||||
@Override
|
||||
public Options getOptions() {
|
||||
Options options = super.getOptions();
|
||||
options.addOption("f", "file", true, "destination where you want file written");
|
||||
options.getOption("f").setType(OutputStream .class);
|
||||
options.getOption("f").setRequired(true);
|
||||
super.options = options;
|
||||
return options;
|
||||
}
|
||||
}
|
@@ -7,7 +7,6 @@
|
||||
*/
|
||||
package org.dspace.app.bulkedit;
|
||||
|
||||
import java.io.OutputStream;
|
||||
import java.sql.SQLException;
|
||||
|
||||
import org.apache.commons.cli.Options;
|
||||
@@ -56,9 +55,6 @@ public class MetadataExportScriptConfiguration<T extends MetadataExport> extends
|
||||
|
||||
options.addOption("i", "id", true, "ID or handle of thing to export (item, collection, or community)");
|
||||
options.getOption("i").setType(String.class);
|
||||
options.addOption("f", "file", true, "destination where you want file written");
|
||||
options.getOption("f").setType(OutputStream.class);
|
||||
options.getOption("f").setRequired(true);
|
||||
options.addOption("a", "all", false,
|
||||
"include all metadata fields that are not normally changed (e.g. provenance)");
|
||||
options.getOption("a").setType(boolean.class);
|
||||
|
@@ -51,7 +51,6 @@ import org.dspace.content.service.MetadataValueService;
|
||||
import org.dspace.content.service.RelationshipService;
|
||||
import org.dspace.content.service.RelationshipTypeService;
|
||||
import org.dspace.content.service.WorkspaceItemService;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Constants;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.LogManager;
|
||||
@@ -61,6 +60,8 @@ import org.dspace.handle.factory.HandleServiceFactory;
|
||||
import org.dspace.handle.service.HandleService;
|
||||
import org.dspace.scripts.DSpaceRunnable;
|
||||
import org.dspace.scripts.handler.DSpaceRunnableHandler;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.dspace.utils.DSpace;
|
||||
import org.dspace.workflow.WorkflowException;
|
||||
import org.dspace.workflow.WorkflowItem;
|
||||
@@ -113,14 +114,14 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
|
||||
*
|
||||
* @see #populateRefAndRowMap(DSpaceCSVLine, UUID)
|
||||
*/
|
||||
protected static HashMap<UUID, String> entityTypeMap = new HashMap<>();
|
||||
protected HashMap<UUID, String> entityTypeMap = new HashMap<>();
|
||||
|
||||
/**
|
||||
* Map of UUIDs to their relations that are referenced within any import with their referers.
|
||||
* Map of UUIDs to their relations that are referenced within any import with their referrers.
|
||||
*
|
||||
* @see #populateEntityRelationMap(String, String, String)
|
||||
*/
|
||||
protected static HashMap<String, HashMap<String, ArrayList<String>>> entityRelationMap = new HashMap<>();
|
||||
protected HashMap<String, HashMap<String, ArrayList<String>>> entityRelationMap = new HashMap<>();
|
||||
|
||||
|
||||
/**
|
||||
@@ -129,7 +130,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
|
||||
protected ArrayList<String> relationValidationErrors = new ArrayList<>();
|
||||
|
||||
/**
|
||||
* Counter of rows proccssed in a CSV.
|
||||
* Counter of rows processed in a CSV.
|
||||
*/
|
||||
protected Integer rowCount = 1;
|
||||
|
||||
@@ -158,6 +159,8 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
|
||||
protected EntityService entityService = ContentServiceFactory.getInstance().getEntityService();
|
||||
protected AuthorityValueService authorityValueService = AuthorityServiceFactory.getInstance()
|
||||
.getAuthorityValueService();
|
||||
protected ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
|
||||
/**
|
||||
* Create an instance of the metadata importer. Requires a context and an array of CSV lines
|
||||
@@ -182,24 +185,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
|
||||
c.turnOffAuthorisationSystem();
|
||||
|
||||
// Find the EPerson, assign to context
|
||||
try {
|
||||
if (commandLine.hasOption('e')) {
|
||||
EPerson eperson;
|
||||
String e = commandLine.getOptionValue('e');
|
||||
if (e.indexOf('@') != -1) {
|
||||
eperson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(c, e);
|
||||
} else {
|
||||
eperson = EPersonServiceFactory.getInstance().getEPersonService().find(c, UUID.fromString(e));
|
||||
}
|
||||
|
||||
if (eperson == null) {
|
||||
throw new ParseException("Error, eperson cannot be found: " + e);
|
||||
}
|
||||
c.setCurrentUser(eperson);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
throw new ParseException("Unable to find DSpace user: " + e.getMessage());
|
||||
}
|
||||
assignCurrentUserInContext(c);
|
||||
|
||||
if (authorityControlled == null) {
|
||||
setAuthorizedMetadataFields();
|
||||
@@ -277,6 +263,18 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
|
||||
|
||||
}
|
||||
|
||||
protected void assignCurrentUserInContext(Context context) throws ParseException {
|
||||
UUID uuid = getEpersonIdentifier();
|
||||
if (uuid != null) {
|
||||
try {
|
||||
EPerson ePerson = EPersonServiceFactory.getInstance().getEPersonService().find(context, uuid);
|
||||
context.setCurrentUser(ePerson);
|
||||
} catch (SQLException e) {
|
||||
log.error("Something went wrong trying to fetch the eperson for uuid: " + uuid, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This method determines whether the changes should be applied or not. This is default set to true for the REST
|
||||
* script as we don't want to interact with the caller. This will be overwritten in the CLI script to ask for
|
||||
@@ -312,9 +310,6 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
|
||||
throw new ParseException("Required parameter -f missing!");
|
||||
}
|
||||
filename = commandLine.getOptionValue('f');
|
||||
if (!commandLine.hasOption('e')) {
|
||||
throw new ParseException("Required parameter -e missing!");
|
||||
}
|
||||
|
||||
// Option to apply template to new items
|
||||
if (commandLine.hasOption('t')) {
|
||||
@@ -427,7 +422,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
|
||||
// Do nothing
|
||||
} else if ("expunge".equals(action)) {
|
||||
// Does the configuration allow deletes?
|
||||
if (!ConfigurationManager.getBooleanProperty("bulkedit", "allowexpunge", false)) {
|
||||
if (!configurationService.getBooleanProperty("bulkedit.allowexpunge", false)) {
|
||||
throw new MetadataImportException("'expunge' action denied by configuration");
|
||||
}
|
||||
|
||||
@@ -1376,12 +1371,12 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
|
||||
* Set authority controlled fields
|
||||
*/
|
||||
private void setAuthorizedMetadataFields() {
|
||||
authorityControlled = new HashSet<String>();
|
||||
Enumeration propertyNames = ConfigurationManager.getProperties().propertyNames();
|
||||
authorityControlled = new HashSet<>();
|
||||
Enumeration propertyNames = configurationService.getProperties().propertyNames();
|
||||
while (propertyNames.hasMoreElements()) {
|
||||
String key = ((String) propertyNames.nextElement()).trim();
|
||||
if (key.startsWith(AC_PREFIX)
|
||||
&& ConfigurationManager.getBooleanProperty(key, false)) {
|
||||
&& configurationService.getBooleanProperty(key, false)) {
|
||||
authorityControlled.add(key.substring(AC_PREFIX.length()));
|
||||
}
|
||||
}
|
||||
@@ -1411,16 +1406,16 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
|
||||
//Populate the EntityRelationMap
|
||||
populateEntityRelationMap(uuid, key, originId.toString());
|
||||
}
|
||||
} else {
|
||||
newLine.add(key, null);
|
||||
}
|
||||
} else {
|
||||
if (line.get(key).size() > 1) {
|
||||
if (line.get(key).size() > 0) {
|
||||
for (String value : line.get(key)) {
|
||||
newLine.add(key, value);
|
||||
}
|
||||
} else {
|
||||
if (line.get(key).size() > 0) {
|
||||
newLine.add(key, line.get(key).get(0));
|
||||
}
|
||||
newLine.add(key, null);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1525,6 +1520,9 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
|
||||
throw new MetadataImportException("Error in CSV row " + rowCount + ":\n" +
|
||||
"Not a UUID or indirect entity reference: '" + reference + "'");
|
||||
}
|
||||
}
|
||||
if (reference.contains("::virtual::")) {
|
||||
return UUID.fromString(StringUtils.substringBefore(reference, "::virtual::"));
|
||||
} else if (!reference.startsWith("rowName:")) { // Not a rowName ref; so it's a metadata value reference
|
||||
MetadataValueService metadataValueService = ContentServiceFactory.getInstance().getMetadataValueService();
|
||||
MetadataFieldService metadataFieldService =
|
||||
@@ -1696,19 +1694,39 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
|
||||
// Add to errors if Realtionship.type cannot be derived.
|
||||
Item originItem = null;
|
||||
if (itemService.find(c, UUID.fromString(targetUUID)) != null) {
|
||||
originItem = itemService.find(c, UUID.fromString(originRefererUUID));
|
||||
List<MetadataValue> relTypes = itemService.
|
||||
getMetadata(originItem, "relationship",
|
||||
"type", null, Item.ANY);
|
||||
String relTypeValue = null;
|
||||
if (relTypes.size() > 0) {
|
||||
relTypeValue = relTypes.get(0).getValue();
|
||||
DSpaceCSVLine dSpaceCSVLine = this.csv.getCSVLines()
|
||||
.get(Integer.valueOf(originRow) - 1);
|
||||
List<String> relTypes = dSpaceCSVLine.get("relationship.type");
|
||||
if (relTypes == null || relTypes.isEmpty()) {
|
||||
dSpaceCSVLine.get("relationship.type[]");
|
||||
}
|
||||
|
||||
if (relTypes != null && relTypes.size() > 0) {
|
||||
String relTypeValue = relTypes.get(0);
|
||||
relTypeValue = StringUtils.remove(relTypeValue, "\"").trim();
|
||||
originType = entityTypeService.findByEntityType(c, relTypeValue).getLabel();
|
||||
validateTypesByTypeByTypeName(c, targetType, originType, typeName, originRow);
|
||||
} else {
|
||||
relationValidationErrors.add("Error on CSV row " + originRow + ":" + "\n" +
|
||||
"Cannot resolve Entity type for reference: "
|
||||
+ originRefererUUID);
|
||||
originItem = itemService.find(c, UUID.fromString(originRefererUUID));
|
||||
if (originItem != null) {
|
||||
List<MetadataValue> mdv = itemService.getMetadata(originItem,
|
||||
"relationship",
|
||||
"type", null,
|
||||
Item.ANY);
|
||||
if (!mdv.isEmpty()) {
|
||||
String relTypeValue = mdv.get(0).getValue();
|
||||
originType = entityTypeService.findByEntityType(c, relTypeValue).getLabel();
|
||||
validateTypesByTypeByTypeName(c, targetType, originType, typeName,
|
||||
originRow);
|
||||
} else {
|
||||
relationValidationErrors.add("Error on CSV row " + originRow + ":" + "\n" +
|
||||
"Cannot resolve Entity type for reference: " + originRefererUUID);
|
||||
}
|
||||
} else {
|
||||
relationValidationErrors.add("Error on CSV row " + originRow + ":" + "\n" +
|
||||
"Cannot resolve Entity type for reference: "
|
||||
+ originRefererUUID);
|
||||
}
|
||||
}
|
||||
|
||||
} else {
|
||||
@@ -1735,11 +1753,11 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a list of potenital Relationship Types given a typeName and attempts to match the given
|
||||
* Generates a list of potential Relationship Types given a typeName and attempts to match the given
|
||||
* targetType and originType to a Relationship Type in the list.
|
||||
*
|
||||
* @param targetType entity type of target.
|
||||
* @param originType entity type of origin referer.
|
||||
* @param originType entity type of origin referrer.
|
||||
* @param typeName left or right typeName of the respective Relationship.
|
||||
* @return the UUID of the item.
|
||||
*/
|
||||
|
@@ -10,7 +10,12 @@ package org.dspace.app.bulkedit;
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.eperson.factory.EPersonServiceFactory;
|
||||
import org.dspace.scripts.handler.DSpaceRunnableHandler;
|
||||
|
||||
/**
|
||||
@@ -30,4 +35,34 @@ public class MetadataImportCLI extends MetadataImport {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void assignCurrentUserInContext(Context context) throws ParseException {
|
||||
try {
|
||||
if (commandLine.hasOption('e')) {
|
||||
EPerson eperson;
|
||||
String e = commandLine.getOptionValue('e');
|
||||
if (e.indexOf('@') != -1) {
|
||||
eperson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, e);
|
||||
} else {
|
||||
eperson = EPersonServiceFactory.getInstance().getEPersonService().find(context, UUID.fromString(e));
|
||||
}
|
||||
|
||||
if (eperson == null) {
|
||||
throw new ParseException("Error, eperson cannot be found: " + e);
|
||||
}
|
||||
context.setCurrentUser(eperson);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
throw new ParseException("Unable to find DSpace user: " + e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setup() throws ParseException {
|
||||
super.setup();
|
||||
if (!commandLine.hasOption('e')) {
|
||||
throw new ParseException("Required parameter -e missing!");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -7,10 +7,21 @@
|
||||
*/
|
||||
package org.dspace.app.bulkedit;
|
||||
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||
|
||||
/**
|
||||
* The {@link ScriptConfiguration} for the {@link org.dspace.app.bulkedit.MetadataImportCLI} CLI script
|
||||
*/
|
||||
public class MetadataImportCliScriptConfiguration extends MetadataImportScriptConfiguration<MetadataImportCLI> {
|
||||
|
||||
@Override
|
||||
public Options getOptions() {
|
||||
Options options = super.getOptions();
|
||||
options.addOption("e", "email", true, "email address or user id of user (required if adding new items)");
|
||||
options.getOption("e").setType(String.class);
|
||||
options.getOption("e").setRequired(true);
|
||||
super.options = options;
|
||||
return options;
|
||||
}
|
||||
}
|
||||
|
@@ -57,9 +57,6 @@ public class MetadataImportScriptConfiguration<T extends MetadataImport> extends
|
||||
options.addOption("f", "file", true, "source file");
|
||||
options.getOption("f").setType(InputStream.class);
|
||||
options.getOption("f").setRequired(true);
|
||||
options.addOption("e", "email", true, "email address or user id of user (required if adding new items)");
|
||||
options.getOption("e").setType(String.class);
|
||||
options.getOption("e").setRequired(true);
|
||||
options.addOption("s", "silent", false,
|
||||
"silent operation - doesn't request confirmation of changes USE WITH CAUTION");
|
||||
options.getOption("s").setType(boolean.class);
|
||||
|
@@ -17,12 +17,11 @@ import java.util.UUID;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.Option;
|
||||
import org.apache.commons.cli.OptionBuilder;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.checker.BitstreamDispatcher;
|
||||
@@ -87,7 +86,7 @@ public final class ChecksumChecker {
|
||||
*/
|
||||
public static void main(String[] args) throws SQLException {
|
||||
// set up command line parser
|
||||
CommandLineParser parser = new PosixParser();
|
||||
CommandLineParser parser = new DefaultParser();
|
||||
CommandLine line = null;
|
||||
|
||||
// create an options object and populate it
|
||||
@@ -102,19 +101,21 @@ public final class ChecksumChecker {
|
||||
options.addOption("a", "handle", true, "Specify a handle to check");
|
||||
options.addOption("v", "verbose", false, "Report all processing");
|
||||
|
||||
OptionBuilder.withArgName("bitstream-ids").hasArgs().withDescription(
|
||||
"Space separated list of bitstream ids");
|
||||
Option useBitstreamIds = OptionBuilder.create('b');
|
||||
Option option;
|
||||
|
||||
options.addOption(useBitstreamIds);
|
||||
option = Option.builder("b")
|
||||
.longOpt("bitstream-ids")
|
||||
.hasArgs()
|
||||
.desc("Space separated list of bitstream ids")
|
||||
.build();
|
||||
options.addOption(option);
|
||||
|
||||
options.addOption("p", "prune", false, "Prune configuration file");
|
||||
options.addOption(OptionBuilder
|
||||
.withArgName("prune")
|
||||
.hasOptionalArgs(1)
|
||||
.withDescription(
|
||||
"Prune old results (optionally using specified properties file for configuration)")
|
||||
.create('p'));
|
||||
option = Option.builder("p")
|
||||
.longOpt("prune")
|
||||
.optionalArg(true)
|
||||
.desc("Prune old results (optionally using specified properties file for configuration)")
|
||||
.build();
|
||||
options.addOption(option);
|
||||
|
||||
try {
|
||||
line = parser.parse(options, args);
|
||||
|
@@ -15,9 +15,9 @@ import java.util.UUID;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
@@ -53,7 +53,7 @@ public class Harvest {
|
||||
|
||||
public static void main(String[] argv) throws Exception {
|
||||
// create an options object and populate it
|
||||
CommandLineParser parser = new PosixParser();
|
||||
CommandLineParser parser = new DefaultParser();
|
||||
|
||||
Options options = new Options();
|
||||
|
||||
@@ -402,11 +402,7 @@ public class Harvest {
|
||||
context.setCurrentUser(eperson);
|
||||
harvester.runHarvest();
|
||||
context.complete();
|
||||
} catch (SQLException e) {
|
||||
throw new IllegalStateException("Failed to run harvester", e);
|
||||
} catch (AuthorizeException e) {
|
||||
throw new IllegalStateException("Failed to run harvester", e);
|
||||
} catch (IOException e) {
|
||||
} catch (SQLException | AuthorizeException | IOException e) {
|
||||
throw new IllegalStateException("Failed to run harvester", e);
|
||||
}
|
||||
|
||||
|
@@ -15,9 +15,9 @@ import java.util.UUID;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
import org.dspace.app.itemexport.factory.ItemExportServiceFactory;
|
||||
import org.dspace.app.itemexport.service.ItemExportService;
|
||||
import org.dspace.content.Collection;
|
||||
@@ -69,7 +69,7 @@ public class ItemExportCLITool {
|
||||
*/
|
||||
public static void main(String[] argv) throws Exception {
|
||||
// create an options object and populate it
|
||||
CommandLineParser parser = new PosixParser();
|
||||
CommandLineParser parser = new DefaultParser();
|
||||
|
||||
Options options = new Options();
|
||||
|
||||
|
@@ -47,7 +47,6 @@ import org.dspace.content.MetadataValue;
|
||||
import org.dspace.content.service.BitstreamService;
|
||||
import org.dspace.content.service.CommunityService;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Constants;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.Email;
|
||||
@@ -57,6 +56,7 @@ import org.dspace.core.Utils;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.eperson.service.EPersonService;
|
||||
import org.dspace.handle.service.HandleService;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
@@ -93,12 +93,14 @@ public class ItemExportServiceImpl implements ItemExportService {
|
||||
protected ItemService itemService;
|
||||
@Autowired(required = true)
|
||||
protected HandleService handleService;
|
||||
@Autowired(required = true)
|
||||
protected ConfigurationService configurationService;
|
||||
|
||||
|
||||
/**
|
||||
* log4j logger
|
||||
*/
|
||||
private Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemExportServiceImpl.class);
|
||||
private final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemExportServiceImpl.class);
|
||||
|
||||
protected ItemExportServiceImpl() {
|
||||
|
||||
@@ -605,7 +607,7 @@ public class ItemExportServiceImpl implements ItemExportService {
|
||||
|
||||
// check the size of all the bitstreams against the configuration file
|
||||
// entry if it exists
|
||||
String megaBytes = ConfigurationManager
|
||||
String megaBytes = configurationService
|
||||
.getProperty("org.dspace.app.itemexport.max.size");
|
||||
if (megaBytes != null) {
|
||||
float maxSize = 0;
|
||||
@@ -730,7 +732,7 @@ public class ItemExportServiceImpl implements ItemExportService {
|
||||
@Override
|
||||
public String getExportDownloadDirectory(EPerson ePerson)
|
||||
throws Exception {
|
||||
String downloadDir = ConfigurationManager
|
||||
String downloadDir = configurationService
|
||||
.getProperty("org.dspace.app.itemexport.download.dir");
|
||||
if (downloadDir == null) {
|
||||
throw new Exception(
|
||||
@@ -747,7 +749,7 @@ public class ItemExportServiceImpl implements ItemExportService {
|
||||
|
||||
@Override
|
||||
public String getExportWorkDirectory() throws Exception {
|
||||
String exportDir = ConfigurationManager
|
||||
String exportDir = configurationService
|
||||
.getProperty("org.dspace.app.itemexport.work.dir");
|
||||
if (exportDir == null) {
|
||||
throw new Exception(
|
||||
@@ -853,7 +855,7 @@ public class ItemExportServiceImpl implements ItemExportService {
|
||||
return null;
|
||||
}
|
||||
|
||||
List<String> fileNames = new ArrayList<String>();
|
||||
List<String> fileNames = new ArrayList<>();
|
||||
|
||||
for (String fileName : downloadDir.list()) {
|
||||
if (fileName.contains("export") && fileName.endsWith(".zip")) {
|
||||
@@ -870,7 +872,7 @@ public class ItemExportServiceImpl implements ItemExportService {
|
||||
|
||||
@Override
|
||||
public void deleteOldExportArchives(EPerson eperson) throws Exception {
|
||||
int hours = ConfigurationManager
|
||||
int hours = configurationService
|
||||
.getIntProperty("org.dspace.app.itemexport.life.span.hours");
|
||||
Calendar now = Calendar.getInstance();
|
||||
now.setTime(new Date());
|
||||
@@ -891,11 +893,11 @@ public class ItemExportServiceImpl implements ItemExportService {
|
||||
|
||||
@Override
|
||||
public void deleteOldExportArchives() throws Exception {
|
||||
int hours = ConfigurationManager.getIntProperty("org.dspace.app.itemexport.life.span.hours");
|
||||
int hours = configurationService.getIntProperty("org.dspace.app.itemexport.life.span.hours");
|
||||
Calendar now = Calendar.getInstance();
|
||||
now.setTime(new Date());
|
||||
now.add(Calendar.HOUR, (-hours));
|
||||
File downloadDir = new File(ConfigurationManager.getProperty("org.dspace.app.itemexport.download.dir"));
|
||||
File downloadDir = new File(configurationService.getProperty("org.dspace.app.itemexport.download.dir"));
|
||||
if (downloadDir.exists()) {
|
||||
// Get a list of all the sub-directories, potentially one for each ePerson.
|
||||
File[] dirs = downloadDir.listFiles();
|
||||
@@ -929,8 +931,8 @@ public class ItemExportServiceImpl implements ItemExportService {
|
||||
Locale supportedLocale = I18nUtil.getEPersonLocale(eperson);
|
||||
Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "export_success"));
|
||||
email.addRecipient(eperson.getEmail());
|
||||
email.addArgument(ConfigurationManager.getProperty("dspace.ui.url") + "/exportdownload/" + fileName);
|
||||
email.addArgument(ConfigurationManager.getProperty("org.dspace.app.itemexport.life.span.hours"));
|
||||
email.addArgument(configurationService.getProperty("dspace.ui.url") + "/exportdownload/" + fileName);
|
||||
email.addArgument(configurationService.getProperty("org.dspace.app.itemexport.life.span.hours"));
|
||||
|
||||
email.send();
|
||||
} catch (Exception e) {
|
||||
@@ -947,7 +949,7 @@ public class ItemExportServiceImpl implements ItemExportService {
|
||||
Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "export_error"));
|
||||
email.addRecipient(eperson.getEmail());
|
||||
email.addArgument(error);
|
||||
email.addArgument(ConfigurationManager.getProperty("dspace.ui.url") + "/feedback");
|
||||
email.addArgument(configurationService.getProperty("dspace.ui.url") + "/feedback");
|
||||
|
||||
email.send();
|
||||
} catch (Exception e) {
|
||||
|
@@ -8,6 +8,7 @@
|
||||
package org.dspace.app.itemimport;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
@@ -15,9 +16,9 @@ import java.util.UUID;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
import org.dspace.app.itemimport.factory.ItemImportServiceFactory;
|
||||
import org.dspace.app.itemimport.service.ItemImportService;
|
||||
import org.dspace.content.Collection;
|
||||
@@ -67,7 +68,7 @@ public class ItemImportCLITool {
|
||||
|
||||
try {
|
||||
// create an options object and populate it
|
||||
CommandLineParser parser = new PosixParser();
|
||||
CommandLineParser parser = new DefaultParser();
|
||||
|
||||
Options options = new Options();
|
||||
|
||||
@@ -408,7 +409,7 @@ public class ItemImportCLITool {
|
||||
"Deleting temporary zip directory: " + myloader.getTempWorkDirFile().getAbsolutePath());
|
||||
myloader.cleanupZipTemp();
|
||||
}
|
||||
} catch (Exception ex) {
|
||||
} catch (IOException ex) {
|
||||
System.out.println("Unable to delete temporary zip archive location: " + myloader.getTempWorkDirFile()
|
||||
.getAbsolutePath());
|
||||
}
|
||||
|
@@ -85,7 +85,6 @@ import org.dspace.content.service.ItemService;
|
||||
import org.dspace.content.service.MetadataFieldService;
|
||||
import org.dspace.content.service.MetadataSchemaService;
|
||||
import org.dspace.content.service.WorkspaceItemService;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Constants;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.Email;
|
||||
@@ -96,6 +95,7 @@ import org.dspace.eperson.Group;
|
||||
import org.dspace.eperson.service.EPersonService;
|
||||
import org.dspace.eperson.service.GroupService;
|
||||
import org.dspace.handle.service.HandleService;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.utils.DSpace;
|
||||
import org.dspace.workflow.WorkflowItem;
|
||||
import org.dspace.workflow.WorkflowService;
|
||||
@@ -157,8 +157,11 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
|
||||
protected WorkspaceItemService workspaceItemService;
|
||||
@Autowired(required = true)
|
||||
protected WorkflowService workflowService;
|
||||
@Autowired(required = true)
|
||||
protected ConfigurationService configurationService;
|
||||
|
||||
protected final String tempWorkDir = ConfigurationManager.getProperty("org.dspace.app.batchitemimport.work.dir");
|
||||
protected final String tempWorkDir
|
||||
= configurationService.getProperty("org.dspace.app.batchitemimport.work.dir");
|
||||
|
||||
protected boolean isTest = false;
|
||||
protected boolean isResume = false;
|
||||
@@ -217,7 +220,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
|
||||
//Determine the folder where BTE will output the results
|
||||
String outputFolder = null;
|
||||
if (workingDir == null) { //This indicates a command line import, create a random path
|
||||
File importDir = new File(ConfigurationManager.getProperty("org.dspace.app.batchitemimport.work.dir"));
|
||||
File importDir = new File(configurationService.getProperty("org.dspace.app.batchitemimport.work.dir"));
|
||||
if (!importDir.exists()) {
|
||||
boolean success = importDir.mkdir();
|
||||
if (!success) {
|
||||
@@ -1481,7 +1484,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
|
||||
|
||||
File tempdir = new File(destinationDir);
|
||||
if (!tempdir.isDirectory()) {
|
||||
log.error("'" + ConfigurationManager.getProperty("org.dspace.app.itemexport.work.dir") +
|
||||
log.error("'" + configurationService.getProperty("org.dspace.app.itemexport.work.dir") +
|
||||
"' as defined by the key 'org.dspace.app.itemexport.work.dir' in dspace.cfg " +
|
||||
"is not a valid directory");
|
||||
}
|
||||
@@ -1506,60 +1509,54 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
|
||||
log.error("Unable to create contents directory: " + zipDir + entry.getName());
|
||||
}
|
||||
} else {
|
||||
System.out.println("Extracting file: " + entry.getName());
|
||||
log.info("Extracting file: " + entry.getName());
|
||||
|
||||
int index = entry.getName().lastIndexOf('/');
|
||||
if (index == -1) {
|
||||
// Was it created on Windows instead?
|
||||
index = entry.getName().lastIndexOf('\\');
|
||||
}
|
||||
if (index > 0) {
|
||||
File dir = new File(zipDir + entry.getName().substring(0, index));
|
||||
if (!dir.exists() && !dir.mkdirs()) {
|
||||
log.error("Unable to create directory: " + dir.getAbsolutePath());
|
||||
}
|
||||
// Verify that the directory the entry is using is a subpath of zipDir (and not somewhere else!)
|
||||
if (!dir.toPath().normalize().startsWith(zipDir)) {
|
||||
throw new IOException("Bad zip entry: '" + entry.getName()
|
||||
+ "' in file '" + zipfile.getAbsolutePath() + "'!"
|
||||
+ " Cannot process this file.");
|
||||
}
|
||||
|
||||
//Entries could have too many directories, and we need to adjust the sourcedir
|
||||
// file1.zip (SimpleArchiveFormat / item1 / contents|dublin_core|...
|
||||
// SimpleArchiveFormat / item2 / contents|dublin_core|...
|
||||
// or
|
||||
// file2.zip (item1 / contents|dublin_core|...
|
||||
// item2 / contents|dublin_core|...
|
||||
|
||||
//regex supports either windows or *nix file paths
|
||||
String[] entryChunks = entry.getName().split("/|\\\\");
|
||||
if (entryChunks.length > 2) {
|
||||
if (StringUtils.equals(sourceDirForZip, sourcedir)) {
|
||||
sourceDirForZip = sourcedir + "/" + entryChunks[0];
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
byte[] buffer = new byte[1024];
|
||||
int len;
|
||||
File outFile = new File(zipDir + entry.getName());
|
||||
// Verify that this file will be created in our zipDir (and not somewhere else!)
|
||||
String entryName = entry.getName();
|
||||
File outFile = new File(zipDir + entryName);
|
||||
// Verify that this file will be extracted into our zipDir (and not somewhere else!)
|
||||
if (!outFile.toPath().normalize().startsWith(zipDir)) {
|
||||
throw new IOException("Bad zip entry: '" + entry.getName()
|
||||
throw new IOException("Bad zip entry: '" + entryName
|
||||
+ "' in file '" + zipfile.getAbsolutePath() + "'!"
|
||||
+ " Cannot process this file.");
|
||||
} else {
|
||||
System.out.println("Extracting file: " + entryName);
|
||||
log.info("Extracting file: " + entryName);
|
||||
|
||||
int index = entryName.lastIndexOf('/');
|
||||
if (index == -1) {
|
||||
// Was it created on Windows instead?
|
||||
index = entryName.lastIndexOf('\\');
|
||||
}
|
||||
if (index > 0) {
|
||||
File dir = new File(zipDir + entryName.substring(0, index));
|
||||
if (!dir.exists() && !dir.mkdirs()) {
|
||||
log.error("Unable to create directory: " + dir.getAbsolutePath());
|
||||
}
|
||||
|
||||
//Entries could have too many directories, and we need to adjust the sourcedir
|
||||
// file1.zip (SimpleArchiveFormat / item1 / contents|dublin_core|...
|
||||
// SimpleArchiveFormat / item2 / contents|dublin_core|...
|
||||
// or
|
||||
// file2.zip (item1 / contents|dublin_core|...
|
||||
// item2 / contents|dublin_core|...
|
||||
|
||||
//regex supports either windows or *nix file paths
|
||||
String[] entryChunks = entryName.split("/|\\\\");
|
||||
if (entryChunks.length > 2) {
|
||||
if (StringUtils.equals(sourceDirForZip, sourcedir)) {
|
||||
sourceDirForZip = sourcedir + "/" + entryChunks[0];
|
||||
}
|
||||
}
|
||||
}
|
||||
byte[] buffer = new byte[1024];
|
||||
int len;
|
||||
InputStream in = zf.getInputStream(entry);
|
||||
BufferedOutputStream out = new BufferedOutputStream(
|
||||
new FileOutputStream(outFile));
|
||||
while ((len = in.read(buffer)) >= 0) {
|
||||
out.write(buffer, 0, len);
|
||||
}
|
||||
in.close();
|
||||
out.close();
|
||||
}
|
||||
InputStream in = zf.getInputStream(entry);
|
||||
BufferedOutputStream out = new BufferedOutputStream(
|
||||
new FileOutputStream(outFile));
|
||||
while ((len = in.read(buffer)) >= 0) {
|
||||
out.write(buffer, 0, len);
|
||||
}
|
||||
in.close();
|
||||
out.close();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1652,7 +1649,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
|
||||
}
|
||||
}
|
||||
|
||||
importDir = ConfigurationManager.getProperty(
|
||||
importDir = configurationService.getProperty(
|
||||
"org.dspace.app.batchitemimport.work.dir") + File.separator + "batchuploads" + File.separator
|
||||
+ context
|
||||
.getCurrentUser()
|
||||
@@ -1810,7 +1807,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
|
||||
Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "bte_batch_import_error"));
|
||||
email.addRecipient(eperson.getEmail());
|
||||
email.addArgument(error);
|
||||
email.addArgument(ConfigurationManager.getProperty("dspace.ui.url") + "/feedback");
|
||||
email.addArgument(configurationService.getProperty("dspace.ui.url") + "/feedback");
|
||||
|
||||
email.send();
|
||||
} catch (Exception e) {
|
||||
@@ -1848,7 +1845,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
|
||||
@Override
|
||||
public String getImportUploadableDirectory(EPerson ePerson)
|
||||
throws Exception {
|
||||
String uploadDir = ConfigurationManager.getProperty("org.dspace.app.batchitemimport.work.dir");
|
||||
String uploadDir = configurationService.getProperty("org.dspace.app.batchitemimport.work.dir");
|
||||
if (uploadDir == null) {
|
||||
throw new Exception(
|
||||
"A dspace.cfg entry for 'org.dspace.app.batchitemimport.work.dir' does not exist.");
|
||||
|
@@ -7,6 +7,7 @@
|
||||
*/
|
||||
package org.dspace.app.itemupdate;
|
||||
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
@@ -20,22 +21,25 @@ import java.util.Map;
|
||||
public class ActionManager implements Iterable<UpdateAction> {
|
||||
|
||||
protected Map<Class<? extends UpdateAction>, UpdateAction> registry
|
||||
= new LinkedHashMap<Class<? extends UpdateAction>, UpdateAction>();
|
||||
= new LinkedHashMap<>();
|
||||
|
||||
/**
|
||||
* Get update action
|
||||
* Get update action.
|
||||
*
|
||||
* @param actionClass UpdateAction class
|
||||
* @return instantiation of UpdateAction class
|
||||
* @throws InstantiationException if instantiation error
|
||||
* @throws IllegalAccessException if illegal access error
|
||||
* @throws NoSuchMethodException passed through.
|
||||
* @throws InvocationTargetException passed through.
|
||||
*/
|
||||
public UpdateAction getUpdateAction(Class<? extends UpdateAction> actionClass)
|
||||
throws InstantiationException, IllegalAccessException {
|
||||
throws InstantiationException, IllegalAccessException,
|
||||
NoSuchMethodException, IllegalArgumentException, InvocationTargetException {
|
||||
UpdateAction action = registry.get(actionClass);
|
||||
|
||||
if (action == null) {
|
||||
action = actionClass.newInstance();
|
||||
action = actionClass.getDeclaredConstructor().newInstance();
|
||||
registry.put(actionClass, action);
|
||||
}
|
||||
|
||||
@@ -58,7 +62,8 @@ public class ActionManager implements Iterable<UpdateAction> {
|
||||
@Override
|
||||
public Iterator<UpdateAction> iterator() {
|
||||
return new Iterator<UpdateAction>() {
|
||||
private Iterator<Class<? extends UpdateAction>> itr = registry.keySet().iterator();
|
||||
private final Iterator<Class<? extends UpdateAction>> itr
|
||||
= registry.keySet().iterator();
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
|
@@ -24,10 +24,10 @@ import java.util.UUID;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.Option;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.ItemService;
|
||||
@@ -109,7 +109,7 @@ public class ItemUpdate {
|
||||
|
||||
// instance variables
|
||||
protected ActionManager actionMgr = new ActionManager();
|
||||
protected List<String> undoActionList = new ArrayList<String>();
|
||||
protected List<String> undoActionList = new ArrayList<>();
|
||||
protected String eperson;
|
||||
|
||||
/**
|
||||
@@ -117,7 +117,7 @@ public class ItemUpdate {
|
||||
*/
|
||||
public static void main(String[] argv) {
|
||||
// create an options object and populate it
|
||||
CommandLineParser parser = new PosixParser();
|
||||
CommandLineParser parser = new DefaultParser();
|
||||
|
||||
Options options = new Options();
|
||||
|
||||
@@ -275,7 +275,8 @@ public class ItemUpdate {
|
||||
Class<?> cfilter = Class.forName(filterClassname);
|
||||
pr("BitstreamFilter class to instantiate: " + cfilter.toString());
|
||||
|
||||
filter = (BitstreamFilter) cfilter.newInstance(); //unfortunate cast, an erasure consequence
|
||||
filter = (BitstreamFilter) cfilter.getDeclaredConstructor()
|
||||
.newInstance(); //unfortunate cast, an erasure consequence
|
||||
} catch (Exception e) {
|
||||
pr("Error: Failure instantiating bitstream filter class: " + filterClassname);
|
||||
System.exit(1);
|
||||
|
@@ -38,8 +38,8 @@ import org.dspace.content.MetadataSchemaEnum;
|
||||
import org.dspace.content.MetadataValue;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.w3c.dom.Document;
|
||||
import org.w3c.dom.Element;
|
||||
import org.w3c.dom.NamedNodeMap;
|
||||
@@ -226,7 +226,9 @@ public class MetadataUtilities {
|
||||
if (language == null) {
|
||||
language = "en";
|
||||
} else if ("".equals(language)) {
|
||||
language = ConfigurationManager.getProperty("default.language");
|
||||
language = DSpaceServicesFactory.getInstance()
|
||||
.getConfigurationService()
|
||||
.getProperty("default.language");
|
||||
}
|
||||
|
||||
DtoMetadata dtom = DtoMetadata.create(schema, element, qualifier, language, value);
|
||||
|
@@ -12,7 +12,8 @@ import java.io.InputStream;
|
||||
import javax.imageio.ImageIO;
|
||||
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/**
|
||||
* Filter image bitstreams, scaling the image to be within the bounds of
|
||||
@@ -66,17 +67,19 @@ public class BrandedPreviewJPEGFilter extends MediaFilter {
|
||||
BufferedImage buf = ImageIO.read(source);
|
||||
|
||||
// get config params
|
||||
float xmax = (float) ConfigurationManager
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
float xmax = (float) configurationService
|
||||
.getIntProperty("webui.preview.maxwidth");
|
||||
float ymax = (float) ConfigurationManager
|
||||
float ymax = (float) configurationService
|
||||
.getIntProperty("webui.preview.maxheight");
|
||||
boolean blurring = (boolean) ConfigurationManager
|
||||
boolean blurring = (boolean) configurationService
|
||||
.getBooleanProperty("webui.preview.blurring");
|
||||
boolean hqscaling = (boolean) ConfigurationManager
|
||||
boolean hqscaling = (boolean) configurationService
|
||||
.getBooleanProperty("webui.preview.hqscaling");
|
||||
int brandHeight = ConfigurationManager.getIntProperty("webui.preview.brand.height");
|
||||
String brandFont = ConfigurationManager.getProperty("webui.preview.brand.font");
|
||||
int brandFontPoint = ConfigurationManager.getIntProperty("webui.preview.brand.fontpoint");
|
||||
int brandHeight = configurationService.getIntProperty("webui.preview.brand.height");
|
||||
String brandFont = configurationService.getProperty("webui.preview.brand.font");
|
||||
int brandFontPoint = configurationService.getIntProperty("webui.preview.brand.fontpoint");
|
||||
|
||||
JPEGFilter jpegFilter = new JPEGFilter();
|
||||
return jpegFilter
|
||||
|
@@ -19,8 +19,9 @@ import org.dspace.content.Bundle;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.im4java.core.ConvertCmd;
|
||||
import org.im4java.core.IM4JavaException;
|
||||
import org.im4java.core.IMOperation;
|
||||
@@ -33,36 +34,18 @@ import org.im4java.process.ProcessStarter;
|
||||
* no bigger than. Creates only JPEGs.
|
||||
*/
|
||||
public abstract class ImageMagickThumbnailFilter extends MediaFilter {
|
||||
protected static int width = 180;
|
||||
protected static int height = 120;
|
||||
private static boolean flatten = true;
|
||||
static String bitstreamDescription = "IM Thumbnail";
|
||||
static final String defaultPattern = "Generated Thumbnail";
|
||||
static Pattern replaceRegex = Pattern.compile(defaultPattern);
|
||||
private static final int DEFAULT_WIDTH = 180;
|
||||
private static final int DEFAULT_HEIGHT = 120;
|
||||
static final String DEFAULT_PATTERN = "Generated Thumbnail";
|
||||
protected final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
|
||||
protected static final ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
|
||||
static String cmyk_profile;
|
||||
static String srgb_profile;
|
||||
protected static final String PRE = ImageMagickThumbnailFilter.class.getName();
|
||||
|
||||
static {
|
||||
String pre = ImageMagickThumbnailFilter.class.getName();
|
||||
String s = ConfigurationManager.getProperty(pre + ".ProcessStarter");
|
||||
String s = configurationService.getProperty(PRE + ".ProcessStarter");
|
||||
ProcessStarter.setGlobalSearchPath(s);
|
||||
width = ConfigurationManager.getIntProperty("thumbnail.maxwidth", width);
|
||||
height = ConfigurationManager.getIntProperty("thumbnail.maxheight", height);
|
||||
flatten = ConfigurationManager.getBooleanProperty(pre + ".flatten", flatten);
|
||||
String description = ConfigurationManager.getProperty(pre + ".bitstreamDescription");
|
||||
cmyk_profile = ConfigurationManager.getProperty(pre + ".cmyk_profile");
|
||||
srgb_profile = ConfigurationManager.getProperty(pre + ".srgb_profile");
|
||||
if (description != null) {
|
||||
bitstreamDescription = description;
|
||||
}
|
||||
try {
|
||||
String patt = ConfigurationManager.getProperty(pre + ".replaceRegex");
|
||||
replaceRegex = Pattern.compile(patt == null ? defaultPattern : patt);
|
||||
} catch (PatternSyntaxException e) {
|
||||
System.err.println("Invalid thumbnail replacement pattern: " + e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
public ImageMagickThumbnailFilter() {
|
||||
@@ -94,7 +77,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
|
||||
*/
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return bitstreamDescription;
|
||||
return configurationService.getProperty(PRE + ".bitstreamDescription", "IM Thumbnail");
|
||||
}
|
||||
|
||||
public File inputStreamToTempFile(InputStream source, String prefix, String suffix) throws IOException {
|
||||
@@ -120,7 +103,8 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
|
||||
IMOperation op = new IMOperation();
|
||||
op.autoOrient();
|
||||
op.addImage(f.getAbsolutePath());
|
||||
op.thumbnail(width, height);
|
||||
op.thumbnail(configurationService.getIntProperty("thumbnail.maxwidth", DEFAULT_WIDTH),
|
||||
configurationService.getIntProperty("thumbnail.maxheight", DEFAULT_HEIGHT));
|
||||
op.addImage(f2.getAbsolutePath());
|
||||
if (verbose) {
|
||||
System.out.println("IM Thumbnail Param: " + op);
|
||||
@@ -137,11 +121,14 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
|
||||
IMOperation op = new IMOperation();
|
||||
String s = "[" + page + "]";
|
||||
op.addImage(f.getAbsolutePath() + s);
|
||||
if (flatten) {
|
||||
if (configurationService.getBooleanProperty(PRE + ".flatten", true)) {
|
||||
op.flatten();
|
||||
}
|
||||
|
||||
// PDFs using the CMYK color system can be handled specially if
|
||||
// profiles are defined
|
||||
String cmyk_profile = configurationService.getProperty(PRE + ".cmyk_profile");
|
||||
String srgb_profile = configurationService.getProperty(PRE + ".srgb_profile");
|
||||
if (cmyk_profile != null && srgb_profile != null) {
|
||||
Info imageInfo = new Info(f.getAbsolutePath() + s, true);
|
||||
String imageClass = imageInfo.getImageClass();
|
||||
@@ -174,24 +161,32 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
|
||||
String description = bit.getDescription();
|
||||
// If anything other than a generated thumbnail
|
||||
// is found, halt processing
|
||||
Pattern replaceRegex;
|
||||
try {
|
||||
String patt = configurationService.getProperty(PRE + ".replaceRegex", DEFAULT_PATTERN);
|
||||
replaceRegex = Pattern.compile(patt == null ? DEFAULT_PATTERN : patt);
|
||||
} catch (PatternSyntaxException e) {
|
||||
System.err.println("Invalid thumbnail replacement pattern: " + e.getMessage());
|
||||
throw e;
|
||||
}
|
||||
if (description != null) {
|
||||
if (replaceRegex.matcher(description).matches()) {
|
||||
if (verbose) {
|
||||
System.out.println(description + " " + nsrc
|
||||
+ " matches pattern and is replacable.");
|
||||
System.out.format("%s %s matches pattern and is replacable.%n",
|
||||
description, nsrc);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (description.equals(bitstreamDescription)) {
|
||||
if (description.equals(getDescription())) {
|
||||
if (verbose) {
|
||||
System.out.println(bitstreamDescription + " " + nsrc
|
||||
+ " is replacable.");
|
||||
System.out.format("%s %s is replaceable.%n",
|
||||
getDescription(), nsrc);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
}
|
||||
System.out.println("Custom Thumbnail exists for " + nsrc + " for item "
|
||||
+ item.getHandle() + ". Thumbnail will not be generated. ");
|
||||
System.out.format("Custom Thumbnail exists for %s for item %s. Thumbnail will not be generated.%n",
|
||||
nsrc, item.getHandle());
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
@@ -22,7 +22,8 @@ import java.io.InputStream;
|
||||
import javax.imageio.ImageIO;
|
||||
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/**
|
||||
* Filter image bitstreams, scaling the image to be within the bounds of
|
||||
@@ -80,13 +81,15 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
|
||||
public InputStream getThumb(Item currentItem, BufferedImage buf, boolean verbose)
|
||||
throws Exception {
|
||||
// get config params
|
||||
float xmax = (float) ConfigurationManager
|
||||
final ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
float xmax = (float) configurationService
|
||||
.getIntProperty("thumbnail.maxwidth");
|
||||
float ymax = (float) ConfigurationManager
|
||||
float ymax = (float) configurationService
|
||||
.getIntProperty("thumbnail.maxheight");
|
||||
boolean blurring = (boolean) ConfigurationManager
|
||||
boolean blurring = (boolean) configurationService
|
||||
.getBooleanProperty("thumbnail.blurring");
|
||||
boolean hqscaling = (boolean) ConfigurationManager
|
||||
boolean hqscaling = (boolean) configurationService
|
||||
.getBooleanProperty("thumbnail.hqscaling");
|
||||
|
||||
return getThumbDim(currentItem, buf, verbose, xmax, ymax, blurring, hqscaling, 0, 0, null);
|
||||
@@ -169,9 +172,11 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
|
||||
g2d.drawImage(buf, 0, 0, (int) xsize, (int) ysize, null);
|
||||
|
||||
if (brandHeight != 0) {
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
Brand brand = new Brand((int) xsize, brandHeight, new Font(brandFont, Font.PLAIN, brandFontPoint), 5);
|
||||
BufferedImage brandImage = brand.create(ConfigurationManager.getProperty("webui.preview.brand"),
|
||||
ConfigurationManager.getProperty("webui.preview.brand.abbrev"),
|
||||
BufferedImage brandImage = brand.create(configurationService.getProperty("webui.preview.brand"),
|
||||
configurationService.getProperty("webui.preview.brand.abbrev"),
|
||||
currentItem == null ? "" : "hdl:" + currentItem.getHandle());
|
||||
|
||||
g2d.drawImage(brandImage, (int) 0, (int) ysize, (int) xsize, (int) 20, null);
|
||||
|
@@ -16,12 +16,11 @@ import java.util.Map;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.MissingArgumentException;
|
||||
import org.apache.commons.cli.Option;
|
||||
import org.apache.commons.cli.OptionBuilder;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
import org.apache.commons.lang3.ArrayUtils;
|
||||
import org.dspace.app.mediafilter.factory.MediaFilterServiceFactory;
|
||||
import org.dspace.app.mediafilter.service.MediaFilterService;
|
||||
@@ -66,7 +65,7 @@ public class MediaFilterCLITool {
|
||||
System.setProperty("java.awt.headless", "true");
|
||||
|
||||
// create an options object and populate it
|
||||
CommandLineParser parser = new PosixParser();
|
||||
CommandLineParser parser = new DefaultParser();
|
||||
|
||||
int status = 0;
|
||||
|
||||
@@ -85,26 +84,30 @@ public class MediaFilterCLITool {
|
||||
options.addOption("h", "help", false, "help");
|
||||
|
||||
//create a "plugin" option (to specify specific MediaFilter plugins to run)
|
||||
OptionBuilder.withLongOpt("plugins");
|
||||
OptionBuilder.withValueSeparator(',');
|
||||
OptionBuilder.withDescription(
|
||||
"ONLY run the specified Media Filter plugin(s)\n" +
|
||||
"listed from '" + MEDIA_FILTER_PLUGINS_KEY + "' in dspace.cfg.\n" +
|
||||
"Separate multiple with a comma (,)\n" +
|
||||
"(e.g. MediaFilterManager -p \n\"Word Text Extractor\",\"PDF Text Extractor\")");
|
||||
Option pluginOption = OptionBuilder.create('p');
|
||||
pluginOption.setArgs(Option.UNLIMITED_VALUES); //unlimited number of args
|
||||
Option pluginOption = Option.builder("p")
|
||||
.longOpt("plugins")
|
||||
.hasArg()
|
||||
.hasArgs()
|
||||
.valueSeparator(',')
|
||||
.desc(
|
||||
"ONLY run the specified Media Filter plugin(s)\n" +
|
||||
"listed from '" + MEDIA_FILTER_PLUGINS_KEY + "' in dspace.cfg.\n" +
|
||||
"Separate multiple with a comma (,)\n" +
|
||||
"(e.g. MediaFilterManager -p \n\"Word Text Extractor\",\"PDF Text Extractor\")")
|
||||
.build();
|
||||
options.addOption(pluginOption);
|
||||
|
||||
//create a "skip" option (to specify communities/collections/items to skip)
|
||||
OptionBuilder.withLongOpt("skip");
|
||||
OptionBuilder.withValueSeparator(',');
|
||||
OptionBuilder.withDescription(
|
||||
"SKIP the bitstreams belonging to identifier\n" +
|
||||
"Separate multiple identifiers with a comma (,)\n" +
|
||||
"(e.g. MediaFilterManager -s \n 123456789/34,123456789/323)");
|
||||
Option skipOption = OptionBuilder.create('s');
|
||||
skipOption.setArgs(Option.UNLIMITED_VALUES); //unlimited number of args
|
||||
Option skipOption = Option.builder("s")
|
||||
.longOpt("skip")
|
||||
.hasArg()
|
||||
.hasArgs()
|
||||
.valueSeparator(',')
|
||||
.desc(
|
||||
"SKIP the bitstreams belonging to identifier\n" +
|
||||
"Separate multiple identifiers with a comma (,)\n" +
|
||||
"(e.g. MediaFilterManager -s \n 123456789/34,123456789/323)")
|
||||
.build();
|
||||
options.addOption(skipOption);
|
||||
|
||||
boolean isVerbose = false;
|
||||
@@ -179,7 +182,7 @@ public class MediaFilterCLITool {
|
||||
mediaFilterService.setMax2Process(max2Process);
|
||||
|
||||
//initialize an array of our enabled filters
|
||||
List<FormatFilter> filterList = new ArrayList<FormatFilter>();
|
||||
List<FormatFilter> filterList = new ArrayList<>();
|
||||
|
||||
//set up each filter
|
||||
for (int i = 0; i < filterNames.length; i++) {
|
||||
|
@@ -21,7 +21,8 @@ import org.apache.pdfbox.pdmodel.PDDocument;
|
||||
import org.apache.pdfbox.pdmodel.encryption.InvalidPasswordException;
|
||||
import org.apache.pdfbox.text.PDFTextStripper;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/*
|
||||
*
|
||||
@@ -72,8 +73,10 @@ public class PDFFilter extends MediaFilter {
|
||||
@Override
|
||||
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
|
||||
throws Exception {
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
try {
|
||||
boolean useTemporaryFile = ConfigurationManager.getBooleanProperty("pdffilter.largepdfs", false);
|
||||
boolean useTemporaryFile = configurationService.getBooleanProperty("pdffilter.largepdfs", false);
|
||||
|
||||
// get input stream from bitstream
|
||||
// pass to filter, get string back
|
||||
@@ -124,7 +127,7 @@ public class PDFFilter extends MediaFilter {
|
||||
}
|
||||
} catch (OutOfMemoryError oome) {
|
||||
log.error("Error parsing PDF document " + oome.getMessage(), oome);
|
||||
if (!ConfigurationManager.getBooleanProperty("pdffilter.skiponmemoryexception", false)) {
|
||||
if (!configurationService.getBooleanProperty("pdffilter.skiponmemoryexception", false)) {
|
||||
throw oome;
|
||||
}
|
||||
}
|
||||
|
@@ -17,9 +17,9 @@ import java.util.List;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.crosswalk.CrosswalkException;
|
||||
@@ -175,7 +175,7 @@ public class Packager {
|
||||
"flag can be used if you want to save (pipe) a report of all changes to a file, and " +
|
||||
"therefore need to bypass all user interaction.");
|
||||
|
||||
CommandLineParser parser = new PosixParser();
|
||||
CommandLineParser parser = new DefaultParser();
|
||||
CommandLine line = parser.parse(options, argv);
|
||||
|
||||
String sourceFile = null;
|
||||
|
@@ -19,6 +19,15 @@ import org.dspace.core.Context;
|
||||
* @author Andrea Bollini
|
||||
*/
|
||||
public interface RequestItemAuthorExtractor {
|
||||
public RequestItemAuthor getRequestItemAuthor(Context context, Item item)
|
||||
throws SQLException;
|
||||
|
||||
/**
|
||||
* Retrieve the auhtor to contact for a request copy of the give item.
|
||||
*
|
||||
* @param context DSpace context object
|
||||
* @param item item to request
|
||||
* @return An object containing name an email address to send the request to
|
||||
* or null if no valid email address was found.
|
||||
* @throws SQLException if database error
|
||||
*/
|
||||
public RequestItemAuthor getRequestItemAuthor(Context context, Item item) throws SQLException;
|
||||
}
|
||||
|
@@ -10,13 +10,13 @@ package org.dspace.app.requestitem;
|
||||
import java.sql.SQLException;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.I18nUtil;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.eperson.service.EPersonService;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
@@ -30,9 +30,6 @@ import org.springframework.beans.factory.annotation.Autowired;
|
||||
* @author Peter Dietz
|
||||
*/
|
||||
public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
|
||||
|
||||
private Logger log = org.apache.logging.log4j.LogManager.getLogger(RequestItemHelpdeskStrategy.class);
|
||||
|
||||
@Autowired(required = true)
|
||||
protected EPersonService ePersonService;
|
||||
|
||||
@@ -41,9 +38,11 @@ public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
|
||||
|
||||
@Override
|
||||
public RequestItemAuthor getRequestItemAuthor(Context context, Item item) throws SQLException {
|
||||
boolean helpdeskOverridesSubmitter = ConfigurationManager
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
boolean helpdeskOverridesSubmitter = configurationService
|
||||
.getBooleanProperty("request.item.helpdesk.override", false);
|
||||
String helpDeskEmail = ConfigurationManager.getProperty("mail.helpdesk");
|
||||
String helpDeskEmail = configurationService.getProperty("mail.helpdesk");
|
||||
|
||||
if (helpdeskOverridesSubmitter && StringUtils.isNotBlank(helpDeskEmail)) {
|
||||
return getHelpDeskPerson(context, helpDeskEmail);
|
||||
@@ -64,18 +63,16 @@ public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
|
||||
* @throws SQLException if database error
|
||||
*/
|
||||
public RequestItemAuthor getHelpDeskPerson(Context context, String helpDeskEmail) throws SQLException {
|
||||
EPerson helpdeskEPerson = null;
|
||||
|
||||
context.turnOffAuthorisationSystem();
|
||||
helpdeskEPerson = ePersonService.findByEmail(context, helpDeskEmail);
|
||||
EPerson helpdeskEPerson = ePersonService.findByEmail(context, helpDeskEmail);
|
||||
context.restoreAuthSystemState();
|
||||
|
||||
if (helpdeskEPerson != null) {
|
||||
return new RequestItemAuthor(helpdeskEPerson);
|
||||
} else {
|
||||
String helpdeskName = I18nUtil.getMessage(
|
||||
"org.dspace.app.requestitem.RequestItemHelpdeskStrategy.helpdeskname",
|
||||
context);
|
||||
"org.dspace.app.requestitem.RequestItemHelpdeskStrategy.helpdeskname",
|
||||
context);
|
||||
return new RequestItemAuthor(helpdeskName, helpDeskEmail);
|
||||
}
|
||||
}
|
||||
|
@@ -16,6 +16,7 @@ import org.dspace.content.MetadataValue;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.I18nUtil;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
@@ -38,6 +39,7 @@ public class RequestItemMetadataStrategy extends RequestItemSubmitterStrategy {
|
||||
@Override
|
||||
public RequestItemAuthor getRequestItemAuthor(Context context, Item item)
|
||||
throws SQLException {
|
||||
RequestItemAuthor author = null;
|
||||
if (emailMetadata != null) {
|
||||
List<MetadataValue> vals = itemService.getMetadataByMetadataString(item, emailMetadata);
|
||||
if (vals.size() > 0) {
|
||||
@@ -49,19 +51,38 @@ public class RequestItemMetadataStrategy extends RequestItemSubmitterStrategy {
|
||||
fullname = nameVals.iterator().next().getValue();
|
||||
}
|
||||
}
|
||||
|
||||
if (StringUtils.isBlank(fullname)) {
|
||||
fullname = I18nUtil
|
||||
.getMessage(
|
||||
"org.dspace.app.requestitem.RequestItemMetadataStrategy.unnamed",
|
||||
context);
|
||||
.getMessage(
|
||||
"org.dspace.app.requestitem.RequestItemMetadataStrategy.unnamed",
|
||||
context);
|
||||
}
|
||||
RequestItemAuthor author = new RequestItemAuthor(
|
||||
fullname, email);
|
||||
author = new RequestItemAuthor(fullname, email);
|
||||
return author;
|
||||
}
|
||||
} else {
|
||||
// Uses the basic strategy to look for the original submitter
|
||||
author = super.getRequestItemAuthor(context, item);
|
||||
// Is the author or his email null, so get the help desk or admin name and email
|
||||
if (null == author || null == author.getEmail()) {
|
||||
String email = null;
|
||||
String name = null;
|
||||
//First get help desk name and email
|
||||
email = DSpaceServicesFactory.getInstance()
|
||||
.getConfigurationService().getProperty("mail.helpdesk");
|
||||
name = DSpaceServicesFactory.getInstance()
|
||||
.getConfigurationService().getProperty("mail.helpdesk.name");
|
||||
// If help desk mail is null get the mail and name of admin
|
||||
if (email == null) {
|
||||
email = DSpaceServicesFactory.getInstance()
|
||||
.getConfigurationService().getProperty("mail.admin");
|
||||
name = DSpaceServicesFactory.getInstance()
|
||||
.getConfigurationService().getProperty("mail.admin.name");
|
||||
}
|
||||
author = new RequestItemAuthor(name, email);
|
||||
}
|
||||
}
|
||||
return super.getRequestItemAuthor(context, item);
|
||||
return author;
|
||||
}
|
||||
|
||||
public void setEmailMetadata(String emailMetadata) {
|
||||
|
@@ -23,13 +23,22 @@ public class RequestItemSubmitterStrategy implements RequestItemAuthorExtractor
|
||||
public RequestItemSubmitterStrategy() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the submitter of an Item as RequestItemAuthor or null if the
|
||||
* Submitter is deleted.
|
||||
*
|
||||
* @return The submitter of the item or null if the submitter is deleted
|
||||
* @throws SQLException if database error
|
||||
*/
|
||||
@Override
|
||||
public RequestItemAuthor getRequestItemAuthor(Context context, Item item)
|
||||
throws SQLException {
|
||||
EPerson submitter = item.getSubmitter();
|
||||
RequestItemAuthor author = new RequestItemAuthor(
|
||||
submitter.getFullName(), submitter.getEmail());
|
||||
RequestItemAuthor author = null;
|
||||
if (null != submitter) {
|
||||
author = new RequestItemAuthor(
|
||||
submitter.getFullName(), submitter.getEmail());
|
||||
}
|
||||
return author;
|
||||
}
|
||||
|
||||
}
|
||||
|
@@ -8,9 +8,7 @@
|
||||
|
||||
/**
|
||||
* <p>SFX/OpenURL link server support.</p>
|
||||
*
|
||||
* @see org.dspace.app.webui.jsptag.SFXLinkTag
|
||||
* @see org.dspace.app.xmlui.aspect.artifactbrowser.ItemViewer
|
||||
*
|
||||
*/
|
||||
|
||||
package org.dspace.app.sfx;
|
||||
|
@@ -1,49 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa;
|
||||
|
||||
/**
|
||||
* POJO representation for a SHERPA journal
|
||||
*
|
||||
* @author Andrea Bollini
|
||||
*/
|
||||
public class SHERPAJournal {
|
||||
private String title;
|
||||
|
||||
private String issn;
|
||||
|
||||
private String zetopub;
|
||||
|
||||
private String romeopub;
|
||||
|
||||
public SHERPAJournal(String title, String issn, String zetopub,
|
||||
String romeopub) {
|
||||
super();
|
||||
this.title = title;
|
||||
this.issn = issn;
|
||||
this.zetopub = zetopub;
|
||||
this.romeopub = romeopub;
|
||||
}
|
||||
|
||||
public String getTitle() {
|
||||
return title;
|
||||
}
|
||||
|
||||
public String getIssn() {
|
||||
return issn;
|
||||
}
|
||||
|
||||
public String getZetopub() {
|
||||
return zetopub;
|
||||
}
|
||||
|
||||
public String getRomeopub() {
|
||||
return romeopub;
|
||||
}
|
||||
|
||||
}
|
@@ -1,175 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* POJO representation for a SHERPA Publisher record
|
||||
*
|
||||
* @author Andrea Bollini
|
||||
*/
|
||||
public class SHERPAPublisher {
|
||||
|
||||
private String id;
|
||||
|
||||
private String name;
|
||||
|
||||
private String alias;
|
||||
|
||||
private String homeurl;
|
||||
|
||||
private String prearchiving;
|
||||
|
||||
private List<String> prerestriction;
|
||||
|
||||
private String postarchiving;
|
||||
|
||||
private List<String> postrestriction;
|
||||
|
||||
private String pubarchiving;
|
||||
|
||||
private List<String> pubrestriction;
|
||||
|
||||
private List<String> condition;
|
||||
|
||||
private String paidaccessurl;
|
||||
|
||||
private String paidaccessname;
|
||||
|
||||
private String paidaccessnotes;
|
||||
|
||||
private List<String[]> copyright;
|
||||
|
||||
private String romeocolour;
|
||||
|
||||
private String dateadded;
|
||||
|
||||
private String dateupdated;
|
||||
|
||||
public SHERPAPublisher(String id, String name, String alias, String homeurl,
|
||||
String prearchiving, List<String> prerestriction,
|
||||
String postarchiving, List<String> postrestriction,
|
||||
String pubarchiving, List<String> pubrestriction,
|
||||
List<String> condition, String paidaccessurl,
|
||||
String paidaccessname, String paidaccessnotes,
|
||||
List<String[]> copyright, String romeocolour, String datedded,
|
||||
String dateupdated) {
|
||||
this.id = id;
|
||||
|
||||
this.name = name;
|
||||
|
||||
this.alias = alias;
|
||||
|
||||
this.homeurl = homeurl;
|
||||
|
||||
this.prearchiving = prearchiving;
|
||||
|
||||
this.prerestriction = prerestriction;
|
||||
|
||||
this.postarchiving = postarchiving;
|
||||
|
||||
this.postrestriction = postrestriction;
|
||||
|
||||
this.pubarchiving = pubarchiving;
|
||||
|
||||
this.pubrestriction = pubrestriction;
|
||||
|
||||
this.condition = condition;
|
||||
|
||||
this.paidaccessurl = paidaccessurl;
|
||||
|
||||
this.paidaccessname = paidaccessname;
|
||||
|
||||
this.paidaccessnotes = paidaccessnotes;
|
||||
|
||||
this.copyright = copyright;
|
||||
|
||||
this.romeocolour = romeocolour;
|
||||
|
||||
this.dateadded = datedded;
|
||||
|
||||
this.dateupdated = dateupdated;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public String getAlias() {
|
||||
return alias;
|
||||
}
|
||||
|
||||
public String getHomeurl() {
|
||||
return homeurl;
|
||||
}
|
||||
|
||||
public String getPrearchiving() {
|
||||
return prearchiving;
|
||||
}
|
||||
|
||||
public List<String> getPrerestriction() {
|
||||
return prerestriction;
|
||||
}
|
||||
|
||||
public String getPostarchiving() {
|
||||
return postarchiving;
|
||||
}
|
||||
|
||||
public List<String> getPostrestriction() {
|
||||
return postrestriction;
|
||||
}
|
||||
|
||||
public String getPubarchiving() {
|
||||
return pubarchiving;
|
||||
}
|
||||
|
||||
public List<String> getPubrestriction() {
|
||||
return pubrestriction;
|
||||
}
|
||||
|
||||
public List<String> getCondition() {
|
||||
return condition;
|
||||
}
|
||||
|
||||
public String getPaidaccessurl() {
|
||||
return paidaccessurl;
|
||||
}
|
||||
|
||||
public String getPaidaccessname() {
|
||||
return paidaccessname;
|
||||
}
|
||||
|
||||
public String getPaidaccessnotes() {
|
||||
return paidaccessnotes;
|
||||
}
|
||||
|
||||
public List<String[]> getCopyright() {
|
||||
return copyright;
|
||||
}
|
||||
|
||||
public String getRomeocolour() {
|
||||
return romeocolour;
|
||||
}
|
||||
|
||||
public String getDatedded() {
|
||||
return dateadded;
|
||||
}
|
||||
|
||||
public String getDateupdated() {
|
||||
return dateupdated;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generic getter for the id
|
||||
* @return the id value of this SHERPAPublisher
|
||||
*/
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
}
|
@@ -1,206 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.util.XMLUtils;
|
||||
import org.w3c.dom.Document;
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
/**
|
||||
* JAVA representation for a SHERPA API Response
|
||||
*
|
||||
* @author Andrea Bollini
|
||||
*/
|
||||
public class SHERPAResponse {
|
||||
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SHERPAResponse.class);
|
||||
|
||||
private int numHits;
|
||||
|
||||
private String message;
|
||||
|
||||
private String license;
|
||||
|
||||
private String licenseURL;
|
||||
|
||||
private String disclaimer;
|
||||
|
||||
private List<SHERPAJournal> journals;
|
||||
|
||||
private List<SHERPAPublisher> publishers;
|
||||
|
||||
public SHERPAResponse(InputStream xmlData) {
|
||||
try {
|
||||
DocumentBuilderFactory factory = DocumentBuilderFactory
|
||||
.newInstance();
|
||||
factory.setValidating(false);
|
||||
factory.setIgnoringComments(true);
|
||||
factory.setIgnoringElementContentWhitespace(true);
|
||||
// disallow DTD parsing to ensure no XXE attacks can occur.
|
||||
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
|
||||
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
|
||||
|
||||
DocumentBuilder db = factory.newDocumentBuilder();
|
||||
Document inDoc = db.parse(xmlData);
|
||||
|
||||
Element xmlRoot = inDoc.getDocumentElement();
|
||||
Element headersElement = XMLUtils.getSingleElement(xmlRoot,
|
||||
"header");
|
||||
Element journalsElement = XMLUtils.getSingleElement(xmlRoot,
|
||||
"journals");
|
||||
Element publishersElement = XMLUtils.getSingleElement(xmlRoot,
|
||||
"publishers");
|
||||
|
||||
String numhitsString = XMLUtils.getElementValue(headersElement, "numhits");
|
||||
if (StringUtils.isNotBlank(numhitsString)) {
|
||||
numHits = Integer.parseInt(numhitsString);
|
||||
} else {
|
||||
numHits = 0;
|
||||
}
|
||||
message = XMLUtils.getElementValue(headersElement, "message");
|
||||
|
||||
license = XMLUtils.getElementValue(headersElement, "license");
|
||||
licenseURL = XMLUtils.getElementValue(headersElement, "licenseurl");
|
||||
disclaimer = XMLUtils.getElementValue(headersElement, "disclaimer");
|
||||
|
||||
List<Element> journalsList = XMLUtils.getElementList(
|
||||
journalsElement, "journal");
|
||||
List<Element> publishersList = XMLUtils.getElementList(
|
||||
publishersElement, "publisher");
|
||||
|
||||
if (journalsList != null) {
|
||||
journals = new LinkedList<SHERPAJournal>();
|
||||
for (Element journalElement : journalsList) {
|
||||
journals.add(new SHERPAJournal(
|
||||
XMLUtils.getElementValue(journalElement, "jtitle"),
|
||||
XMLUtils.getElementValue(journalElement, "issn"),
|
||||
XMLUtils.getElementValue(journalElement, "zetopub"),
|
||||
XMLUtils.getElementValue(journalElement, "romeopub")));
|
||||
}
|
||||
}
|
||||
|
||||
if (publishersList != null) {
|
||||
publishers = new LinkedList<SHERPAPublisher>();
|
||||
for (Element publisherElement : publishersList) {
|
||||
Element preprintsElement = XMLUtils.getSingleElement(
|
||||
publisherElement, "preprints");
|
||||
Element preprintsRestrictionElement = XMLUtils
|
||||
.getSingleElement(publisherElement,
|
||||
"prerestrictions");
|
||||
|
||||
Element postprintsElement = XMLUtils.getSingleElement(
|
||||
publisherElement, "postprints");
|
||||
Element postprintsRestrictionElement = XMLUtils
|
||||
.getSingleElement(publisherElement,
|
||||
"postrestrictions");
|
||||
|
||||
Element pdfversionElement = XMLUtils.getSingleElement(
|
||||
publisherElement, "pdfversion");
|
||||
Element pdfversionRestrictionElement = XMLUtils
|
||||
.getSingleElement(publisherElement,
|
||||
"pdfrestrictions");
|
||||
|
||||
Element conditionsElement = XMLUtils.getSingleElement(
|
||||
publisherElement, "conditions");
|
||||
Element paidaccessElement = XMLUtils.getSingleElement(
|
||||
publisherElement, "paidaccess");
|
||||
|
||||
Element copyrightlinksElement = XMLUtils.getSingleElement(
|
||||
publisherElement, "copyrightlinks");
|
||||
publishers
|
||||
.add(new SHERPAPublisher(publisherElement.getAttribute("id"), XMLUtils.getElementValue(
|
||||
publisherElement, "name"),
|
||||
XMLUtils.getElementValue(publisherElement,
|
||||
"alias"), XMLUtils.getElementValue(
|
||||
publisherElement, "homeurl"),
|
||||
|
||||
XMLUtils.getElementValue(preprintsElement,
|
||||
"prearchiving"),
|
||||
XMLUtils.getElementValueList(
|
||||
preprintsRestrictionElement,
|
||||
"prerestriction"),
|
||||
|
||||
XMLUtils.getElementValue(postprintsElement,
|
||||
"postarchiving"),
|
||||
XMLUtils.getElementValueList(
|
||||
postprintsRestrictionElement,
|
||||
"postrestriction"),
|
||||
|
||||
XMLUtils.getElementValue(pdfversionElement,
|
||||
"pdfarchiving"),
|
||||
XMLUtils.getElementValueList(
|
||||
pdfversionRestrictionElement,
|
||||
"pdfrestriction"),
|
||||
|
||||
XMLUtils
|
||||
.getElementValueList(
|
||||
conditionsElement,
|
||||
"condition"), XMLUtils
|
||||
.getElementValue(paidaccessElement,
|
||||
"paidaccessurl"), XMLUtils
|
||||
.getElementValue(paidaccessElement,
|
||||
"paidaccessname"), XMLUtils
|
||||
.getElementValue(paidaccessElement,
|
||||
"paidaccessnotes"),
|
||||
XMLUtils.getElementValueArrayList(
|
||||
copyrightlinksElement,
|
||||
"copyrightlink",
|
||||
"copyrightlinktext",
|
||||
"copyrightlinkurl"), XMLUtils
|
||||
.getElementValue(publisherElement,
|
||||
"romeocolour"), XMLUtils
|
||||
.getElementValue(publisherElement,
|
||||
"dateadded"), XMLUtils
|
||||
.getElementValue(publisherElement,
|
||||
"dateupdated")));
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.error("Error parsing SHERPA API Response", e);
|
||||
}
|
||||
}
|
||||
|
||||
public SHERPAResponse(String message) {
|
||||
this.message = message;
|
||||
}
|
||||
|
||||
public String getMessage() {
|
||||
return message;
|
||||
}
|
||||
|
||||
public String getLicense() {
|
||||
return license;
|
||||
}
|
||||
|
||||
public String getLicenseURL() {
|
||||
return licenseURL;
|
||||
}
|
||||
|
||||
public String getDisclaimer() {
|
||||
return disclaimer;
|
||||
}
|
||||
|
||||
public List<SHERPAJournal> getJournals() {
|
||||
return journals;
|
||||
}
|
||||
|
||||
public List<SHERPAPublisher> getPublishers() {
|
||||
return publishers;
|
||||
}
|
||||
|
||||
public int getNumHits() {
|
||||
return numHits;
|
||||
}
|
||||
}
|
@@ -7,6 +7,13 @@
|
||||
*/
|
||||
package org.dspace.app.sherpa;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.URI;
|
||||
import java.net.URISyntaxException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.http.HttpEntity;
|
||||
import org.apache.http.HttpResponse;
|
||||
@@ -17,21 +24,50 @@ import org.apache.http.client.utils.URIBuilder;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.app.sherpa.v2.SHERPAPublisherResponse;
|
||||
import org.dspace.app.sherpa.v2.SHERPAResponse;
|
||||
import org.dspace.app.sherpa.v2.SHERPAUtils;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
* SHERPAService is responsible for making the HTTP call to the SHERPA v2 API
|
||||
* for SHERPASubmitService.
|
||||
* Note, this service is ported from DSpace 6 for the ability to search policies by ISSN
|
||||
* There are also new DataProvider implementations provided for use as 'external sources'
|
||||
* of journal and publisher data
|
||||
* @see org.dspace.external.provider.impl.SHERPAv2JournalDataProvider
|
||||
* @see org.dspace.external.provider.impl.SHERPAv2PublisherDataProvider
|
||||
* @author Kim Shepherd
|
||||
*/
|
||||
public class SHERPAService {
|
||||
private CloseableHttpClient client = null;
|
||||
|
||||
private int maxNumberOfTries;
|
||||
private long sleepBetweenTimeouts;
|
||||
private int timeout = 5000;
|
||||
private String endpoint = "https://v2.sherpa.ac.uk/cgi/retrieve";
|
||||
private String apiKey = null;
|
||||
|
||||
/** log4j category */
|
||||
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(SHERPAService.class);
|
||||
|
||||
@Autowired
|
||||
ConfigurationService configurationService;
|
||||
|
||||
/**
|
||||
* log4j category
|
||||
* Create a new HTTP builder with sensible defaults in constructor
|
||||
*/
|
||||
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SHERPAService.class);
|
||||
|
||||
public SHERPAService() {
|
||||
// Set configuration service
|
||||
configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
|
||||
// Get endoint and API key from configuration
|
||||
endpoint = configurationService.getProperty("sherpa.romeo.url",
|
||||
"https://v2.sherpa.ac.uk/cgi/retrieve");
|
||||
apiKey = configurationService.getProperty("sherpa.romeo.apikey");
|
||||
|
||||
HttpClientBuilder builder = HttpClientBuilder.create();
|
||||
// httpclient 4.3+ doesn't appear to have any sensible defaults any more. Setting conservative defaults as
|
||||
// not to hammer the SHERPA service too much.
|
||||
@@ -41,62 +77,106 @@ public class SHERPAService {
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Search the SHERPA v2 API for a journal policy data using the supplied ISSN.
|
||||
* If the API key is missing, or the HTTP response is non-OK or does not complete
|
||||
* successfully, a simple error response will be returned.
|
||||
* Otherwise, the response body will be passed to SHERPAResponse for parsing as JSON
|
||||
* and the final result returned to the calling method
|
||||
* @param query ISSN string to pass in an "issn equals" API query
|
||||
* @return SHERPAResponse containing an error or journal policies
|
||||
*/
|
||||
public SHERPAResponse searchByJournalISSN(String query) {
|
||||
String endpoint = ConfigurationManager.getProperty("sherpa.romeo.url");
|
||||
String apiKey = ConfigurationManager.getProperty("sherpa.romeo.apikey");
|
||||
return performRequest("publication", "issn", "equals", query, 0, 1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform an API request to the SHERPA v2 API - this could be a search or a get for any entity type
|
||||
* but the return object here must be a SHERPAPublisherResponse not the journal-centric SHERPAResponse
|
||||
* For more information about the type, field and predicate arguments, see the SHERPA v2 API documentation
|
||||
* @param type entity type eg "publisher"
|
||||
* @param field field eg "issn" or "title"
|
||||
* @param predicate predicate eg "equals" or "contains-word"
|
||||
* @param value the actual value to search for (eg an ISSN or partial title)
|
||||
* @param start start / offset of search results
|
||||
* @param limit maximum search results to return
|
||||
* @return SHERPAPublisherResponse object
|
||||
*/
|
||||
public SHERPAPublisherResponse performPublisherRequest(String type, String field, String predicate, String value,
|
||||
int start, int limit) {
|
||||
// API Key is *required* for v2 API calls
|
||||
if (null == apiKey) {
|
||||
log.error("SHERPA ROMeO API Key missing: please register for an API key and set sherpa.romeo.apikey");
|
||||
return new SHERPAPublisherResponse("SHERPA/RoMEO configuration invalid or missing");
|
||||
}
|
||||
|
||||
HttpGet method = null;
|
||||
SHERPAResponse sherpaResponse = null;
|
||||
SHERPAPublisherResponse sherpaResponse = null;
|
||||
int numberOfTries = 0;
|
||||
|
||||
while (numberOfTries < maxNumberOfTries && sherpaResponse == null) {
|
||||
numberOfTries++;
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug(String.format(
|
||||
"Trying to contact SHERPA/RoMEO - attempt %d of %d; timeout is %d; sleep between timeouts is %d",
|
||||
numberOfTries,
|
||||
maxNumberOfTries,
|
||||
timeout,
|
||||
sleepBetweenTimeouts));
|
||||
}
|
||||
log.debug(String.format(
|
||||
"Trying to contact SHERPA/RoMEO - attempt %d of %d; timeout is %d; sleep between timeouts is %d",
|
||||
numberOfTries,
|
||||
maxNumberOfTries,
|
||||
timeout,
|
||||
sleepBetweenTimeouts));
|
||||
|
||||
try {
|
||||
Thread.sleep(sleepBetweenTimeouts);
|
||||
|
||||
URIBuilder uriBuilder = new URIBuilder(endpoint);
|
||||
uriBuilder.addParameter("issn", query);
|
||||
uriBuilder.addParameter("versions", "all");
|
||||
if (StringUtils.isNotBlank(apiKey)) {
|
||||
uriBuilder.addParameter("ak", apiKey);
|
||||
}
|
||||
|
||||
method = new HttpGet(uriBuilder.build());
|
||||
method.setConfig(RequestConfig.custom()
|
||||
.setConnectionRequestTimeout(timeout)
|
||||
.setConnectTimeout(timeout)
|
||||
.setSocketTimeout(timeout)
|
||||
.build());
|
||||
// Execute the method.
|
||||
// Construct a default HTTP method (first result)
|
||||
method = constructHttpGet(type, field, predicate, value, start, limit);
|
||||
|
||||
// Execute the method
|
||||
HttpResponse response = client.execute(method);
|
||||
int statusCode = response.getStatusLine().getStatusCode();
|
||||
|
||||
log.debug(response.getStatusLine().getStatusCode() + ": "
|
||||
+ response.getStatusLine().getReasonPhrase());
|
||||
|
||||
if (statusCode != HttpStatus.SC_OK) {
|
||||
sherpaResponse = new SHERPAResponse("SHERPA/RoMEO return not OK status: "
|
||||
+ statusCode);
|
||||
sherpaResponse = new SHERPAPublisherResponse("SHERPA/RoMEO return not OK status: "
|
||||
+ statusCode);
|
||||
String errorBody = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
|
||||
log.error("Error from SHERPA HTTP request: " + errorBody);
|
||||
}
|
||||
|
||||
HttpEntity responseBody = response.getEntity();
|
||||
|
||||
// If the response body is valid, pass to SHERPAResponse for parsing as JSON
|
||||
if (null != responseBody) {
|
||||
sherpaResponse = new SHERPAResponse(responseBody.getContent());
|
||||
log.debug("Non-null SHERPA resonse received for query of " + value);
|
||||
InputStream content = null;
|
||||
try {
|
||||
content = responseBody.getContent();
|
||||
sherpaResponse =
|
||||
new SHERPAPublisherResponse(content, SHERPAPublisherResponse.SHERPAFormat.JSON);
|
||||
} catch (IOException e) {
|
||||
log.error("Encountered exception while contacting SHERPA/RoMEO: " + e.getMessage(), e);
|
||||
} finally {
|
||||
if (content != null) {
|
||||
content.close();
|
||||
}
|
||||
}
|
||||
} else {
|
||||
sherpaResponse = new SHERPAResponse("SHERPA/RoMEO returned no response");
|
||||
log.debug("Empty SHERPA response body for query on " + value);
|
||||
sherpaResponse = new SHERPAPublisherResponse("SHERPA/RoMEO returned no response");
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.warn("Encountered exception while contacting SHERPA/RoMEO: " + e.getMessage(), e);
|
||||
} catch (URISyntaxException e) {
|
||||
String errorMessage = "Error building SHERPA v2 API URI: " + e.getMessage();
|
||||
log.error(errorMessage, e);
|
||||
sherpaResponse = new SHERPAPublisherResponse(errorMessage);
|
||||
} catch (IOException e) {
|
||||
String errorMessage = "Encountered exception while contacting SHERPA/RoMEO: " + e.getMessage();
|
||||
log.error(errorMessage, e);
|
||||
sherpaResponse = new SHERPAPublisherResponse(errorMessage);
|
||||
} catch (InterruptedException e) {
|
||||
String errorMessage = "Encountered exception while sleeping thread: " + e.getMessage();
|
||||
log.error(errorMessage, e);
|
||||
sherpaResponse = new SHERPAPublisherResponse(errorMessage);
|
||||
} finally {
|
||||
if (method != null) {
|
||||
method.releaseConnection();
|
||||
@@ -105,13 +185,218 @@ public class SHERPAService {
|
||||
}
|
||||
|
||||
if (sherpaResponse == null) {
|
||||
log.debug("SHERPA response is still null");
|
||||
sherpaResponse = new SHERPAPublisherResponse(
|
||||
"Error processing the SHERPA/RoMEO answer");
|
||||
}
|
||||
|
||||
// Return the final response
|
||||
return sherpaResponse;
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform an API request to the SHERPA v2 API - this could be a search or a get for any entity type
|
||||
* For more information about the type, field and predicate arguments, see the SHERPA v2 API documentation
|
||||
* @param type entity type eg "publication" or "publisher"
|
||||
* @param field field eg "issn" or "title"
|
||||
* @param predicate predicate eg "equals" or "contains-word"
|
||||
* @param value the actual value to search for (eg an ISSN or partial title)
|
||||
* @param start start / offset of search results
|
||||
* @param limit maximum search results to return
|
||||
* @return SHERPAResponse object
|
||||
*/
|
||||
public SHERPAResponse performRequest(String type, String field, String predicate, String value,
|
||||
int start, int limit) {
|
||||
// API Key is *required* for v2 API calls
|
||||
if (null == apiKey) {
|
||||
log.error("SHERPA ROMeO API Key missing: please register for an API key and set sherpa.romeo.apikey");
|
||||
return new SHERPAResponse("SHERPA/RoMEO configuration invalid or missing");
|
||||
}
|
||||
|
||||
HttpGet method = null;
|
||||
SHERPAResponse sherpaResponse = null;
|
||||
int numberOfTries = 0;
|
||||
|
||||
while (numberOfTries < maxNumberOfTries && sherpaResponse == null) {
|
||||
numberOfTries++;
|
||||
|
||||
log.debug(String.format(
|
||||
"Trying to contact SHERPA/RoMEO - attempt %d of %d; timeout is %d; sleep between timeouts is %d",
|
||||
numberOfTries,
|
||||
maxNumberOfTries,
|
||||
timeout,
|
||||
sleepBetweenTimeouts));
|
||||
|
||||
try {
|
||||
Thread.sleep(sleepBetweenTimeouts);
|
||||
|
||||
// Construct a default HTTP method (first result)
|
||||
method = constructHttpGet(type, field, predicate, value, start, limit);
|
||||
|
||||
// Execute the method
|
||||
HttpResponse response = client.execute(method);
|
||||
int statusCode = response.getStatusLine().getStatusCode();
|
||||
|
||||
log.debug(response.getStatusLine().getStatusCode() + ": "
|
||||
+ response.getStatusLine().getReasonPhrase());
|
||||
|
||||
if (statusCode != HttpStatus.SC_OK) {
|
||||
sherpaResponse = new SHERPAResponse("SHERPA/RoMEO return not OK status: "
|
||||
+ statusCode);
|
||||
String errorBody = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
|
||||
log.error("Error from SHERPA HTTP request: " + errorBody);
|
||||
}
|
||||
|
||||
HttpEntity responseBody = response.getEntity();
|
||||
|
||||
// If the response body is valid, pass to SHERPAResponse for parsing as JSON
|
||||
if (null != responseBody) {
|
||||
log.debug("Non-null SHERPA resonse received for query of " + value);
|
||||
InputStream content = null;
|
||||
try {
|
||||
content = responseBody.getContent();
|
||||
sherpaResponse = new SHERPAResponse(content, SHERPAResponse.SHERPAFormat.JSON);
|
||||
} catch (IOException e) {
|
||||
log.error("Encountered exception while contacting SHERPA/RoMEO: " + e.getMessage(), e);
|
||||
} finally {
|
||||
if (content != null) {
|
||||
content.close();
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log.debug("Empty SHERPA response body for query on " + value);
|
||||
sherpaResponse = new SHERPAResponse("SHERPA/RoMEO returned no response");
|
||||
}
|
||||
} catch (URISyntaxException e) {
|
||||
String errorMessage = "Error building SHERPA v2 API URI: " + e.getMessage();
|
||||
log.error(errorMessage, e);
|
||||
sherpaResponse = new SHERPAResponse(errorMessage);
|
||||
} catch (IOException e) {
|
||||
String errorMessage = "Encountered exception while contacting SHERPA/RoMEO: " + e.getMessage();
|
||||
log.error(errorMessage, e);
|
||||
sherpaResponse = new SHERPAResponse(errorMessage);
|
||||
} catch (InterruptedException e) {
|
||||
String errorMessage = "Encountered exception while sleeping thread: " + e.getMessage();
|
||||
log.error(errorMessage, e);
|
||||
sherpaResponse = new SHERPAResponse(errorMessage);
|
||||
} finally {
|
||||
if (method != null) {
|
||||
method.releaseConnection();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (sherpaResponse == null) {
|
||||
log.debug("SHERPA response is still null");
|
||||
sherpaResponse = new SHERPAResponse(
|
||||
"Error processing the SHERPA/RoMEO answer");
|
||||
}
|
||||
|
||||
// Return the final response
|
||||
return sherpaResponse;
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct HTTP GET object for a "field,predicate,value" query with default start, limit
|
||||
* eg. "title","contains-word","Lancet" or "issn","equals","1234-1234"
|
||||
* @param field the field (issn, title, etc)
|
||||
* @param predicate the predicate (contains-word, equals, etc - see API docs)
|
||||
* @param value the query value itself
|
||||
* @return HttpGet method which can then be executed by the client
|
||||
* @throws URISyntaxException if the URL build fails
|
||||
*/
|
||||
public HttpGet constructHttpGet(String type, String field, String predicate, String value)
|
||||
throws URISyntaxException {
|
||||
return constructHttpGet(type, field, predicate, value, 0, 1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct HTTP GET object for a "field,predicate,value" query
|
||||
* eg. "title","contains-word","Lancet" or "issn","equals","1234-1234"
|
||||
* @param field the field (issn, title, etc)
|
||||
* @param predicate the predicate (contains-word, equals, etc - see API docs)
|
||||
* @param value the query value itself
|
||||
* @param start row offset
|
||||
* @param limit number of results to return
|
||||
* @return HttpGet object to be executed by the client
|
||||
* @throws URISyntaxException
|
||||
*/
|
||||
public HttpGet constructHttpGet(String type, String field, String predicate, String value, int start, int limit)
|
||||
throws URISyntaxException {
|
||||
// Sanitise query string (strip some characters) field, predicate and value
|
||||
if (null == type) {
|
||||
type = "publication";
|
||||
}
|
||||
field = SHERPAUtils.sanitiseQuery(field);
|
||||
predicate = SHERPAUtils.sanitiseQuery(predicate);
|
||||
value = SHERPAUtils.sanitiseQuery(value);
|
||||
type = SHERPAUtils.sanitiseQuery(type);
|
||||
|
||||
// Build URL based on search query
|
||||
URIBuilder uriBuilder = new URIBuilder(endpoint);
|
||||
uriBuilder.addParameter("item-type", type);
|
||||
uriBuilder.addParameter("filter", "[[\"" + field + "\",\"" + predicate + "\",\"" + value + "\"]]");
|
||||
uriBuilder.addParameter("format", "Json");
|
||||
// Set optional start (offset) and limit parameters
|
||||
if (start >= 0) {
|
||||
uriBuilder.addParameter("offset", String.valueOf(start));
|
||||
}
|
||||
if (limit > 0) {
|
||||
uriBuilder.addParameter("limit", String.valueOf(limit));
|
||||
}
|
||||
if (StringUtils.isNotBlank(apiKey)) {
|
||||
uriBuilder.addParameter("api-key", apiKey);
|
||||
}
|
||||
|
||||
log.debug("SHERPA API URL: " + uriBuilder.toString());
|
||||
|
||||
// Create HTTP GET object
|
||||
HttpGet method = new HttpGet(uriBuilder.build());
|
||||
|
||||
// Set connection parameters
|
||||
int timeout = 5000;
|
||||
method.setConfig(RequestConfig.custom()
|
||||
.setConnectionRequestTimeout(timeout)
|
||||
.setConnectTimeout(timeout)
|
||||
.setSocketTimeout(timeout)
|
||||
.build());
|
||||
|
||||
return method;
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepare the API query for execution by the HTTP client
|
||||
* @param query ISSN query string
|
||||
* @param endpoint API endpoint (base URL)
|
||||
* @param apiKey API key parameter
|
||||
* @return URI object
|
||||
* @throws URISyntaxException
|
||||
*/
|
||||
public URI prepareQuery(String query, String endpoint, String apiKey) throws URISyntaxException {
|
||||
// Sanitise query string
|
||||
query = SHERPAUtils.sanitiseQuery(query);
|
||||
|
||||
// Instantiate URI builder
|
||||
URIBuilder uriBuilder = new URIBuilder(endpoint);
|
||||
|
||||
// Build URI parameters from supplied values
|
||||
uriBuilder.addParameter("item-type", "publication");
|
||||
|
||||
// Log warning if no query is supplied
|
||||
if (null == query) {
|
||||
log.warn("No ISSN supplied as query string for SHERPA service search");
|
||||
}
|
||||
uriBuilder.addParameter("filter", "[[\"issn\",\"equals\",\"" + query + "\"]]");
|
||||
uriBuilder.addParameter("format", "Json");
|
||||
if (StringUtils.isNotBlank(apiKey)) {
|
||||
uriBuilder.addParameter("api-key", apiKey);
|
||||
}
|
||||
log.debug("Would search SHERPA endpoint with " + uriBuilder.toString());
|
||||
|
||||
// Return final built URI
|
||||
return uriBuilder.build();
|
||||
}
|
||||
|
||||
public void setMaxNumberOfTries(int maxNumberOfTries) {
|
||||
this.maxNumberOfTries = maxNumberOfTries;
|
||||
}
|
||||
|
@@ -7,49 +7,111 @@
|
||||
*/
|
||||
package org.dspace.app.sherpa.submit;
|
||||
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.sherpa.SHERPAResponse;
|
||||
import org.dspace.app.sherpa.SHERPAService;
|
||||
import org.dspace.app.sherpa.v2.SHERPAResponse;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.LogManager;
|
||||
|
||||
/**
|
||||
* SHERPASubmitService is
|
||||
* @see
|
||||
* @author Kim Shepherd
|
||||
*/
|
||||
public class SHERPASubmitService {
|
||||
private SHERPAService sherpaService;
|
||||
|
||||
private SHERPASubmitConfigurationService configuration;
|
||||
/**
|
||||
* Spring beans for configuration and API service
|
||||
*/
|
||||
protected SHERPAService sherpaService;
|
||||
protected SHERPASubmitConfigurationService configuration;
|
||||
|
||||
/**
|
||||
* log4j logger
|
||||
*/
|
||||
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(SHERPASubmitService.class);
|
||||
|
||||
/**
|
||||
* Setter for configuration (from Spring)
|
||||
* @see "dspace-dspace-addon-sherpa-configuration-services.xml"
|
||||
* @param configuration
|
||||
*/
|
||||
public void setConfiguration(SHERPASubmitConfigurationService configuration) {
|
||||
this.configuration = configuration;
|
||||
}
|
||||
|
||||
/**
|
||||
* Setter for SHERPA service, reponsible for actual HTTP API calls
|
||||
* @see "dspace-dspace-addon-sherpa-configuration-services.xml"
|
||||
* @param sherpaService
|
||||
*/
|
||||
public void setSherpaService(SHERPAService sherpaService) {
|
||||
this.sherpaService = sherpaService;
|
||||
}
|
||||
|
||||
public SHERPAResponse searchRelatedJournals(Context context, Item item) {
|
||||
/**
|
||||
* Search SHERPA for journal policies matching the ISSNs in the item.
|
||||
* Rather than a 'search' query for any/all ISSNs, the v2 API requires a separate
|
||||
* query for each ISSN found in the item. The ISSNs are extracted using the configured
|
||||
* issnItemExtractor(s) in the SHERPA spring configuration.
|
||||
* The ISSNs are not validated with a regular expression or other rules - any values
|
||||
* extracted will be included in API queries.
|
||||
* @see "dspace-dspace-addon-sherpa-configuration-services.xml"
|
||||
* @param context DSpace context
|
||||
* @param item DSpace item containing ISSNs to be checked
|
||||
* @return SHERPA v2 API response (policy data)
|
||||
*/
|
||||
public List<SHERPAResponse> searchRelatedJournals(Context context, Item item) {
|
||||
Set<String> issns = getISSNs(context, item);
|
||||
if (issns == null || issns.size() == 0) {
|
||||
return null;
|
||||
} else {
|
||||
return sherpaService.searchByJournalISSN(StringUtils.join(issns, ","));
|
||||
// SHERPA v2 API no longer supports "OR'd" ISSN search, perform individual searches instead
|
||||
Iterator<String> issnIterator = issns.iterator();
|
||||
List<SHERPAResponse> responses = new LinkedList<>();
|
||||
while (issnIterator.hasNext()) {
|
||||
String issn = issnIterator.next();
|
||||
SHERPAResponse response = sherpaService.searchByJournalISSN(issn);
|
||||
if (response.isError()) {
|
||||
// Continue with loop
|
||||
log.warn("Failed to look up SHERPA ROMeO result for ISSN: " + issn
|
||||
+ ": " + response.getMessage());
|
||||
}
|
||||
// Store this response, even if it has an error (useful for UI reporting)
|
||||
responses.add(response);
|
||||
}
|
||||
if (responses.isEmpty()) {
|
||||
responses.add(new SHERPAResponse("SHERPA ROMeO lookup failed"));
|
||||
}
|
||||
return responses;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Search SHERPA for journal policies matching the passed ISSN.
|
||||
* The ISSN are not validated with a regular expression or other rules - any String
|
||||
* passed to this method will be considered an ISSN for the purposes of an API query
|
||||
* @param issn ISSN string
|
||||
* @return SHERPA v2 API response object (policy data)
|
||||
*/
|
||||
public SHERPAResponse searchRelatedJournalsByISSN(String issn) {
|
||||
return sherpaService.searchByJournalISSN(issn);
|
||||
}
|
||||
|
||||
/**
|
||||
* Using the configured itemIssnExtractors from SHERPA configuration, extract
|
||||
* ISSNs from item metadata or authority values
|
||||
* @param context DSpace context
|
||||
* @param item Item containing metadata / authority values
|
||||
* @return Set of ISSN strings
|
||||
*/
|
||||
public Set<String> getISSNs(Context context, Item item) {
|
||||
Set<String> issns = new LinkedHashSet<String>();
|
||||
if (configuration.getIssnItemExtractors() == null) {
|
||||
@@ -68,6 +130,13 @@ public class SHERPASubmitService {
|
||||
return issns;
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple boolean test that runs the getISSNs extraction method
|
||||
* to determine whether an item has any ISSNs at all
|
||||
* @param context DSpace context
|
||||
* @param item Item to test
|
||||
* @return boolean indicating presence of >=1 ISSNs
|
||||
*/
|
||||
public boolean hasISSNs(Context context, Item item) {
|
||||
Set<String> issns = getISSNs(context, item);
|
||||
if (issns == null || issns.size() == 0) {
|
||||
|
@@ -0,0 +1,111 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa.v2;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Plain java representation of a SHERPA Journal object, based on SHERPA API v2 responses.
|
||||
*
|
||||
* In a SHERPA search for journal deposit policies, this is generally structured
|
||||
* as a list in the SHERPAResponse object.
|
||||
* Each journal contains a list of publisher data and list of publishing policies as well as basic metadata
|
||||
* about the journal such as ISSNs, titles, whether it appears in DOAJ, primary publisher, etc.
|
||||
* @see SHERPAResponse
|
||||
* @see org.dspace.external.provider.impl.SHERPAv2JournalDataProvider
|
||||
*
|
||||
* @author Kim Shepherd
|
||||
*/
|
||||
public class SHERPAJournal {
|
||||
|
||||
private List<String> titles;
|
||||
private String url;
|
||||
private List<String> issns;
|
||||
private String romeoPub;
|
||||
private String zetoPub;
|
||||
private SHERPAPublisher publisher;
|
||||
private List<SHERPAPublisher> publishers;
|
||||
private List<SHERPAPublisherPolicy> policies;
|
||||
private Boolean inDOAJ;
|
||||
|
||||
public SHERPAJournal() {
|
||||
|
||||
}
|
||||
|
||||
public List<String> getTitles() {
|
||||
return titles;
|
||||
}
|
||||
|
||||
public void setTitles(List<String> titles) {
|
||||
this.titles = titles;
|
||||
}
|
||||
|
||||
public String getUrl() {
|
||||
return url;
|
||||
}
|
||||
|
||||
public void setUrl(String url) {
|
||||
this.url = url;
|
||||
}
|
||||
|
||||
public List<String> getIssns() {
|
||||
return issns;
|
||||
}
|
||||
|
||||
public void setIssns(List<String> issns) {
|
||||
this.issns = issns;
|
||||
}
|
||||
|
||||
public String getRomeoPub() {
|
||||
return romeoPub;
|
||||
}
|
||||
|
||||
public void setRomeoPub(String romeoPub) {
|
||||
this.romeoPub = romeoPub;
|
||||
}
|
||||
|
||||
public String getZetoPub() {
|
||||
return zetoPub;
|
||||
}
|
||||
|
||||
public void setZetoPub(String zetoPub) {
|
||||
this.zetoPub = zetoPub;
|
||||
}
|
||||
|
||||
public SHERPAPublisher getPublisher() {
|
||||
return publisher;
|
||||
}
|
||||
|
||||
public void setPublisher(SHERPAPublisher publisher) {
|
||||
this.publisher = publisher;
|
||||
}
|
||||
|
||||
public List<SHERPAPublisher> getPublishers() {
|
||||
return publishers;
|
||||
}
|
||||
|
||||
public void setPublishers(List<SHERPAPublisher> publishers) {
|
||||
this.publishers = publishers;
|
||||
}
|
||||
|
||||
public List<SHERPAPublisherPolicy> getPolicies() {
|
||||
return policies;
|
||||
}
|
||||
|
||||
public void setPolicies(List<SHERPAPublisherPolicy> policies) {
|
||||
this.policies = policies;
|
||||
}
|
||||
|
||||
public Boolean getInDOAJ() {
|
||||
return inDOAJ;
|
||||
}
|
||||
|
||||
public void setInDOAJ(Boolean inDOAJ) {
|
||||
this.inDOAJ = inDOAJ;
|
||||
}
|
||||
}
|
@@ -0,0 +1,118 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa.v2;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Plain java representation of a SHERPA Permitted Version object, based on SHERPA API v2 responses.
|
||||
*
|
||||
* In a SHERPA search for journal deposit policies, this data is contained within a publisher policy.
|
||||
* Each permitted version is for a particular article version (eg. submitted, accepted, published) and contains
|
||||
*
|
||||
* A list of general conditions / terms for deposit of this version of work
|
||||
* A list of allowed locations (eg. institutional repository, personal homepage, non-commercial repository)
|
||||
* A list of prerequisite conditions for deposit (eg. attribution, linking to published version)
|
||||
* A list of required licences for the deposited work (eg. CC-BY-NC)
|
||||
* Embargo requirements, if any
|
||||
*
|
||||
* This class also has some helper data for labels, which can be used with i18n when displaying policy information
|
||||
*
|
||||
* @see SHERPAPublisherPolicy
|
||||
*/
|
||||
public class SHERPAPermittedVersion {
|
||||
|
||||
// Version (submitted, accepted, published)
|
||||
private String articleVersion;
|
||||
|
||||
// Version label
|
||||
private String articleVersionLabel;
|
||||
|
||||
// Option number
|
||||
private int option;
|
||||
|
||||
// General conditions
|
||||
private List<String> conditions;
|
||||
// Prerequisites (eg. if required by funder)
|
||||
private List<String> prerequisites;
|
||||
// Allowed locations
|
||||
private List<String> locations;
|
||||
// Required license(s)
|
||||
private List<String> licenses;
|
||||
// Embargo
|
||||
private SHERPAEmbargo embargo;
|
||||
|
||||
protected class SHERPAEmbargo {
|
||||
String units;
|
||||
int amount;
|
||||
}
|
||||
|
||||
public String getArticleVersion() {
|
||||
return articleVersion;
|
||||
}
|
||||
|
||||
public void setArticleVersion(String articleVersion) {
|
||||
this.articleVersion = articleVersion;
|
||||
}
|
||||
|
||||
public List<String> getConditions() {
|
||||
return conditions;
|
||||
}
|
||||
|
||||
public void setConditions(List<String> conditions) {
|
||||
this.conditions = conditions;
|
||||
}
|
||||
|
||||
public List<String> getPrerequisites() {
|
||||
return prerequisites;
|
||||
}
|
||||
|
||||
public void setPrerequisites(List<String> prerequisites) {
|
||||
this.prerequisites = prerequisites;
|
||||
}
|
||||
|
||||
public List<String> getLocations() {
|
||||
return locations;
|
||||
}
|
||||
|
||||
public void setLocations(List<String> locations) {
|
||||
this.locations = locations;
|
||||
}
|
||||
|
||||
public List<String> getLicenses() {
|
||||
return licenses;
|
||||
}
|
||||
|
||||
public void setLicenses(List<String> licenses) {
|
||||
this.licenses = licenses;
|
||||
}
|
||||
|
||||
public SHERPAEmbargo getEmbargo() {
|
||||
return embargo;
|
||||
}
|
||||
|
||||
public void setEmbargo(SHERPAEmbargo embargo) {
|
||||
this.embargo = embargo;
|
||||
}
|
||||
|
||||
public int getOption() {
|
||||
return option;
|
||||
}
|
||||
|
||||
public void setOption(int option) {
|
||||
this.option = option;
|
||||
}
|
||||
|
||||
public String getArticleVersionLabel() {
|
||||
return articleVersionLabel;
|
||||
}
|
||||
|
||||
public void setArticleVersionLabel(String articleVersionLabel) {
|
||||
this.articleVersionLabel = articleVersionLabel;
|
||||
}
|
||||
}
|
@@ -0,0 +1,100 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa.v2;
|
||||
|
||||
/**
|
||||
* Plain java representation of a SHERPA Publisher object, based on SHERPA API v2 responses.
|
||||
*
|
||||
* In a search for SHERPA journal deposit policy, this publisher object will appear in a list of publishers
|
||||
* from the journal object, and as a single publisher member for the primary/current publisher of the journal.
|
||||
* In a search for SHERPA publisher information, this object will appear in a list of publishers from the main
|
||||
* SHERPA Publisher Response object
|
||||
*
|
||||
* @see SHERPAJournal
|
||||
* @see SHERPAPublisherResponse
|
||||
*/
|
||||
public class SHERPAPublisher {
|
||||
private String name = null;
|
||||
private String relationshipType;
|
||||
private String country;
|
||||
private String uri = null;
|
||||
private String identifier = null;
|
||||
private int publicationCount;
|
||||
|
||||
// this is not technically in the same place in SHERPA data model but it makes more sense to apply it here
|
||||
// is it is treated as a 'special case' - just for printing links to paid OA access policies
|
||||
private String paidAccessDescription;
|
||||
private String paidAccessUrl;
|
||||
|
||||
public SHERPAPublisher() {
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getRelationshipType() {
|
||||
return relationshipType;
|
||||
}
|
||||
|
||||
public void setRelationshipType(String relationshipType) {
|
||||
this.relationshipType = relationshipType;
|
||||
}
|
||||
|
||||
public String getCountry() {
|
||||
return country;
|
||||
}
|
||||
|
||||
public void setCountry(String country) {
|
||||
this.country = country;
|
||||
}
|
||||
|
||||
public String getUri() {
|
||||
return uri;
|
||||
}
|
||||
|
||||
public void setUri(String uri) {
|
||||
this.uri = uri;
|
||||
}
|
||||
|
||||
public int getPublicationCount() {
|
||||
return publicationCount;
|
||||
}
|
||||
|
||||
public void setPublicationCount(int publicationCount) {
|
||||
this.publicationCount = publicationCount;
|
||||
}
|
||||
|
||||
public String getPaidAccessDescription() {
|
||||
return paidAccessDescription;
|
||||
}
|
||||
|
||||
public void setPaidAccessDescription(String paidAccessDescription) {
|
||||
this.paidAccessDescription = paidAccessDescription;
|
||||
}
|
||||
|
||||
public String getPaidAccessUrl() {
|
||||
return paidAccessUrl;
|
||||
}
|
||||
|
||||
public void setPaidAccessUrl(String paidAccessUrl) {
|
||||
this.paidAccessUrl = paidAccessUrl;
|
||||
}
|
||||
|
||||
public String getIdentifier() {
|
||||
return identifier;
|
||||
}
|
||||
|
||||
public void setIdentifier(String identifier) {
|
||||
this.identifier = identifier;
|
||||
}
|
||||
}
|
@@ -0,0 +1,128 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa.v2;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Plain java representation of a SHERPA Publisher Policy object, based on SHERPA API v2 responses.
|
||||
*
|
||||
* In a SHERPA search for deposit policies, each journal contains one or more publisher policies
|
||||
* Each publisher policies contains a list of different article versions (eg. submitted, accepted, published)
|
||||
* which hold the data about what can be done with each version.
|
||||
* This class also holds copyright URLs and other policy URLs, as well as some helper information for display
|
||||
* of overall policies in UI (as per legacy SHERPA data)
|
||||
*
|
||||
* @see SHERPAJournal
|
||||
* @see SHERPAPermittedVersion
|
||||
*/
|
||||
public class SHERPAPublisherPolicy {
|
||||
|
||||
private int id;
|
||||
private boolean openAccessPermitted;
|
||||
private String uri;
|
||||
private String internalMoniker;
|
||||
private List<SHERPAPermittedVersion> permittedVersions;
|
||||
private Map<String, String> urls;
|
||||
private boolean openAccessProhibited;
|
||||
private int publicationCount;
|
||||
|
||||
// The legacy "can" / "cannot" indicators
|
||||
private String preArchiving = "cannot";
|
||||
private String postArchiving = "cannot";
|
||||
private String pubArchiving = "cannot";
|
||||
|
||||
public int getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(int id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public boolean isOpenAccessPermitted() {
|
||||
return openAccessPermitted;
|
||||
}
|
||||
|
||||
public void setOpenAccessPermitted(boolean openAccessPermitted) {
|
||||
this.openAccessPermitted = openAccessPermitted;
|
||||
}
|
||||
|
||||
public String getUri() {
|
||||
return uri;
|
||||
}
|
||||
|
||||
public void setUri(String uri) {
|
||||
this.uri = uri;
|
||||
}
|
||||
|
||||
public String getInternalMoniker() {
|
||||
return internalMoniker;
|
||||
}
|
||||
|
||||
public void setInternalMoniker(String internalMoniker) {
|
||||
this.internalMoniker = internalMoniker;
|
||||
}
|
||||
|
||||
public List<SHERPAPermittedVersion> getPermittedVersions() {
|
||||
return permittedVersions;
|
||||
}
|
||||
|
||||
public void setPermittedVersions(List<SHERPAPermittedVersion> permittedVersions) {
|
||||
this.permittedVersions = permittedVersions;
|
||||
}
|
||||
|
||||
public Map<String, String> getUrls() {
|
||||
return urls;
|
||||
}
|
||||
|
||||
public void setUrls(Map<String, String> urls) {
|
||||
this.urls = urls;
|
||||
}
|
||||
|
||||
public boolean isOpenAccessProhibited() {
|
||||
return openAccessProhibited;
|
||||
}
|
||||
|
||||
public void setOpenAccessProhibited(boolean openAccessProhibited) {
|
||||
this.openAccessProhibited = openAccessProhibited;
|
||||
}
|
||||
|
||||
public int getPublicationCount() {
|
||||
return publicationCount;
|
||||
}
|
||||
|
||||
public void setPublicationCount(int publicationCount) {
|
||||
this.publicationCount = publicationCount;
|
||||
}
|
||||
|
||||
public String getPreArchiving() {
|
||||
return preArchiving;
|
||||
}
|
||||
|
||||
public void setPreArchiving(String preArchiving) {
|
||||
this.preArchiving = preArchiving;
|
||||
}
|
||||
|
||||
public String getPostArchiving() {
|
||||
return postArchiving;
|
||||
}
|
||||
|
||||
public void setPostArchiving(String postArchiving) {
|
||||
this.postArchiving = postArchiving;
|
||||
}
|
||||
|
||||
/**
 * @return the publisher-version archiving indicator (legacy v1-style value, e.g. "can")
 */
public String getPubArchiving() {
    return pubArchiving;
}

/**
 * @param pubArchiving the publisher-version archiving indicator to set
 */
public void setPubArchiving(String pubArchiving) {
    this.pubArchiving = pubArchiving;
}
|
||||
}
|
@@ -0,0 +1,223 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa.v2;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.log4j.Logger;
|
||||
import org.json.JSONArray;
|
||||
import org.json.JSONException;
|
||||
import org.json.JSONObject;
|
||||
import org.json.JSONTokener;
|
||||
|
||||
/**
|
||||
* Model class for the SHERPAv2 API (JSON) response for a publisher search
|
||||
* The structure and approached used is quite different to the simple v1 API used previously
|
||||
*
|
||||
* @see SHERPAPublisher
|
||||
*
|
||||
* @author Kim Shepherd
|
||||
*
|
||||
*/
|
||||
public class SHERPAPublisherResponse {
|
||||
// Is this response to be treated as an error?
|
||||
private boolean error;
|
||||
|
||||
// Error message
|
||||
private String message;
|
||||
|
||||
// Parsed system metadata from search results
|
||||
private SHERPASystemMetadata metadata;
|
||||
|
||||
// List of parsed publisher results
|
||||
private List<SHERPAPublisher> publishers;
|
||||
|
||||
// Internal Sherpa ID
|
||||
private int id;
|
||||
|
||||
// SHERPA URI (the human page version of this API response)
|
||||
private String uri;
|
||||
|
||||
// Format enum - currently only JSON is supported
|
||||
public enum SHERPAFormat {
|
||||
JSON, XML
|
||||
};
|
||||
|
||||
private static Logger log = Logger.getLogger(SHERPAPublisherResponse.class);
|
||||
|
||||
/**
|
||||
* Parse SHERPA v2 API for a given format
|
||||
* @param input - input stream from the HTTP response content
|
||||
* @param format - requested format
|
||||
* @throws IOException
|
||||
*/
|
||||
public SHERPAPublisherResponse(InputStream input, SHERPAFormat format) throws IOException {
|
||||
if (format == SHERPAFormat.JSON) {
|
||||
parseJSON(input);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the SHERPA v2 API JSON and construct simple list of publisher objects
|
||||
* This method does not return a value, but rather populates the metadata and publishers objects
|
||||
* with data parsed from the JSON.
|
||||
* @param jsonData - the JSON input stream from the API result response body
|
||||
*/
|
||||
private void parseJSON(InputStream jsonData) throws IOException {
|
||||
InputStreamReader streamReader = new InputStreamReader(jsonData);
|
||||
JSONTokener jsonTokener = new JSONTokener(streamReader);
|
||||
JSONObject httpResponse;
|
||||
try {
|
||||
httpResponse = new JSONObject(jsonTokener);
|
||||
if (httpResponse.has("items")) {
|
||||
JSONArray items = httpResponse.getJSONArray("items");
|
||||
|
||||
// items array in this context is publisher results - parsing is more simple than
|
||||
// parsing the full journal / policy responses
|
||||
if (items.length() > 0) {
|
||||
metadata = new SHERPASystemMetadata();
|
||||
this.publishers = new LinkedList<>();
|
||||
// Iterate search result items
|
||||
for (int itemIndex = 0; itemIndex < items.length(); itemIndex++) {
|
||||
SHERPAPublisher sherpaPublisher = new SHERPAPublisher();
|
||||
|
||||
JSONObject item = items.getJSONObject(0);
|
||||
|
||||
// Parse system metadata (per-item / result information)
|
||||
if (item.has("system_metadata")) {
|
||||
JSONObject systemMetadata = item.getJSONObject("system_metadata");
|
||||
metadata = parseSystemMetadata(systemMetadata);
|
||||
if (metadata.getId() >= 0) {
|
||||
// Set publisher identifier to be the internal SHERPA ID
|
||||
// eg. '30' (Elsevier)
|
||||
sherpaPublisher.setIdentifier(String.valueOf(metadata.getId()));
|
||||
}
|
||||
}
|
||||
|
||||
// Set publisher name
|
||||
sherpaPublisher.setName(parsePublisherName(item));
|
||||
|
||||
// Set publisher URL
|
||||
sherpaPublisher.setUri(parsePublisherURL(item));
|
||||
|
||||
this.publishers.add(sherpaPublisher);
|
||||
}
|
||||
|
||||
} else {
|
||||
error = true;
|
||||
message = "No results found";
|
||||
}
|
||||
} else {
|
||||
error = true;
|
||||
message = "No results found";
|
||||
}
|
||||
|
||||
} catch (JSONException e) {
|
||||
log.error("Failed to parse SHERPA response", e);
|
||||
error = true;
|
||||
} finally {
|
||||
streamReader.close();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse system metadata and return populated SHERPASystemMetadata object
|
||||
* @param systemMetadata
|
||||
*/
|
||||
private SHERPASystemMetadata parseSystemMetadata(JSONObject systemMetadata) {
|
||||
|
||||
SHERPASystemMetadata metadata = new SHERPASystemMetadata();
|
||||
|
||||
if (systemMetadata.has("uri")) {
|
||||
this.uri = systemMetadata.getString("uri");
|
||||
metadata.setUri(this.uri);
|
||||
} else {
|
||||
log.error("SHERPA URI missing for API response item");
|
||||
}
|
||||
if (systemMetadata.has("id")) {
|
||||
this.id = systemMetadata.getInt("id");
|
||||
metadata.setId(this.id);
|
||||
} else {
|
||||
log.error("SHERPA internal ID missing for API response item");
|
||||
}
|
||||
// Get date created and added - DSpace expects this in the publisher object, though
|
||||
if (systemMetadata.has("date_created")) {
|
||||
metadata.setDateCreated(systemMetadata.getString("date_created"));
|
||||
}
|
||||
if (systemMetadata.has("date_modified")) {
|
||||
metadata.setDateModified(systemMetadata.getString("date_modified"));
|
||||
}
|
||||
|
||||
return metadata;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse publisher array and return the first name string found
|
||||
* @param publisher - array of publisher JSON data
|
||||
* @return first publisher name found (trimmed String)
|
||||
*/
|
||||
private String parsePublisherName(JSONObject publisher) {
|
||||
String name = null;
|
||||
if (publisher.has("name")) {
|
||||
JSONArray publisherNames = publisher.getJSONArray("name");
|
||||
if (publisherNames.length() > 0) {
|
||||
JSONObject publisherName = publisherNames.getJSONObject(0);
|
||||
if (publisherName.has("name")) {
|
||||
name = publisherName.getString("name").trim();
|
||||
}
|
||||
}
|
||||
}
|
||||
return name;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Parse publisher URL from the json data
|
||||
* @param publisher - publisher object (from JSON array)
|
||||
* @return publisher URL as string
|
||||
*/
|
||||
private String parsePublisherURL(JSONObject publisher) {
|
||||
if (publisher.has("url")) {
|
||||
return publisher.getString("url");
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new response object to be handled as an error
|
||||
* @param message - the message to render in logs or error pages
|
||||
*/
|
||||
public SHERPAPublisherResponse(String message) {
|
||||
this.message = message;
|
||||
this.error = true;
|
||||
}
|
||||
|
||||
public boolean isError() {
|
||||
return error;
|
||||
}
|
||||
|
||||
public String getMessage() {
|
||||
return message;
|
||||
}
|
||||
|
||||
public void setMessage(String message) {
|
||||
this.message = message;
|
||||
}
|
||||
|
||||
public SHERPASystemMetadata getMetadata() {
|
||||
return metadata;
|
||||
}
|
||||
|
||||
public List<SHERPAPublisher> getPublishers() {
|
||||
return publishers;
|
||||
}
|
||||
}
|
@@ -0,0 +1,557 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa.v2;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.util.ArrayList;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.TreeMap;
|
||||
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.core.I18nUtil;
|
||||
import org.json.JSONArray;
|
||||
import org.json.JSONException;
|
||||
import org.json.JSONObject;
|
||||
import org.json.JSONTokener;
|
||||
|
||||
/**
|
||||
* Model class for the SHERPAv2 API (JSON) response for a publication (journal) search
|
||||
* The structure and approached used is quite different to the simple v1 API used previously
|
||||
* The structure is based on journal data, which in turn contains data about publishers and policies
|
||||
*
|
||||
* @see SHERPAJournal
|
||||
*
|
||||
* @author Kim Shepherd
|
||||
*
|
||||
*/
|
||||
public class SHERPAResponse {
|
||||
// Is this response to be treated as an error?
|
||||
private boolean error;
|
||||
|
||||
// Error message
|
||||
private String message;
|
||||
|
||||
// Parsed system metadata from search results
|
||||
private SHERPASystemMetadata metadata;
|
||||
|
||||
// List of parsed journal results
|
||||
private List<SHERPAJournal> journals;
|
||||
|
||||
// Internal Sherpa ID
|
||||
private int id;
|
||||
|
||||
// SHERPA URI (the human page version of this API response)
|
||||
private String uri;
|
||||
|
||||
// Format enum - currently only JSON is supported
|
||||
public enum SHERPAFormat {
|
||||
JSON, XML
|
||||
};
|
||||
|
||||
private static Logger log = Logger.getLogger(SHERPAResponse.class);
|
||||
|
||||
/**
|
||||
* Parse SHERPA v2 API for a given format
|
||||
* @param input - input stream from the HTTP response content
|
||||
* @param format - requested format
|
||||
* @throws IOException
|
||||
*/
|
||||
public SHERPAResponse(InputStream input, SHERPAFormat format) throws IOException {
|
||||
if (format == SHERPAFormat.JSON) {
|
||||
parseJSON(input);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the SHERPA v2 API JSON and construct Romeo policy data for display
|
||||
* This method does not return a value, but rather populates the metadata and journals objects
|
||||
* with data parsed from the JSON.
|
||||
* @param jsonData - the JSON input stream from the API result response body
|
||||
*/
|
||||
private void parseJSON(InputStream jsonData) throws IOException {
|
||||
InputStreamReader streamReader = new InputStreamReader(jsonData);
|
||||
JSONTokener jsonTokener = new JSONTokener(streamReader);
|
||||
JSONObject httpResponse;
|
||||
try {
|
||||
httpResponse = new JSONObject(jsonTokener);
|
||||
if (httpResponse.has("items")) {
|
||||
JSONArray items = httpResponse.getJSONArray("items");
|
||||
|
||||
// items array is search results, *not* journals or publishers - they are listed for each item
|
||||
// - however, we only ever want one result since we're passing an "equals ISSN" query
|
||||
if (items.length() > 0) {
|
||||
metadata = new SHERPASystemMetadata();
|
||||
this.journals = new LinkedList<>();
|
||||
// Iterate search result items
|
||||
for (int itemIndex = 0; itemIndex < items.length(); itemIndex++) {
|
||||
List<SHERPAPublisher> sherpaPublishers = new LinkedList<>();
|
||||
List<SHERPAPublisherPolicy> policies = new ArrayList<>();
|
||||
SHERPAPublisher sherpaPublisher = new SHERPAPublisher();
|
||||
SHERPAJournal sherpaJournal = new SHERPAJournal();
|
||||
|
||||
JSONObject item = items.getJSONObject(0);
|
||||
|
||||
// Parse system metadata (per-item / result information)
|
||||
if (item.has("system_metadata")) {
|
||||
JSONObject systemMetadata = item.getJSONObject("system_metadata");
|
||||
metadata = parseSystemMetadata(systemMetadata);
|
||||
}
|
||||
|
||||
// Parse "publisher policy"
|
||||
// note - most of the information that was previously under 'publisher' is now under here
|
||||
if (item.has("publisher_policy")) {
|
||||
|
||||
// Parse main publisher policies node
|
||||
JSONArray publisherPolicies = item.getJSONArray("publisher_policy");
|
||||
for (int i = 0; i < publisherPolicies.length(); i++) {
|
||||
|
||||
JSONObject policy = publisherPolicies.getJSONObject(i);
|
||||
|
||||
// Special case - quickly check the policy for the 'paid access' option
|
||||
// and continue if found, then parse the rest of the policy
|
||||
String moniker = null;
|
||||
if (policy.has("internal_moniker")) {
|
||||
moniker = policy.getString("internal_moniker");
|
||||
}
|
||||
// This seems to be usually policy(ies) for the journal proper
|
||||
// and then an "Open access option" which contains some of the info
|
||||
// that the 'paidaccess' node in the old API used to contain
|
||||
// Look for: internal_moniker = "Open access option"
|
||||
// Check if this is OA options (Paid Access) or not
|
||||
if ("Open access option".equalsIgnoreCase(moniker)) {
|
||||
log.debug("This is the Open access options policy - a special case");
|
||||
if (policy.has("urls")) {
|
||||
JSONArray urls = policy.getJSONArray("urls");
|
||||
for (int u = 0; u < urls.length(); u++) {
|
||||
JSONObject url = urls.getJSONObject(u);
|
||||
if (url.has("description") &&
|
||||
"Open Access".equalsIgnoreCase(url.getString("description"))) {
|
||||
log.debug("Found OA paid access url: " + url.getString("url"));
|
||||
sherpaPublisher.setPaidAccessDescription(url.getString("description"));
|
||||
sherpaPublisher.setPaidAccessUrl(url.getString("url"));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
// Continue the loop here - this "policy" is a bit different and we
|
||||
// don't want to add irrelevant conditions to the policy
|
||||
continue;
|
||||
}
|
||||
|
||||
// Parse the main publisher policy object and add to the list
|
||||
SHERPAPublisherPolicy sherpaPublisherPolicy = parsePublisherPolicy(policy);
|
||||
policies.add(sherpaPublisherPolicy);
|
||||
}
|
||||
|
||||
// set publisher name - note we're only looking for the first name here
|
||||
// as per previous functionality (for simple display)
|
||||
if (item.has("publishers")) {
|
||||
JSONArray publishers = item.getJSONArray("publishers");
|
||||
if (publishers.length() > 0) {
|
||||
JSONObject publisherElement = publishers.getJSONObject(0);
|
||||
if (publisherElement.has("publisher")) {
|
||||
JSONObject publisher = publisherElement.getJSONObject("publisher");
|
||||
sherpaPublisher.setName(parsePublisherName(publisher));
|
||||
sherpaPublisher.setUri(parsePublisherURL(publisher));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Parse journal data
|
||||
sherpaJournal = parseJournal(item, sherpaPublisher.getName());
|
||||
}
|
||||
|
||||
sherpaPublishers.add(sherpaPublisher);
|
||||
sherpaJournal.setPublisher(sherpaPublisher);
|
||||
sherpaJournal.setPublishers(sherpaPublishers);
|
||||
sherpaJournal.setPolicies(policies);
|
||||
this.journals.add(sherpaJournal);
|
||||
}
|
||||
|
||||
} else {
|
||||
error = true;
|
||||
message = "No results found";
|
||||
}
|
||||
} else {
|
||||
error = true;
|
||||
message = "No results found";
|
||||
}
|
||||
|
||||
} catch (JSONException e) {
|
||||
log.error("Failed to parse SHERPA response", e);
|
||||
error = true;
|
||||
} finally {
|
||||
streamReader.close();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse system metadata and return populated SHERPASystemMetadata object
|
||||
* @param systemMetadata
|
||||
*/
|
||||
private SHERPASystemMetadata parseSystemMetadata(JSONObject systemMetadata) {
|
||||
|
||||
SHERPASystemMetadata metadata = new SHERPASystemMetadata();
|
||||
|
||||
if (systemMetadata.has("uri")) {
|
||||
this.uri = systemMetadata.getString("uri");
|
||||
metadata.setUri(this.uri);
|
||||
} else {
|
||||
log.error("SHERPA URI missing for API response item");
|
||||
}
|
||||
if (systemMetadata.has("id")) {
|
||||
this.id = systemMetadata.getInt("id");
|
||||
metadata.setId(this.id);
|
||||
} else {
|
||||
log.error("SHERPA internal ID missing for API response item");
|
||||
}
|
||||
// Get date created and added - DSpace expects this in the publisher object, though
|
||||
if (systemMetadata.has("date_created")) {
|
||||
metadata.setDateCreated(systemMetadata.getString("date_created"));
|
||||
}
|
||||
if (systemMetadata.has("date_modified")) {
|
||||
metadata.setDateModified(systemMetadata.getString("date_modified"));
|
||||
}
|
||||
// Is this item publicly visible?
|
||||
if (systemMetadata.has("publicly_visible")) {
|
||||
metadata.setPubliclyVisible ("yes".equals(systemMetadata
|
||||
.getString("publicly_visible")));
|
||||
}
|
||||
// Is this item listed in the DOAJ?
|
||||
if (systemMetadata.has("listed_in_doaj")) {
|
||||
metadata.setPubliclyVisible ("yes".equals(systemMetadata
|
||||
.getString("listed_in_doaj")));
|
||||
}
|
||||
|
||||
return metadata;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse journal JSON data and return populated bean
|
||||
* This method also takes publisherName as a string to help construct some
|
||||
* legacy labels
|
||||
* @param item - the main result item JSON (which is the closest thing to an actual 'journal')
|
||||
* @param publisherName - the parsed publisher name
|
||||
* @return
|
||||
*/
|
||||
private SHERPAJournal parseJournal(JSONObject item, String publisherName) {
|
||||
|
||||
SHERPAJournal sherpaJournal = new SHERPAJournal();
|
||||
|
||||
// set journal title
|
||||
if (item.has("title")) {
|
||||
JSONArray titles = item.getJSONArray("title");
|
||||
if (titles.length() > 0) {
|
||||
List<String> titleList = new ArrayList<>();
|
||||
for (int t = 0; t < titles.length(); t++) {
|
||||
JSONObject title = titles.getJSONObject(t);
|
||||
if (title.has("title")) {
|
||||
titleList.add(title.getString("title").trim());
|
||||
}
|
||||
}
|
||||
sherpaJournal.setTitles(titleList);
|
||||
if (titleList.size() > 0) {
|
||||
// Faking this a bit based on what I'd seen - not in the API v2 data
|
||||
sherpaJournal.setRomeoPub(publisherName + ": "
|
||||
+ titleList.get(0));
|
||||
sherpaJournal.setZetoPub(publisherName + ": "
|
||||
+ titleList.get(0));
|
||||
log.debug("Found journal title: " + titleList.get(0));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Journal URL
|
||||
if (item.has("url")) {
|
||||
sherpaJournal.setUrl(item.getString("url"));
|
||||
}
|
||||
|
||||
// set ISSNs
|
||||
if (item.has("issns")) {
|
||||
JSONArray issns = item.getJSONArray("issns");
|
||||
// just get first - DSpace data model only allows for one
|
||||
List<String> issnList = new ArrayList<>();
|
||||
for (int ii = 0; ii < issns.length(); ii++) {
|
||||
JSONObject issn = issns.getJSONObject(ii);
|
||||
issnList.add(issn.getString("issn").trim());
|
||||
}
|
||||
sherpaJournal.setIssns(issnList);
|
||||
}
|
||||
|
||||
// Is the item in DOAJ?
|
||||
if (item.has("listed_in_doaj")) {
|
||||
sherpaJournal.setInDOAJ(("yes".equals(item.getString("listed_in_doaj"))));
|
||||
}
|
||||
|
||||
return sherpaJournal;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a publisher_policy JSON data and return a populated bean
|
||||
* @param policy - each publisher policy node in the JSON array
|
||||
* @return populated SHERPAPublisherPolicy object
|
||||
*/
|
||||
private SHERPAPublisherPolicy parsePublisherPolicy(JSONObject policy) {
|
||||
|
||||
SHERPAPublisherPolicy sherpaPublisherPolicy = new SHERPAPublisherPolicy();
|
||||
|
||||
// Get and set monikers
|
||||
String moniker = null;
|
||||
if (policy.has("internal_moniker")) {
|
||||
moniker = policy.getString("internal_moniker");
|
||||
sherpaPublisherPolicy.setInternalMoniker(moniker);
|
||||
}
|
||||
|
||||
// URLs (used to be Copyright Links)
|
||||
if (policy.has("urls")) {
|
||||
JSONArray urls = policy.getJSONArray("urls");
|
||||
Map<String, String> copyrightLinks = new TreeMap<>();
|
||||
for (int u = 0; u < urls.length(); u++) {
|
||||
JSONObject url = urls.getJSONObject(u);
|
||||
if (url.has("description") && url.has("url")) {
|
||||
log.debug("Setting copyright URL: " + url.getString("url"));
|
||||
copyrightLinks.put(url.getString("url"), url.getString("description"));
|
||||
}
|
||||
}
|
||||
sherpaPublisherPolicy.setUrls(copyrightLinks);
|
||||
}
|
||||
|
||||
// Permitted OA options
|
||||
int submittedOption = 0;
|
||||
int acceptedOption = 0;
|
||||
int publishedOption = 0;
|
||||
int currentOption = 0;
|
||||
if (policy.has("permitted_oa")) {
|
||||
List<String> allowed = new ArrayList<>();
|
||||
JSONArray permittedOA = policy.getJSONArray("permitted_oa");
|
||||
List<SHERPAPermittedVersion> permittedVersions = new ArrayList<>();
|
||||
|
||||
// Iterate each permitted OA version / option. The permitted_oa node is also known as a 'pathway' --
|
||||
// essentially "a way to get a work into a repository". Each pathway could refer to one article version
|
||||
// like a pre-print, or multiple versions might have the same acceptable locations and conditions.
|
||||
// As described below, where multiple versions are referenced in a single permitted_oa pathway, they will
|
||||
// be split out and treated separately. This keeps processing simple, especially later in display or
|
||||
// compliance checking when it is preferred to group / indicate rules by the article version
|
||||
for (int p = 0; p < permittedOA.length(); p++) {
|
||||
JSONObject permitted = permittedOA.getJSONObject(p);
|
||||
// Although it adds redundancy, we will treat each 'article version' within
|
||||
// the permitted_oa ("pathway") node as a separate version altogether to keep the rest of our display
|
||||
// handled nicely. This was confirmed as an appropriate approach by JISC
|
||||
if (permitted.has("article_version")) {
|
||||
JSONArray versions = permitted.getJSONArray("article_version");
|
||||
for (int v = 0; v < versions.length(); v++) {
|
||||
// Parse this permitted_oa node but specifically looking for the article_version 'v'
|
||||
SHERPAPermittedVersion permittedVersion = parsePermittedVersion(permitted, v);
|
||||
|
||||
// To determine which option # we are, inspect article versions and set
|
||||
allowed.add(permittedVersion.getArticleVersion());
|
||||
if ("submitted".equals(permittedVersion.getArticleVersion())) {
|
||||
submittedOption++;
|
||||
currentOption = submittedOption;
|
||||
} else if ("accepted".equals(permittedVersion.getArticleVersion())) {
|
||||
acceptedOption++;
|
||||
currentOption = acceptedOption;
|
||||
} else if ("published".equals(permittedVersion.getArticleVersion())) {
|
||||
publishedOption++;
|
||||
currentOption = publishedOption;
|
||||
}
|
||||
permittedVersion.setOption(currentOption);
|
||||
permittedVersions.add(permittedVersion);
|
||||
}
|
||||
}
|
||||
|
||||
// Populate the old indicators into the publisher policy object
|
||||
if (allowed.contains("submitted")) {
|
||||
sherpaPublisherPolicy.setPreArchiving("can");
|
||||
}
|
||||
if (allowed.contains("accepted")) {
|
||||
sherpaPublisherPolicy.setPostArchiving("can");
|
||||
}
|
||||
if (allowed.contains("published")) {
|
||||
sherpaPublisherPolicy.setPubArchiving("can");
|
||||
}
|
||||
|
||||
}
|
||||
sherpaPublisherPolicy.setPermittedVersions(permittedVersions);
|
||||
}
|
||||
|
||||
return sherpaPublisherPolicy;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse permitted version JSON and populate new bean from the data
|
||||
* @param permitted - each 'permitted_oa' node in the JSON array
|
||||
* @return populated SHERPAPermittedVersion object
|
||||
*/
|
||||
private SHERPAPermittedVersion parsePermittedVersion(JSONObject permitted, int index) {
|
||||
|
||||
SHERPAPermittedVersion permittedVersion = new SHERPAPermittedVersion();
|
||||
|
||||
// Get the article version, which is ultimately used for the ticks / crosses
|
||||
// in the UI display. My assumptions around translation:
|
||||
// submitted = preprint
|
||||
// accepted = postprint
|
||||
// published = pdfversion
|
||||
// These strings can be used to construct i18n messages.
|
||||
String articleVersion = "unknown";
|
||||
String versionLabel = "Unknown";
|
||||
|
||||
// Each 'permitted OA' can actually refer to multiple versions
|
||||
if (permitted.has("article_version")) {
|
||||
JSONArray versions = permitted.getJSONArray("article_version");
|
||||
|
||||
// Get one particular article version to return as a PermittedVersion. The outer loop calling this
|
||||
// is iterating all permitted_oa and permitted_oa->article_version array members
|
||||
articleVersion = versions.getString(index);
|
||||
permittedVersion.setArticleVersion(articleVersion);
|
||||
log.debug("Added allowed version: " + articleVersion + " to list");
|
||||
}
|
||||
|
||||
// Add labels for this particular article version
|
||||
if ("submitted".equals(articleVersion)) {
|
||||
versionLabel = I18nUtil.getMessage("jsp.sherpa.submitted-version-label");
|
||||
} else if ("accepted".equals(articleVersion)) {
|
||||
versionLabel = I18nUtil.getMessage("jsp.sherpa.accepted-version-label");
|
||||
} else if ("published".equals(articleVersion)) {
|
||||
versionLabel = I18nUtil.getMessage("jsp.sherpa.published-version-label");
|
||||
}
|
||||
// Set the article version label based on the i18n text set above
|
||||
permittedVersion.setArticleVersionLabel(versionLabel);
|
||||
|
||||
// These are now child arrays, in old API they were explicit like
|
||||
// "preprint restrictions", etc., and just contained text rather than data
|
||||
if (permitted.has("conditions")) {
|
||||
List<String> conditionList = new ArrayList<>();
|
||||
JSONArray conditions = permitted.getJSONArray("conditions");
|
||||
for (int c = 0; c < conditions.length(); c++) {
|
||||
conditionList.add(conditions.getString(c).trim());
|
||||
}
|
||||
permittedVersion.setConditions(conditionList);
|
||||
}
|
||||
|
||||
// Any prerequisites for this option (eg required by funder)
|
||||
List<String> prerequisites = new ArrayList<>();
|
||||
if (permitted.has("prerequisites")) {
|
||||
JSONObject prereqs = permitted.getJSONObject("prerequisites");
|
||||
if (prereqs.has("prerequisites_phrases")) {
|
||||
JSONArray phrases = prereqs.getJSONArray("prerequisites_phrases");
|
||||
for (int pp = 0; pp < phrases.length(); pp++) {
|
||||
JSONObject phrase = phrases.getJSONObject(pp);
|
||||
if (phrase.has("phrase")) {
|
||||
prerequisites.add(phrase.getString("phrase").trim());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
permittedVersion.setPrerequisites(prerequisites);
|
||||
|
||||
// Locations where this version / option may be archived
|
||||
List<String> sherpaLocations = new ArrayList<>();
|
||||
if (permitted.has("location")) {
|
||||
JSONObject locations = permitted.getJSONObject("location");
|
||||
if (locations.has("location_phrases")) {
|
||||
JSONArray locationPhrases = locations.getJSONArray("location_phrases");
|
||||
if (locationPhrases.length() > 0) {
|
||||
for (int l = 0; l < locationPhrases.length(); l++) {
|
||||
JSONObject locationPhrase = locationPhrases.getJSONObject(l);
|
||||
if (locationPhrase.has("phrase")) {
|
||||
sherpaLocations.add(locationPhrase.getString("phrase").trim());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
permittedVersion.setLocations(sherpaLocations);
|
||||
|
||||
List<String> sherpaLicenses = new ArrayList<>();
|
||||
// required licences
|
||||
if (permitted.has("license")) {
|
||||
JSONArray licences = permitted.getJSONArray("license");
|
||||
for (int l = 0; l < licences.length(); l++) {
|
||||
JSONObject licence = licences.getJSONObject(l);
|
||||
if (licence.has("license_phrases")) {
|
||||
JSONArray phrases = licence.getJSONArray("license_phrases");
|
||||
for (int ll = 0; ll < phrases.length(); ll++) {
|
||||
JSONObject phrase = phrases.getJSONObject(ll);
|
||||
if (phrase.has("phrase")) {
|
||||
sherpaLicenses.add(phrase.getString("phrase").trim());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
permittedVersion.setLicenses(sherpaLicenses);
|
||||
|
||||
return permittedVersion;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse publisher array and return the first name string found
|
||||
* @param publisher - array of publisher JSON data
|
||||
* @return first publisher name found (trimmed String)
|
||||
*/
|
||||
private String parsePublisherName(JSONObject publisher) {
|
||||
String name = null;
|
||||
if (publisher.has("name")) {
|
||||
JSONArray publisherNames = publisher.getJSONArray("name");
|
||||
if (publisherNames.length() > 0) {
|
||||
JSONObject publisherName = publisherNames.getJSONObject(0);
|
||||
if (publisherName.has("name")) {
|
||||
name = publisherName.getString("name").trim();
|
||||
}
|
||||
}
|
||||
}
|
||||
return name;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Parse publisher URL from the json data
|
||||
* @param publisher - publisher object (from JSON array)
|
||||
* @return publisher URL as string
|
||||
*/
|
||||
private String parsePublisherURL(JSONObject publisher) {
|
||||
if (publisher.has("url")) {
|
||||
return publisher.getString("url");
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new response object to be handled as an error
|
||||
* @param message - the message to render in logs or error pages
|
||||
*/
|
||||
public SHERPAResponse(String message) {
|
||||
this.message = message;
|
||||
this.error = true;
|
||||
}
|
||||
|
||||
public boolean isError() {
|
||||
return error;
|
||||
}
|
||||
|
||||
public String getMessage() {
|
||||
return message;
|
||||
}
|
||||
|
||||
public void setMessage(String message) {
|
||||
this.message = message;
|
||||
}
|
||||
|
||||
public List<SHERPAJournal> getJournals() {
|
||||
return journals;
|
||||
}
|
||||
|
||||
public SHERPASystemMetadata getMetadata() {
|
||||
return metadata;
|
||||
}
|
||||
}
|
@@ -0,0 +1,80 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa.v2;
|
||||
|
||||
/**
 * Plain Java representation of a SHERPA System Metadata object, based on SHERPA API v2 responses.
 *
 * This data is included in both journal deposit policy and publisher searches and contains basic
 * metadata about the SHERPA record and API response, eg. creation and modification dates,
 * internal IDs, permissions, etc.
 *
 * @see SHERPAResponse
 * @see SHERPAPublisherResponse
 *
 * @author Kim Shepherd
 */
public class SHERPASystemMetadata {

    // Internal SHERPA record ID
    private int id;
    // SHERPA record URI
    private String uri;
    // Record creation date (raw string as supplied by the API)
    private String dateCreated;
    // Record modification date (raw string as supplied by the API)
    private String dateModified;
    // Whether the record is publicly visible (defaults to false)
    private boolean isPubliclyVisible = false;
    // Whether the record is listed in the DOAJ (defaults to false)
    private boolean inDOAJ = false;

    /** Default constructor - all fields keep their initial values. */
    public SHERPASystemMetadata() {
    }

    /** @return the internal SHERPA record ID */
    public int getId() {
        return this.id;
    }

    /** @param id the internal SHERPA record ID to set */
    public void setId(int id) {
        this.id = id;
    }

    /** @return the SHERPA record URI */
    public String getUri() {
        return this.uri;
    }

    /** @param uri the SHERPA record URI to set */
    public void setUri(String uri) {
        this.uri = uri;
    }

    /** @return the record creation date string */
    public String getDateCreated() {
        return this.dateCreated;
    }

    /** @param dateCreated the record creation date string to set */
    public void setDateCreated(String dateCreated) {
        this.dateCreated = dateCreated;
    }

    /** @return the record modification date string */
    public String getDateModified() {
        return this.dateModified;
    }

    /** @param dateModified the record modification date string to set */
    public void setDateModified(String dateModified) {
        this.dateModified = dateModified;
    }

    /** @return true if the record is publicly visible */
    public boolean isPubliclyVisible() {
        return this.isPubliclyVisible;
    }

    /** @param publiclyVisible the public-visibility flag to set */
    public void setPubliclyVisible(boolean publiclyVisible) {
        this.isPubliclyVisible = publiclyVisible;
    }

    /** @return true if the record is listed in the DOAJ */
    public boolean isInDOAJ() {
        return this.inDOAJ;
    }

    /** @param inDOAJ the DOAJ-listing flag to set */
    public void setInDOAJ(boolean inDOAJ) {
        this.inDOAJ = inDOAJ;
    }
}
|
@@ -0,0 +1,38 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa.v2;
|
||||
|
||||
/**
|
||||
* SHERPA v2 API query handling utility methods (static). Used by external data providers and SHERPA service.
|
||||
*
|
||||
* @author Kim Shepherd
|
||||
*/
|
||||
public final class SHERPAUtils {
|
||||
|
||||
// Private constructor (since this is a Utility class)
|
||||
private SHERPAUtils() {}
|
||||
|
||||
/**
|
||||
* Sanitise a SHERPA v2 API query for some special JSON characters to help with parsing at remote end
|
||||
* Strip all these characters: "'{};
|
||||
* The URI builder used in the provider and service classes will perform URL encoding. This string
|
||||
* is the raw query submitted to the provider or service.
|
||||
* @param query query string
|
||||
* @return safe query string
|
||||
*/
|
||||
public static String sanitiseQuery(String query) {
|
||||
String safe = query;
|
||||
try {
|
||||
safe = query.replaceAll("['{}\";]", "");
|
||||
} catch (NullPointerException e) {
|
||||
safe = "";
|
||||
}
|
||||
return safe;
|
||||
}
|
||||
|
||||
}
|
@@ -23,10 +23,12 @@ import java.util.List;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
import org.apache.commons.collections4.CollectionUtils;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.lang3.ArrayUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
@@ -39,6 +41,11 @@ import org.dspace.content.service.CommunityService;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.LogManager;
|
||||
import org.dspace.discovery.DiscoverQuery;
|
||||
import org.dspace.discovery.DiscoverResult;
|
||||
import org.dspace.discovery.SearchService;
|
||||
import org.dspace.discovery.SearchServiceException;
|
||||
import org.dspace.discovery.SearchUtils;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
@@ -52,7 +59,7 @@ public class GenerateSitemaps {
|
||||
/**
|
||||
* Logger
|
||||
*/
|
||||
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(GenerateSitemaps.class);
|
||||
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(GenerateSitemaps.class);
|
||||
|
||||
private static final CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
|
||||
private static final CollectionService collectionService =
|
||||
@@ -60,6 +67,7 @@ public class GenerateSitemaps {
|
||||
private static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
|
||||
private static final ConfigurationService configurationService =
|
||||
DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
private static final SearchService searchService = SearchUtils.getSearchService();
|
||||
|
||||
/**
|
||||
* Default constructor
|
||||
@@ -69,7 +77,7 @@ public class GenerateSitemaps {
|
||||
public static void main(String[] args) throws Exception {
|
||||
final String usage = GenerateSitemaps.class.getCanonicalName();
|
||||
|
||||
CommandLineParser parser = new PosixParser();
|
||||
CommandLineParser parser = new DefaultParser();
|
||||
HelpFormatter hf = new HelpFormatter();
|
||||
|
||||
Options options = new Options();
|
||||
@@ -84,6 +92,9 @@ public class GenerateSitemaps {
|
||||
options
|
||||
.addOption("p", "ping", true,
|
||||
"ping specified search engine URL");
|
||||
options
|
||||
.addOption("d", "delete", false,
|
||||
"delete sitemaps dir and its contents");
|
||||
|
||||
CommandLine line = null;
|
||||
|
||||
@@ -105,10 +116,9 @@ public class GenerateSitemaps {
|
||||
}
|
||||
|
||||
/*
|
||||
* Sanity check -- if no sitemap generation or pinging to do, print
|
||||
* usage
|
||||
* Sanity check -- if no sitemap generation or pinging to do, or deletion, print usage
|
||||
*/
|
||||
if (line.getArgs().length != 0 || line.hasOption('b')
|
||||
if (line.getArgs().length != 0 || line.hasOption('d') || line.hasOption('b')
|
||||
&& line.hasOption('s') && !line.hasOption('g')
|
||||
&& !line.hasOption('m') && !line.hasOption('y')
|
||||
&& !line.hasOption('p')) {
|
||||
@@ -123,6 +133,10 @@ public class GenerateSitemaps {
|
||||
generateSitemaps(!line.hasOption('b'), !line.hasOption('s'));
|
||||
}
|
||||
|
||||
if (line.hasOption('d')) {
|
||||
deleteSitemaps();
|
||||
}
|
||||
|
||||
if (line.hasOption('a')) {
|
||||
pingConfiguredSearchEngines();
|
||||
}
|
||||
@@ -140,6 +154,29 @@ public class GenerateSitemaps {
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs generate-sitemaps without any params for the scheduler (task-scheduler.xml).
|
||||
*
|
||||
* @throws SQLException if a database error occurs.
|
||||
* @throws IOException if IO error occurs.
|
||||
*/
|
||||
public static void generateSitemapsScheduled() throws IOException, SQLException {
|
||||
generateSitemaps(true, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete the sitemaps directory and its contents if it exists
|
||||
* @throws IOException if IO error occurs
|
||||
*/
|
||||
public static void deleteSitemaps() throws IOException {
|
||||
File outputDir = new File(configurationService.getProperty("sitemap.dir"));
|
||||
if (!outputDir.exists() && !outputDir.isDirectory()) {
|
||||
log.error("Unable to delete sitemaps directory, doesn't exist or isn't a directort");
|
||||
} else {
|
||||
FileUtils.deleteDirectory(outputDir);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate sitemap.org protocol and/or basic HTML sitemaps.
|
||||
*
|
||||
@@ -150,14 +187,9 @@ public class GenerateSitemaps {
|
||||
* @throws IOException if IO error
|
||||
* if IO error occurs.
|
||||
*/
|
||||
public static void generateSitemaps(boolean makeHTMLMap,
|
||||
boolean makeSitemapOrg) throws SQLException, IOException {
|
||||
String sitemapStem = configurationService.getProperty("dspace.ui.url")
|
||||
+ "/sitemap";
|
||||
String htmlMapStem = configurationService.getProperty("dspace.ui.url")
|
||||
+ "/htmlmap";
|
||||
String handleURLStem = configurationService.getProperty("dspace.ui.url")
|
||||
+ "/handle/";
|
||||
public static void generateSitemaps(boolean makeHTMLMap, boolean makeSitemapOrg) throws SQLException, IOException {
|
||||
String uiURLStem = configurationService.getProperty("dspace.ui.url");
|
||||
String sitemapStem = uiURLStem + "/sitemap";
|
||||
|
||||
File outputDir = new File(configurationService.getProperty("sitemap.dir"));
|
||||
if (!outputDir.exists() && !outputDir.mkdir()) {
|
||||
@@ -168,13 +200,11 @@ public class GenerateSitemaps {
|
||||
AbstractGenerator sitemapsOrg = null;
|
||||
|
||||
if (makeHTMLMap) {
|
||||
html = new HTMLSitemapGenerator(outputDir, htmlMapStem + "?map=",
|
||||
null);
|
||||
html = new HTMLSitemapGenerator(outputDir, sitemapStem, ".html");
|
||||
}
|
||||
|
||||
if (makeSitemapOrg) {
|
||||
sitemapsOrg = new SitemapsOrgGenerator(outputDir, sitemapStem
|
||||
+ "?map=", null);
|
||||
sitemapsOrg = new SitemapsOrgGenerator(outputDir, sitemapStem, ".xml");
|
||||
}
|
||||
|
||||
Context c = new Context(Context.Mode.READ_ONLY);
|
||||
@@ -182,7 +212,7 @@ public class GenerateSitemaps {
|
||||
List<Community> comms = communityService.findAll(c);
|
||||
|
||||
for (Community comm : comms) {
|
||||
String url = handleURLStem + comm.getHandle();
|
||||
String url = uiURLStem + "/communities/" + comm.getID();
|
||||
|
||||
if (makeHTMLMap) {
|
||||
html.addURL(url, null);
|
||||
@@ -197,7 +227,7 @@ public class GenerateSitemaps {
|
||||
List<Collection> colls = collectionService.findAll(c);
|
||||
|
||||
for (Collection coll : colls) {
|
||||
String url = handleURLStem + coll.getHandle();
|
||||
String url = uiURLStem + "/collections/" + coll.getID();
|
||||
|
||||
if (makeHTMLMap) {
|
||||
html.addURL(url, null);
|
||||
@@ -214,14 +244,37 @@ public class GenerateSitemaps {
|
||||
|
||||
while (allItems.hasNext()) {
|
||||
Item i = allItems.next();
|
||||
String url = handleURLStem + i.getHandle();
|
||||
Date lastMod = i.getLastModified();
|
||||
|
||||
if (makeHTMLMap) {
|
||||
html.addURL(url, lastMod);
|
||||
}
|
||||
if (makeSitemapOrg) {
|
||||
sitemapsOrg.addURL(url, lastMod);
|
||||
DiscoverQuery entityQuery = new DiscoverQuery();
|
||||
entityQuery.setQuery("search.uniqueid:\"Item-" + i.getID() + "\" and entityType:*");
|
||||
entityQuery.addSearchField("entityType");
|
||||
|
||||
try {
|
||||
DiscoverResult discoverResult = searchService.search(c, entityQuery);
|
||||
|
||||
String url;
|
||||
if (CollectionUtils.isNotEmpty(discoverResult.getIndexableObjects())
|
||||
&& CollectionUtils.isNotEmpty(discoverResult.getSearchDocument(
|
||||
discoverResult.getIndexableObjects().get(0)).get(0).getSearchFieldValues("entityType"))
|
||||
&& StringUtils.isNotBlank(discoverResult.getSearchDocument(
|
||||
discoverResult.getIndexableObjects().get(0)).get(0).getSearchFieldValues("entityType").get(0))
|
||||
) {
|
||||
url = uiURLStem + "/entities/" + StringUtils.lowerCase(discoverResult.getSearchDocument(
|
||||
discoverResult.getIndexableObjects().get(0))
|
||||
.get(0).getSearchFieldValues("entityType").get(0)) + "/" + i.getID();
|
||||
} else {
|
||||
url = uiURLStem + "/items/" + i.getID();
|
||||
}
|
||||
Date lastMod = i.getLastModified();
|
||||
|
||||
if (makeHTMLMap) {
|
||||
html.addURL(url, lastMod);
|
||||
}
|
||||
if (makeSitemapOrg) {
|
||||
sitemapsOrg.addURL(url, lastMod);
|
||||
}
|
||||
} catch (SearchServiceException e) {
|
||||
log.error("Failed getting entitytype through solr for item " + i.getID() + ": " + e.getMessage());
|
||||
}
|
||||
|
||||
c.uncacheEntity(i);
|
||||
|
@@ -59,7 +59,7 @@ public class SitemapsOrgGenerator extends AbstractGenerator {
|
||||
|
||||
@Override
|
||||
public String getFilename(int number) {
|
||||
return "sitemap" + number + ".xml.gz";
|
||||
return "sitemap" + number + ".xml";
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -100,12 +100,12 @@ public class SitemapsOrgGenerator extends AbstractGenerator {
|
||||
|
||||
@Override
|
||||
public boolean useCompression() {
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getIndexFilename() {
|
||||
return "sitemap_index.xml.gz";
|
||||
return "sitemap_index.xml";
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@@ -16,10 +16,11 @@ import java.util.Properties;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/**
|
||||
* This class allows the running of the DSpace statistic tools
|
||||
@@ -56,7 +57,7 @@ public class CreateStatReport {
|
||||
/**
|
||||
* File suffix for log files
|
||||
*/
|
||||
private static String outputSuffix = ".dat";
|
||||
private static final String outputSuffix = ".dat";
|
||||
|
||||
/**
|
||||
* User context
|
||||
@@ -66,9 +67,6 @@ public class CreateStatReport {
|
||||
/**
|
||||
* the config file from which to configure the analyser
|
||||
*/
|
||||
private static String configFile = ConfigurationManager.getProperty("dspace.dir") +
|
||||
File.separator + "config" + File.separator +
|
||||
"dstat.cfg";
|
||||
|
||||
/**
|
||||
* Default constructor
|
||||
@@ -81,8 +79,12 @@ public class CreateStatReport {
|
||||
* Usage: java CreateStatReport -r <statistic to run>
|
||||
*/
|
||||
public static void main(String[] argv) throws Exception {
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
|
||||
// Open the statistics config file
|
||||
final String configFile = configurationService.getProperty("dspace.dir")
|
||||
+ File.separator + "config" + File.separator + "dstat.cfg";
|
||||
FileInputStream fis = new java.io.FileInputStream(new File(configFile));
|
||||
Properties config = new Properties();
|
||||
config.load(fis);
|
||||
@@ -108,11 +110,11 @@ public class CreateStatReport {
|
||||
context.turnOffAuthorisationSystem();
|
||||
|
||||
//get paths to directories
|
||||
outputLogDirectory = ConfigurationManager.getProperty("log.report.dir") + File.separator;
|
||||
outputReportDirectory = ConfigurationManager.getProperty("report.dir") + File.separator;
|
||||
outputLogDirectory = configurationService.getProperty("log.report.dir") + File.separator;
|
||||
outputReportDirectory = configurationService.getProperty("report.dir") + File.separator;
|
||||
|
||||
//read in command line variable to determine which statistic to run
|
||||
CommandLineParser parser = new PosixParser();
|
||||
CommandLineParser parser = new DefaultParser();
|
||||
Options options = new Options();
|
||||
options.addOption("r", "report", true, "report");
|
||||
CommandLine line = parser.parse(options, argv);
|
||||
@@ -405,6 +407,5 @@ public class CreateStatReport {
|
||||
System.out.println(
|
||||
"Available: <stat-initial> <stat-general> <stat-monthly> <stat-report-initial> <stat-report-general> " +
|
||||
"<stat-report-monthly>");
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
@@ -20,7 +20,8 @@ import java.util.List;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/**
|
||||
* This class provides HTML reports for the ReportGenerator class
|
||||
@@ -34,7 +35,7 @@ public class HTMLReport implements Report {
|
||||
/**
|
||||
* a list of the statistic blocks being managed by this class
|
||||
*/
|
||||
private List<Statistics> blocks = new ArrayList<Statistics>();
|
||||
private final List<Statistics> blocks = new ArrayList<>();
|
||||
|
||||
/**
|
||||
* the title for the page
|
||||
@@ -59,16 +60,23 @@ public class HTMLReport implements Report {
|
||||
/**
|
||||
* the output file to which to write aggregation data
|
||||
*/
|
||||
private String output = ConfigurationManager.getProperty("dspace.dir") +
|
||||
File.separator + "log" + File.separator + "report";
|
||||
private String output;
|
||||
|
||||
/**
|
||||
* constructor for HTML reporting
|
||||
* Output file path is set to {@code ${dspace.dir}/log/report}.
|
||||
*/
|
||||
public HTMLReport() {
|
||||
// empty constructor
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
output = configurationService.getProperty("dspace.dir")
|
||||
+ File.separator + "log" + File.separator + "report";
|
||||
}
|
||||
|
||||
/**
|
||||
* Set a non-default output file path.
|
||||
*
|
||||
* @param newOutput new path to the report.
|
||||
*/
|
||||
public void setOutput(String newOutput) {
|
||||
if (newOutput != null) {
|
||||
output = newOutput;
|
||||
@@ -82,7 +90,7 @@ public class HTMLReport implements Report {
|
||||
*/
|
||||
@Override
|
||||
public String render() {
|
||||
StringBuffer frag = new StringBuffer();
|
||||
StringBuilder frag = new StringBuilder();
|
||||
|
||||
// get the page headings
|
||||
frag.append(header(pageTitle));
|
||||
@@ -140,7 +148,7 @@ public class HTMLReport implements Report {
|
||||
* @return an HTML string providing internal page navigation
|
||||
*/
|
||||
public String navigation() {
|
||||
StringBuffer frag = new StringBuffer();
|
||||
StringBuilder frag = new StringBuilder();
|
||||
|
||||
frag.append("<div class=\"reportNavigation\">");
|
||||
frag.append("<a href=\"#general_overview\">General Overview</a>");
|
||||
@@ -173,7 +181,6 @@ public class HTMLReport implements Report {
|
||||
@Override
|
||||
public void addBlock(Statistics stat) {
|
||||
blocks.add(stat);
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
@@ -207,7 +214,7 @@ public class HTMLReport implements Report {
|
||||
*/
|
||||
@Override
|
||||
public String dateRange() {
|
||||
StringBuffer frag = new StringBuffer();
|
||||
StringBuilder frag = new StringBuilder();
|
||||
DateFormat df = DateFormat.getDateInstance();
|
||||
|
||||
frag.append("<div class=\"reportDate\">");
|
||||
@@ -255,7 +262,6 @@ public class HTMLReport implements Report {
|
||||
if (pageTitle == null) {
|
||||
pageTitle = mainTitle;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
@@ -280,7 +286,7 @@ public class HTMLReport implements Report {
|
||||
// FIXME: this need to be figured out to integrate nicely into the
|
||||
// whole JSTL thing, but for the moment it's just going to deliver
|
||||
// some styles
|
||||
StringBuffer frag = new StringBuffer();
|
||||
StringBuilder frag = new StringBuilder();
|
||||
|
||||
frag.append("<style type=\"text/css\">\n");
|
||||
frag.append("body { font-family: Arial, Helvetica, sans-serif }");
|
||||
@@ -334,7 +340,7 @@ public class HTMLReport implements Report {
|
||||
*/
|
||||
@Override
|
||||
public String statBlock(Statistics content) {
|
||||
StringBuffer frag = new StringBuffer();
|
||||
StringBuilder frag = new StringBuilder();
|
||||
Stat[] stats = content.getStats();
|
||||
|
||||
// start the table
|
||||
@@ -345,14 +351,14 @@ public class HTMLReport implements Report {
|
||||
frag.append("\t<tr>\n");
|
||||
frag.append("\t\t<th>\n");
|
||||
if (content.getStatName() != null) {
|
||||
frag.append("\t\t\t" + content.getStatName() + "\n");
|
||||
frag.append("\t\t\t").append(content.getStatName()).append("\n");
|
||||
} else {
|
||||
frag.append("\t\t\t \n");
|
||||
}
|
||||
frag.append("\t\t</th>\n");
|
||||
frag.append("\t\t<th>\n");
|
||||
if (content.getResultName() != null) {
|
||||
frag.append("\t\t\t" + content.getResultName() + "\n");
|
||||
frag.append("\t\t\t").append(content.getResultName()).append("\n");
|
||||
} else {
|
||||
frag.append("\t\t\t \n");
|
||||
}
|
||||
@@ -370,10 +376,10 @@ public class HTMLReport implements Report {
|
||||
style = "reportEvenRow";
|
||||
}
|
||||
|
||||
frag.append("\t<tr class=\"" + style + "\">\n\t\t<td>\n");
|
||||
frag.append("\t<tr class=\"").append(style).append("\">\n\t\t<td>\n");
|
||||
frag.append("\t\t\t");
|
||||
if (stats[i].getReference() != null) {
|
||||
frag.append("<a href=\"" + stats[i].getReference() + "\" ");
|
||||
frag.append("<a href=\"").append(stats[i].getReference()).append("\" ");
|
||||
frag.append("target=\"_blank\">");
|
||||
}
|
||||
frag.append(this.clean(stats[i].getKey()));
|
||||
@@ -405,9 +411,9 @@ public class HTMLReport implements Report {
|
||||
@Override
|
||||
public String floorInfo(int floor) {
|
||||
if (floor > 0) {
|
||||
StringBuffer frag = new StringBuffer();
|
||||
StringBuilder frag = new StringBuilder();
|
||||
frag.append("<div class=\"reportFloor\">");
|
||||
frag.append("(more than " + ReportTools.numberFormat(floor) + " times)");
|
||||
frag.append("(more than ").append(ReportTools.numberFormat(floor)).append(" times)");
|
||||
frag.append("</div>\n");
|
||||
return frag.toString();
|
||||
} else {
|
||||
@@ -419,12 +425,12 @@ public class HTMLReport implements Report {
|
||||
* output the explanation of the report block in HTML format
|
||||
*
|
||||
* @param explanation some text explaining the coming report block
|
||||
* @return a string containing an explanaton HTML formatted
|
||||
* @return a string containing an explanation HTML formatted
|
||||
*/
|
||||
@Override
|
||||
public String blockExplanation(String explanation) {
|
||||
if (explanation != null) {
|
||||
StringBuffer frag = new StringBuffer();
|
||||
StringBuilder frag = new StringBuilder();
|
||||
frag.append("<div class=\"reportExplanation\">");
|
||||
frag.append(explanation);
|
||||
frag.append("</div>\n\n");
|
||||
|
@@ -30,13 +30,14 @@ import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.LogManager;
|
||||
import org.dspace.core.Utils;
|
||||
import org.dspace.discovery.DiscoverQuery;
|
||||
import org.dspace.discovery.SearchServiceException;
|
||||
import org.dspace.discovery.SearchUtils;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/**
|
||||
* This class performs all the actual analysis of a given set of DSpace log
|
||||
@@ -268,7 +269,7 @@ public class LogAnalyser {
|
||||
/**
|
||||
* the log directory to be analysed
|
||||
*/
|
||||
private static String logDir = ConfigurationManager.getProperty("log.report.dir");
|
||||
private static String logDir;
|
||||
|
||||
/**
|
||||
* the regex to describe the file name format
|
||||
@@ -276,16 +277,14 @@ public class LogAnalyser {
|
||||
private static String fileTemplate = "dspace\\.log.*";
|
||||
|
||||
/**
|
||||
* the config file from which to configure the analyser
|
||||
* the configuration file from which to configure the analyser
|
||||
*/
|
||||
private static String configFile = ConfigurationManager.getProperty("dspace.dir") +
|
||||
File.separator + "config" + File.separator +
|
||||
"dstat.cfg";
|
||||
private static String configFile;
|
||||
|
||||
/**
|
||||
* the output file to which to write aggregation data
|
||||
*/
|
||||
private static String outFile = ConfigurationManager.getProperty("log.report.dir") + File.separator + "dstat.dat";
|
||||
private static String outFile;
|
||||
|
||||
/**
|
||||
* the starting date of the report
|
||||
@@ -582,9 +581,11 @@ public class LogAnalyser {
|
||||
}
|
||||
|
||||
// now do the host name and url lookup
|
||||
hostName = Utils.getHostName(ConfigurationManager.getProperty("dspace.ui.url"));
|
||||
name = ConfigurationManager.getProperty("dspace.name").trim();
|
||||
url = ConfigurationManager.getProperty("dspace.ui.url").trim();
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
hostName = Utils.getHostName(configurationService.getProperty("dspace.ui.url"));
|
||||
name = configurationService.getProperty("dspace.name").trim();
|
||||
url = configurationService.getProperty("dspace.ui.url").trim();
|
||||
if ((url != null) && (!url.endsWith("/"))) {
|
||||
url = url + "/";
|
||||
}
|
||||
@@ -622,8 +623,13 @@ public class LogAnalyser {
|
||||
String myConfigFile, String myOutFile,
|
||||
Date myStartDate, Date myEndDate,
|
||||
boolean myLookUp) {
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
|
||||
if (myLogDir != null) {
|
||||
logDir = myLogDir;
|
||||
} else {
|
||||
logDir = configurationService.getProperty("log.report.dir");
|
||||
}
|
||||
|
||||
if (myFileTemplate != null) {
|
||||
@@ -632,6 +638,9 @@ public class LogAnalyser {
|
||||
|
||||
if (myConfigFile != null) {
|
||||
configFile = myConfigFile;
|
||||
} else {
|
||||
configFile = configurationService.getProperty("dspace.dir")
|
||||
+ File.separator + "config" + File.separator + "dstat.cfg";
|
||||
}
|
||||
|
||||
if (myStartDate != null) {
|
||||
@@ -644,9 +653,9 @@ public class LogAnalyser {
|
||||
|
||||
if (myOutFile != null) {
|
||||
outFile = myOutFile;
|
||||
} else {
|
||||
outFile = configurationService.getProperty("log.report.dir") + File.separator + "dstat.dat";
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
@@ -657,7 +666,7 @@ public class LogAnalyser {
|
||||
*/
|
||||
public static String createOutput() {
|
||||
// start a string buffer to hold the final output
|
||||
StringBuffer summary = new StringBuffer();
|
||||
StringBuilder summary = new StringBuilder();
|
||||
|
||||
// define an iterator that will be used to go over the hashmap keys
|
||||
Iterator<String> keys = null;
|
||||
@@ -820,7 +829,7 @@ public class LogAnalyser {
|
||||
*/
|
||||
public static void setRegex(String fileTemplate) {
|
||||
// build the exclude characters regular expression
|
||||
StringBuffer charRegEx = new StringBuffer();
|
||||
StringBuilder charRegEx = new StringBuilder();
|
||||
charRegEx.append("[");
|
||||
for (int i = 0; i < excludeChars.size(); i++) {
|
||||
charRegEx.append("\\").append(excludeChars.get(i));
|
||||
@@ -864,7 +873,7 @@ public class LogAnalyser {
|
||||
logRegex = Pattern.compile(fileTemplate);
|
||||
|
||||
// set up the pattern for matching any of the query types
|
||||
StringBuffer typeRXString = new StringBuffer();
|
||||
StringBuilder typeRXString = new StringBuilder();
|
||||
typeRXString.append("(");
|
||||
for (int i = 0; i < excludeTypes.size(); i++) {
|
||||
if (i > 0) {
|
||||
@@ -876,7 +885,7 @@ public class LogAnalyser {
|
||||
typeRX = Pattern.compile(typeRXString.toString());
|
||||
|
||||
// set up the pattern for matching any of the words to exclude
|
||||
StringBuffer wordRXString = new StringBuffer();
|
||||
StringBuilder wordRXString = new StringBuilder();
|
||||
wordRXString.append("(");
|
||||
for (int i = 0; i < excludeWords.size(); i++) {
|
||||
if (i > 0) {
|
||||
@@ -890,8 +899,6 @@ public class LogAnalyser {
|
||||
}
|
||||
wordRXString.append(")");
|
||||
wordRX = Pattern.compile(wordRXString.toString());
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -920,18 +927,18 @@ public class LogAnalyser {
|
||||
*/
|
||||
public static void readConfig(String configFile) throws IOException {
|
||||
//instantiate aggregators
|
||||
actionAggregator = new HashMap<String, Integer>();
|
||||
searchAggregator = new HashMap<String, Integer>();
|
||||
userAggregator = new HashMap<String, Integer>();
|
||||
itemAggregator = new HashMap<String, Integer>();
|
||||
archiveStats = new HashMap<String, Integer>();
|
||||
actionAggregator = new HashMap<>();
|
||||
searchAggregator = new HashMap<>();
|
||||
userAggregator = new HashMap<>();
|
||||
itemAggregator = new HashMap<>();
|
||||
archiveStats = new HashMap<>();
|
||||
|
||||
//instantiate lists
|
||||
generalSummary = new ArrayList<String>();
|
||||
excludeWords = new ArrayList<String>();
|
||||
excludeTypes = new ArrayList<String>();
|
||||
excludeChars = new ArrayList<String>();
|
||||
itemTypes = new ArrayList<String>();
|
||||
generalSummary = new ArrayList<>();
|
||||
excludeWords = new ArrayList<>();
|
||||
excludeTypes = new ArrayList<>();
|
||||
excludeChars = new ArrayList<>();
|
||||
itemTypes = new ArrayList<>();
|
||||
|
||||
// prepare our standard file readers and buffered readers
|
||||
FileReader fr = null;
|
||||
@@ -1002,8 +1009,6 @@ public class LogAnalyser {
|
||||
// close the inputs
|
||||
br.close();
|
||||
fr.close();
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -32,10 +32,11 @@ import org.dspace.content.MetadataSchemaEnum;
|
||||
import org.dspace.content.MetadataValue;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.handle.factory.HandleServiceFactory;
|
||||
import org.dspace.handle.service.HandleService;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/**
|
||||
* This class performs the action of coordinating a usage report being
|
||||
@@ -161,7 +162,7 @@ public class ReportGenerator {
|
||||
/**
|
||||
* pattern that matches an unqualified aggregator property
|
||||
*/
|
||||
private static Pattern real = Pattern.compile("^(.+)=(.+)");
|
||||
private static final Pattern real = Pattern.compile("^(.+)=(.+)");
|
||||
|
||||
//////////////////////////
|
||||
// Miscellaneous variables
|
||||
@@ -189,11 +190,12 @@ public class ReportGenerator {
|
||||
/**
|
||||
* the log file action to human readable action map
|
||||
*/
|
||||
private static String map = ConfigurationManager.getProperty("dspace.dir") +
|
||||
File.separator + "config" + File.separator + "dstat.map";
|
||||
private static String map;
|
||||
|
||||
private static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
|
||||
private static final HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
|
||||
private static final ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
|
||||
/**
|
||||
* Default constructor
|
||||
@@ -268,6 +270,9 @@ public class ReportGenerator {
|
||||
throws Exception, SQLException {
|
||||
if (myMap != null) {
|
||||
map = myMap;
|
||||
} else {
|
||||
map = configurationService.getProperty("dspace.dir")
|
||||
+ File.separator + "config" + File.separator + "dstat.map";
|
||||
}
|
||||
|
||||
// create the relevant report type
|
||||
@@ -302,15 +307,15 @@ public class ReportGenerator {
|
||||
startTime = new GregorianCalendar();
|
||||
|
||||
/** instantiate aggregators */
|
||||
actionAggregator = new HashMap<String, String>();
|
||||
searchAggregator = new HashMap<String, String>();
|
||||
userAggregator = new HashMap<String, String>();
|
||||
itemAggregator = new HashMap<String, String>();
|
||||
archiveStats = new HashMap<String, String>();
|
||||
actionMap = new HashMap<String, String>();
|
||||
actionAggregator = new HashMap<>();
|
||||
searchAggregator = new HashMap<>();
|
||||
userAggregator = new HashMap<>();
|
||||
itemAggregator = new HashMap<>();
|
||||
archiveStats = new HashMap<>();
|
||||
actionMap = new HashMap<>();
|
||||
|
||||
/** instantite lists */
|
||||
generalSummary = new ArrayList<String>();
|
||||
/** instantiate lists */
|
||||
generalSummary = new ArrayList<>();
|
||||
|
||||
// set the parameters for this analysis
|
||||
setParameters(myInput);
|
||||
@@ -486,8 +491,6 @@ public class ReportGenerator {
|
||||
report.addBlock(process);
|
||||
|
||||
report.render();
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
@@ -612,8 +615,6 @@ public class ReportGenerator {
|
||||
if (myInput != null) {
|
||||
input = myInput;
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
@@ -768,9 +769,9 @@ public class ReportGenerator {
|
||||
List<MetadataValue> author = itemService
|
||||
.getMetadata(item, MetadataSchemaEnum.DC.getName(), "contributor", "author", Item.ANY);
|
||||
|
||||
StringBuffer authors = new StringBuffer();
|
||||
StringBuilder authors = new StringBuilder();
|
||||
if (author.size() > 0) {
|
||||
authors.append("(" + author.get(0).getValue());
|
||||
authors.append("(").append(author.get(0).getValue());
|
||||
}
|
||||
if (author.size() > 1) {
|
||||
authors.append(" et al");
|
||||
|
@@ -22,7 +22,8 @@ import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.apache.commons.lang3.time.DateUtils;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/**
|
||||
* Helper class for loading the analysis / report files from the reports directory
|
||||
@@ -219,8 +220,8 @@ public class StatisticsLoader {
|
||||
}
|
||||
|
||||
// Create new maps for the monthly analysis / reports
|
||||
Map<String, StatsFile> newMonthlyAnalysis = new HashMap<String, StatsFile>();
|
||||
Map<String, StatsFile> newMonthlyReports = new HashMap<String, StatsFile>();
|
||||
Map<String, StatsFile> newMonthlyAnalysis = new HashMap<>();
|
||||
Map<String, StatsFile> newMonthlyReports = new HashMap<>();
|
||||
|
||||
StatsFile newGeneralAnalysis = null;
|
||||
StatsFile newGeneralReport = null;
|
||||
@@ -320,7 +321,9 @@ public class StatisticsLoader {
|
||||
* @return array of files
|
||||
*/
|
||||
private static File[] getAnalysisAndReportFileList() {
|
||||
File reportDir = new File(ConfigurationManager.getProperty("log.report.dir"));
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
File reportDir = new File(configurationService.getProperty("log.report.dir"));
|
||||
if (reportDir != null) {
|
||||
return reportDir.listFiles(new AnalysisAndReportFilter());
|
||||
}
|
||||
|
@@ -14,8 +14,9 @@ import java.util.Date;
|
||||
|
||||
import org.dspace.app.util.factory.UtilServiceFactory;
|
||||
import org.dspace.app.util.service.WebAppService;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@@ -57,7 +58,9 @@ abstract public class AbstractDSpaceWebapp
|
||||
|
||||
started = new Date();
|
||||
|
||||
url = ConfigurationManager.getProperty("dspace.ui.url");
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
url = configurationService.getProperty("dspace.ui.url");
|
||||
if (null == url) {
|
||||
throw new IllegalStateException("dspace.ui.url is undefined");
|
||||
}
|
||||
|
@@ -16,8 +16,9 @@ import org.dspace.content.Collection;
|
||||
import org.dspace.content.Community;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.CommunityService;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/**
|
||||
* Utility class for lists of collections.
|
||||
@@ -55,8 +56,11 @@ public class CollectionDropDown {
|
||||
* @return Full path to the collection (truncated)
|
||||
* @throws SQLException if database error
|
||||
*/
|
||||
public static String collectionPath(Context context, Collection col, int maxchars) throws SQLException {
|
||||
String separator = ConfigurationManager.getProperty("subcommunity.separator");
|
||||
public static String collectionPath(Context context, Collection col, int maxchars)
|
||||
throws SQLException {
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
String separator = configurationService.getProperty("subcommunity.separator");
|
||||
if (separator == null) {
|
||||
separator = " > ";
|
||||
}
|
||||
|
@@ -12,6 +12,7 @@ import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.regex.PatternSyntaxException;
|
||||
import javax.annotation.Nullable;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.dspace.content.MetadataSchemaEnum;
|
||||
@@ -144,6 +145,7 @@ public class DCInput {
|
||||
private String relationshipType = null;
|
||||
private String searchConfiguration = null;
|
||||
private String filter;
|
||||
private List<String> externalSources;
|
||||
|
||||
/**
|
||||
* The scope of the input sets, this restricts hidden metadata fields from
|
||||
@@ -225,6 +227,15 @@ public class DCInput {
|
||||
relationshipType = fieldMap.get("relationship-type");
|
||||
searchConfiguration = fieldMap.get("search-configuration");
|
||||
filter = fieldMap.get("filter");
|
||||
externalSources = new ArrayList<>();
|
||||
String externalSourcesDef = fieldMap.get("externalsources");
|
||||
if (StringUtils.isNotBlank(externalSourcesDef)) {
|
||||
String[] sources = StringUtils.split(externalSourcesDef, ",");
|
||||
for (String source: sources) {
|
||||
externalSources.add(StringUtils.trim(source));
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -291,7 +302,7 @@ public class DCInput {
|
||||
*
|
||||
* @return the input type
|
||||
*/
|
||||
public String getInputType() {
|
||||
public @Nullable String getInputType() {
|
||||
return inputType;
|
||||
}
|
||||
|
||||
@@ -521,6 +532,10 @@ public class DCInput {
|
||||
return filter;
|
||||
}
|
||||
|
||||
public List<String> getExternalSources() {
|
||||
return externalSources;
|
||||
}
|
||||
|
||||
public boolean isQualdropValue() {
|
||||
if ("qualdrop_value".equals(getInputType())) {
|
||||
return true;
|
||||
|
@@ -10,6 +10,7 @@ package org.dspace.app.util;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.dspace.core.Utils;
|
||||
/**
|
||||
* Class representing all DC inputs required for a submission, organized into pages
|
||||
@@ -109,7 +110,7 @@ public class DCInputSet {
|
||||
for (int j = 0; j < inputs[i].length; j++) {
|
||||
DCInput field = inputs[i][j];
|
||||
// If this is a "qualdrop_value" field, then the full field name is the field + dropdown qualifier
|
||||
if (field.getInputType().equals("qualdrop_value")) {
|
||||
if (StringUtils.equals(field.getInputType(), "qualdrop_value")) {
|
||||
List<String> pairs = field.getPairs();
|
||||
for (int k = 0; k < pairs.size(); k += 2) {
|
||||
String qualifier = pairs.get(k + 1);
|
||||
|
@@ -7,6 +7,7 @@
|
||||
*/
|
||||
package org.dspace.app.util;
|
||||
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import javax.servlet.ServletContextEvent;
|
||||
import javax.servlet.ServletContextListener;
|
||||
|
||||
@@ -27,13 +28,11 @@ public class DSpaceWebappListener implements ServletContextListener {
|
||||
|
||||
try {
|
||||
Class webappClass = Class.forName("org.dspace.utils.DSpaceWebapp");
|
||||
webApp = (AbstractDSpaceWebapp) webappClass.newInstance();
|
||||
webApp = (AbstractDSpaceWebapp) webappClass.getDeclaredConstructor().newInstance();
|
||||
webApp.register();
|
||||
} catch (ClassNotFoundException ex) {
|
||||
event.getServletContext().log("Can't create webapp MBean: " + ex.getMessage());
|
||||
} catch (InstantiationException ex) {
|
||||
event.getServletContext().log("Can't create webapp MBean: " + ex.getMessage());
|
||||
} catch (IllegalAccessException ex) {
|
||||
} catch (ClassNotFoundException | InstantiationException
|
||||
| IllegalAccessException | IllegalArgumentException
|
||||
| NoSuchMethodException | InvocationTargetException ex) {
|
||||
event.getServletContext().log("Can't create webapp MBean: " + ex.getMessage());
|
||||
}
|
||||
}
|
||||
|
@@ -22,7 +22,7 @@ public interface DSpaceWebappMXBean {
|
||||
public boolean isUI();
|
||||
|
||||
/**
|
||||
* What kind of webapp? XMLUI, OAI, etc.
|
||||
* What kind of webapp? Server, etc.
|
||||
*
|
||||
* @return kind of webapp
|
||||
*/
|
||||
|
@@ -26,6 +26,7 @@ import java.util.Properties;
|
||||
|
||||
import com.google.common.collect.ArrayListMultimap;
|
||||
import com.google.common.collect.ListMultimap;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.authorize.factory.AuthorizeServiceFactory;
|
||||
import org.dspace.content.Bitstream;
|
||||
@@ -36,10 +37,11 @@ import org.dspace.content.MetadataSchema;
|
||||
import org.dspace.content.MetadataValue;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Constants;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.handle.factory.HandleServiceFactory;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.jdom.Element;
|
||||
|
||||
/**
|
||||
@@ -51,7 +53,7 @@ import org.jdom.Element;
|
||||
@SuppressWarnings("deprecation")
|
||||
public class GoogleMetadata {
|
||||
|
||||
private final static Logger log = org.apache.logging.log4j.LogManager.getLogger(GoogleMetadata.class);
|
||||
private final static Logger log = LogManager.getLogger(GoogleMetadata.class);
|
||||
|
||||
protected static final String GOOGLE_PREFIX = "google.";
|
||||
|
||||
@@ -62,7 +64,7 @@ public class GoogleMetadata {
|
||||
protected String itemURL;
|
||||
|
||||
// Configuration keys and fields
|
||||
protected static Map<String, String> googleScholarSettings = new HashMap<String, String>();
|
||||
protected static Map<String, String> googleScholarSettings = new HashMap<>();
|
||||
|
||||
// Google field names (e.g. citation_fieldname) and formatted metadata
|
||||
// values
|
||||
@@ -132,35 +134,39 @@ public class GoogleMetadata {
|
||||
|
||||
private static GoogleBitstreamComparator googleBitstreamComparator = null;
|
||||
|
||||
// Load configured fields from google-metadata.properties
|
||||
static {
|
||||
private final ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
|
||||
File loadedFile = null;
|
||||
URL url = null;
|
||||
InputStream is = null;
|
||||
|
||||
String googleConfigFile = ConfigurationManager
|
||||
/**
|
||||
* Load configured fields from google-metadata.properties.
|
||||
*/
|
||||
private void loadGoogleScholarSettings()
|
||||
throws MalformedURLException, IOException {
|
||||
String googleConfigFile = configurationService
|
||||
.getProperty("google-metadata.config");
|
||||
log.info("Using [" + googleConfigFile
|
||||
+ "] for Google Metadata configuration");
|
||||
log.info("Using [{}] for Google Metadata configuration", googleConfigFile);
|
||||
|
||||
loadedFile = new File(googleConfigFile);
|
||||
File loadedFile = new File(googleConfigFile);
|
||||
URL url;
|
||||
try {
|
||||
url = loadedFile.toURL();
|
||||
url = loadedFile.toURI().toURL();
|
||||
|
||||
} catch (MalformedURLException mux) {
|
||||
log.error("Can't find Google Metadata configuration file: "
|
||||
+ googleConfigFile, mux);
|
||||
log.error("Can't find Google Metadata configuration file: {}",
|
||||
googleConfigFile, mux);
|
||||
throw mux;
|
||||
}
|
||||
|
||||
Properties properties = new Properties();
|
||||
InputStream is;
|
||||
try {
|
||||
is = url.openStream();
|
||||
properties.load(is);
|
||||
|
||||
} catch (IOException iox) {
|
||||
log.error("Could not read Google Metadata configuration file: "
|
||||
+ googleConfigFile, iox);
|
||||
log.error("Could not read Google Metadata configuration file: {}",
|
||||
googleConfigFile, iox);
|
||||
throw iox;
|
||||
}
|
||||
|
||||
Enumeration propertyNames = properties.propertyNames();
|
||||
@@ -180,19 +186,21 @@ public class GoogleMetadata {
|
||||
}
|
||||
}
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
logConfiguration();
|
||||
}
|
||||
logConfiguration();
|
||||
}
|
||||
|
||||
/**
|
||||
* Dump Metadata field mapping to log
|
||||
*/
|
||||
public static void logConfiguration() {
|
||||
if (!log.isDebugEnabled()) {
|
||||
return;
|
||||
}
|
||||
|
||||
log.debug("Google Metadata Configuration Mapping:");
|
||||
|
||||
for (String name : googleScholarSettings.keySet()) {
|
||||
log.debug(" " + name + " => " + googleScholarSettings.get(name));
|
||||
log.debug(" {} => {}", name, googleScholarSettings.get(name));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -202,9 +210,14 @@ public class GoogleMetadata {
|
||||
*
|
||||
* @param context context
|
||||
* @param item The item being viewed to extract metadata from
|
||||
* @throws SQLException if database error
|
||||
* @throws SQLException if database error.
|
||||
* @throws java.io.IOException passed through.
|
||||
*/
|
||||
public GoogleMetadata(Context context, Item item) throws SQLException {
|
||||
public GoogleMetadata(Context context, Item item)
|
||||
throws SQLException, IOException {
|
||||
if (googleScholarSettings.isEmpty()) {
|
||||
loadGoogleScholarSettings();
|
||||
}
|
||||
|
||||
// Hold onto the item in case we need to refresh a stale parse
|
||||
this.item = item;
|
||||
@@ -336,7 +349,7 @@ public class GoogleMetadata {
|
||||
int optionMatches = 0;
|
||||
String[] components;
|
||||
List<MetadataValue> values;
|
||||
ArrayList<MetadataValue> resolvedFields = new ArrayList<MetadataValue>();
|
||||
ArrayList<MetadataValue> resolvedFields = new ArrayList<>();
|
||||
|
||||
for (String field : optionFields) {
|
||||
|
||||
@@ -399,8 +412,8 @@ public class GoogleMetadata {
|
||||
*/
|
||||
protected ArrayList<ArrayList<String>> parseOptions(String configFilter) {
|
||||
|
||||
ArrayList<String> options = new ArrayList<String>();
|
||||
ArrayList<ArrayList<String>> parsedOptions = new ArrayList<ArrayList<String>>();
|
||||
ArrayList<String> options = new ArrayList<>();
|
||||
ArrayList<ArrayList<String>> parsedOptions = new ArrayList<>();
|
||||
|
||||
if (null == configFilter || configFilter.equals("")) {
|
||||
return null;
|
||||
@@ -414,7 +427,7 @@ public class GoogleMetadata {
|
||||
options.add(option.trim());
|
||||
}
|
||||
} else {
|
||||
options = new ArrayList<String>();
|
||||
options = new ArrayList<>();
|
||||
options.add(configFilter);
|
||||
}
|
||||
|
||||
@@ -426,12 +439,12 @@ public class GoogleMetadata {
|
||||
for (String option : options) {
|
||||
|
||||
ArrayList<String> fields;
|
||||
parsedFields = new ArrayList<String>();
|
||||
parsedFields = new ArrayList<>();
|
||||
|
||||
if (option.contains(",")) {
|
||||
fields = parseFields(option);
|
||||
} else {
|
||||
fields = new ArrayList<String>();
|
||||
fields = new ArrayList<>();
|
||||
fields.add(option);
|
||||
}
|
||||
|
||||
@@ -472,7 +485,7 @@ public class GoogleMetadata {
|
||||
*/
|
||||
protected ArrayList<String> parseFields(String configString) {
|
||||
|
||||
ArrayList<String> fields = new ArrayList<String>();
|
||||
ArrayList<String> fields = new ArrayList<>();
|
||||
|
||||
for (String field : configString.split("\\,")) {
|
||||
fields.add(field.trim());
|
||||
@@ -523,7 +536,7 @@ public class GoogleMetadata {
|
||||
List<MetadataValue> allMD = itemService.getMetadata(item, components[0], components[1],
|
||||
components[2], Item.ANY);
|
||||
|
||||
ArrayList<String> expandedDC = new ArrayList<String>();
|
||||
ArrayList<String> expandedDC = new ArrayList<>();
|
||||
for (MetadataValue v : allMD) {
|
||||
|
||||
// De-dup multiple occurrences of field names in item
|
||||
@@ -558,7 +571,7 @@ public class GoogleMetadata {
|
||||
MetadataSchema metadataSchema = v.getMetadataField().getMetadataSchema();
|
||||
name.append(metadataSchema.getName()).append(".").append(metadataField.getElement());
|
||||
if (null != metadataField.getQualifier()) {
|
||||
name.append("." + metadataField.getQualifier());
|
||||
name.append(".").append(metadataField.getQualifier());
|
||||
}
|
||||
|
||||
return name.toString();
|
||||
@@ -687,7 +700,7 @@ public class GoogleMetadata {
|
||||
* @return List of elements
|
||||
*/
|
||||
public List<Element> disseminateList() {
|
||||
List<Element> metas = new ArrayList<Element>();
|
||||
List<Element> metas = new ArrayList<>();
|
||||
|
||||
for (Entry<String, String> m : getMappings()) {
|
||||
Element e = new Element("meta");
|
||||
@@ -889,7 +902,7 @@ public class GoogleMetadata {
|
||||
Bitstream bitstream = findLinkableFulltext(item);
|
||||
if (bitstream != null) {
|
||||
StringBuilder path = new StringBuilder();
|
||||
path.append(ConfigurationManager.getProperty("dspace.ui.url"));
|
||||
path.append(configurationService.getProperty("dspace.ui.url"));
|
||||
|
||||
if (item.getHandle() != null) {
|
||||
path.append("/bitstream/");
|
||||
@@ -1075,7 +1088,7 @@ public class GoogleMetadata {
|
||||
// FIXME: Shouldn't have to parse identifiers for every identification.
|
||||
|
||||
ArrayList<ArrayList<String>> options = parseOptions(dConfig);
|
||||
HashMap<String, ArrayList<String>> mdPairs = new HashMap<String, ArrayList<String>>();
|
||||
HashMap<String, ArrayList<String>> mdPairs = new HashMap<>();
|
||||
|
||||
// Parse field/value pairs from field identifier string
|
||||
for (ArrayList<String> option : options) {
|
||||
@@ -1092,7 +1105,7 @@ public class GoogleMetadata {
|
||||
}
|
||||
} else {
|
||||
// Otherwise, add it as the first occurrence of this field
|
||||
ArrayList<String> newField = new ArrayList<String>();
|
||||
ArrayList<String> newField = new ArrayList<>();
|
||||
newField.add(parsedPair[1].trim());
|
||||
mdPairs.put(parsedPair[0].trim(), newField);
|
||||
|
||||
@@ -1113,7 +1126,7 @@ public class GoogleMetadata {
|
||||
|
||||
// Check resolved/present metadata fields against configured values
|
||||
ArrayList<MetadataValue> presentMD = resolveMetadataFields(sb.toString());
|
||||
if (null != presentMD && presentMD.size() != 0) {
|
||||
if (null != presentMD && !presentMD.isEmpty()) {
|
||||
for (MetadataValue v : presentMD) {
|
||||
String fieldName = buildFieldName(v);
|
||||
if (mdPairs.containsKey(fieldName)) {
|
||||
|
@@ -18,10 +18,10 @@ import javax.xml.parsers.ParserConfigurationException;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
@@ -48,9 +48,9 @@ public class InitializeEntities {
|
||||
|
||||
private final static Logger log = LogManager.getLogger();
|
||||
|
||||
private RelationshipTypeService relationshipTypeService;
|
||||
private RelationshipService relationshipService;
|
||||
private EntityTypeService entityTypeService;
|
||||
private final RelationshipTypeService relationshipTypeService;
|
||||
private final RelationshipService relationshipService;
|
||||
private final EntityTypeService entityTypeService;
|
||||
|
||||
|
||||
private InitializeEntities() {
|
||||
@@ -62,14 +62,14 @@ public class InitializeEntities {
|
||||
/**
|
||||
* The main method for this script
|
||||
*
|
||||
* @param argv The commandline arguments given with this command
|
||||
* @param argv The command line arguments given with this command
|
||||
* @throws SQLException If something goes wrong with the database
|
||||
* @throws AuthorizeException If something goes wrong with permissions
|
||||
* @throws ParseException If something goes wrong with the parsing
|
||||
*/
|
||||
public static void main(String[] argv) throws SQLException, AuthorizeException, ParseException {
|
||||
InitializeEntities initializeEntities = new InitializeEntities();
|
||||
CommandLineParser parser = new PosixParser();
|
||||
CommandLineParser parser = new DefaultParser();
|
||||
Options options = createCommandLineOptions();
|
||||
CommandLine line = parser.parse(options,argv);
|
||||
String fileLocation = getFileLocationFromCommandLine(line);
|
||||
|
@@ -377,7 +377,7 @@ public class SubmissionConfigReader {
|
||||
for (int i = 0; i < len; i++) {
|
||||
Node nd = nl.item(i);
|
||||
// process each step definition
|
||||
if (nd.getNodeName().equals("step")) {
|
||||
if (StringUtils.equalsIgnoreCase(nd.getNodeName(), "step-definition")) {
|
||||
String stepID = getAttribute(nd, "id");
|
||||
if (stepID == null) {
|
||||
throw new SAXException(
|
||||
|
@@ -8,6 +8,7 @@
|
||||
package org.dspace.app.util;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
@@ -49,7 +50,6 @@ import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.CollectionService;
|
||||
import org.dspace.content.service.CommunityService;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.discovery.IndexableObject;
|
||||
import org.dspace.discovery.indexobject.IndexableCollection;
|
||||
@@ -102,7 +102,7 @@ public class SyndicationFeed {
|
||||
};
|
||||
protected String defaultExternalMedia = "dc.source.uri";
|
||||
|
||||
private final ConfigurationService configurationService =
|
||||
private static final ConfigurationService configurationService =
|
||||
DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
|
||||
// metadata field for Item title in entry:
|
||||
@@ -196,18 +196,19 @@ public class SyndicationFeed {
|
||||
|
||||
// dso is null for the whole site, or a search without scope
|
||||
if (dso == null) {
|
||||
defaultTitle = ConfigurationManager.getProperty("dspace.name");
|
||||
defaultTitle = configurationService.getProperty("dspace.name");
|
||||
feed.setDescription(localize(labels, MSG_FEED_DESCRIPTION));
|
||||
objectURL = resolveURL(request, null);
|
||||
logoURL = ConfigurationManager.getProperty("webui.feed.logo.url");
|
||||
logoURL = configurationService.getProperty("webui.feed.logo.url");
|
||||
} else {
|
||||
Bitstream logo = null;
|
||||
if (dso instanceof IndexableCollection) {
|
||||
Collection col = ((IndexableCollection) dso).getIndexedObject();
|
||||
defaultTitle = col.getName();
|
||||
feed.setDescription(collectionService.getMetadata(col, "short_description"));
|
||||
feed.setDescription(collectionService.getMetadataFirstValue(col,
|
||||
CollectionService.MD_SHORT_DESCRIPTION, Item.ANY));
|
||||
logo = col.getLogo();
|
||||
String cols = ConfigurationManager.getProperty("webui.feed.podcast.collections");
|
||||
String cols = configurationService.getProperty("webui.feed.podcast.collections");
|
||||
if (cols != null && cols.length() > 1 && cols.contains(col.getHandle())) {
|
||||
podcastFeed = true;
|
||||
}
|
||||
@@ -215,9 +216,10 @@ public class SyndicationFeed {
|
||||
} else if (dso instanceof IndexableCommunity) {
|
||||
Community comm = ((IndexableCommunity) dso).getIndexedObject();
|
||||
defaultTitle = comm.getName();
|
||||
feed.setDescription(communityService.getMetadata(comm, "short_description"));
|
||||
feed.setDescription(communityService.getMetadataFirstValue(comm,
|
||||
CommunityService.MD_SHORT_DESCRIPTION, Item.ANY));
|
||||
logo = comm.getLogo();
|
||||
String comms = ConfigurationManager.getProperty("webui.feed.podcast.communities");
|
||||
String comms = configurationService.getProperty("webui.feed.podcast.communities");
|
||||
if (comms != null && comms.length() > 1 && comms.contains(comm.getHandle())) {
|
||||
podcastFeed = true;
|
||||
}
|
||||
@@ -251,7 +253,7 @@ public class SyndicationFeed {
|
||||
|
||||
// add entries for items
|
||||
if (items != null) {
|
||||
List<SyndEntry> entries = new ArrayList<SyndEntry>();
|
||||
List<SyndEntry> entries = new ArrayList<>();
|
||||
for (IndexableObject idxObj : items) {
|
||||
if (!(idxObj instanceof IndexableItem)) {
|
||||
continue;
|
||||
@@ -277,7 +279,7 @@ public class SyndicationFeed {
|
||||
// date of last change to Item
|
||||
entry.setUpdatedDate(item.getLastModified());
|
||||
|
||||
StringBuffer db = new StringBuffer();
|
||||
StringBuilder db = new StringBuilder();
|
||||
for (String df : descriptionFields) {
|
||||
// Special Case: "(date)" in field name means render as date
|
||||
boolean isDate = df.indexOf("(date)") > 0;
|
||||
@@ -313,7 +315,7 @@ public class SyndicationFeed {
|
||||
// This gets the authors into an ATOM feed
|
||||
List<MetadataValue> authors = itemService.getMetadataByMetadataString(item, authorField);
|
||||
if (authors.size() > 0) {
|
||||
List<SyndPerson> creators = new ArrayList<SyndPerson>();
|
||||
List<SyndPerson> creators = new ArrayList<>();
|
||||
for (MetadataValue author : authors) {
|
||||
SyndPerson sp = new SyndPersonImpl();
|
||||
sp.setName(author.getValue());
|
||||
@@ -329,7 +331,7 @@ public class SyndicationFeed {
|
||||
if (dcCreatorField != null) {
|
||||
List<MetadataValue> dcAuthors = itemService.getMetadataByMetadataString(item, dcCreatorField);
|
||||
if (dcAuthors.size() > 0) {
|
||||
List<String> creators = new ArrayList<String>();
|
||||
List<String> creators = new ArrayList<>();
|
||||
for (MetadataValue author : dcAuthors) {
|
||||
creators.add(author.getValue());
|
||||
}
|
||||
@@ -345,7 +347,7 @@ public class SyndicationFeed {
|
||||
if (dcDescriptionField != null) {
|
||||
List<MetadataValue> v = itemService.getMetadataByMetadataString(item, dcDescriptionField);
|
||||
if (v.size() > 0) {
|
||||
StringBuffer descs = new StringBuffer();
|
||||
StringBuilder descs = new StringBuilder();
|
||||
for (MetadataValue d : v) {
|
||||
if (descs.length() > 0) {
|
||||
descs.append("\n\n");
|
||||
@@ -374,8 +376,6 @@ public class SyndicationFeed {
|
||||
enc.setLength(bit.getSizeBytes());
|
||||
enc.setUrl(urlOfBitstream(request, bit));
|
||||
enclosures.add(enc);
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -395,7 +395,7 @@ public class SyndicationFeed {
|
||||
}
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
} catch (SQLException e) {
|
||||
System.out.println(e.getMessage());
|
||||
}
|
||||
entry.setEnclosures(enclosures);
|
||||
@@ -501,7 +501,7 @@ public class SyndicationFeed {
|
||||
|
||||
// utility to get config property with default value when not set.
|
||||
protected static String getDefaultedConfiguration(String key, String dfl) {
|
||||
String result = ConfigurationManager.getProperty(key);
|
||||
String result = configurationService.getProperty(key);
|
||||
return (result == null) ? dfl : result;
|
||||
}
|
||||
|
||||
@@ -531,14 +531,14 @@ public class SyndicationFeed {
|
||||
if (dso == null) {
|
||||
if (baseURL == null) {
|
||||
if (request == null) {
|
||||
baseURL = ConfigurationManager.getProperty("dspace.ui.url");
|
||||
baseURL = configurationService.getProperty("dspace.ui.url");
|
||||
} else {
|
||||
baseURL = ConfigurationManager.getProperty("dspace.ui.url");
|
||||
baseURL = configurationService.getProperty("dspace.ui.url");
|
||||
baseURL += request.getContextPath();
|
||||
}
|
||||
}
|
||||
return baseURL;
|
||||
} else if (ConfigurationManager.getBooleanProperty("webui.feed.localresolve")) {
|
||||
} else if (configurationService.getBooleanProperty("webui.feed.localresolve")) {
|
||||
// return a link to handle in repository
|
||||
return resolveURL(request, null) + "/handle/" + dso.getHandle();
|
||||
} else {
|
||||
|
@@ -15,9 +15,9 @@ import java.util.List;
|
||||
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.HttpStatus;
|
||||
import org.apache.http.client.HttpClient;
|
||||
import org.apache.http.client.methods.HttpHead;
|
||||
import org.apache.http.impl.client.DefaultHttpClient;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.util.dao.WebAppDAO;
|
||||
import org.dspace.app.util.service.WebAppService;
|
||||
@@ -76,13 +76,13 @@ public class WebAppServiceImpl implements WebAppService {
|
||||
|
||||
for (WebApp app : webApps) {
|
||||
method = new HttpHead(app.getUrl());
|
||||
HttpClient client = new DefaultHttpClient();
|
||||
HttpResponse response = client.execute(method);
|
||||
int status = response.getStatusLine().getStatusCode();
|
||||
int status;
|
||||
try (CloseableHttpClient client = HttpClientBuilder.create().build()) {
|
||||
HttpResponse response = client.execute(method);
|
||||
status = response.getStatusLine().getStatusCode();
|
||||
}
|
||||
if (status != HttpStatus.SC_OK) {
|
||||
delete(context, app
|
||||
|
||||
);
|
||||
delete(context, app);
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@@ -7,6 +7,8 @@
|
||||
*/
|
||||
package org.dspace.authenticate;
|
||||
|
||||
import static org.dspace.eperson.service.EPersonService.MD_PHONE;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.Arrays;
|
||||
@@ -33,7 +35,6 @@ import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.authenticate.factory.AuthenticateServiceFactory;
|
||||
import org.dspace.authenticate.service.AuthenticationService;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.LogManager;
|
||||
import org.dspace.eperson.EPerson;
|
||||
@@ -41,19 +42,28 @@ import org.dspace.eperson.Group;
|
||||
import org.dspace.eperson.factory.EPersonServiceFactory;
|
||||
import org.dspace.eperson.service.EPersonService;
|
||||
import org.dspace.eperson.service.GroupService;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/**
|
||||
* This combined LDAP authentication method supersedes both the 'LDAPAuthentication'
|
||||
* and the 'LDAPHierarchicalAuthentication' methods. It's capable of both:
|
||||
* - authenticaton against a flat LDAP tree where all users are in the same unit
|
||||
* (if search.user or search.password is not set)
|
||||
* - authentication against structured hierarchical LDAP trees of users.
|
||||
* <ul>
|
||||
* <li>authentication against a flat LDAP tree where all users are in the same unit
|
||||
* (if {@code search.user} or {@code search.password} is not set)</li>
|
||||
* <li>authentication against structured hierarchical LDAP trees of users.</li>
|
||||
* </ul>
|
||||
* An initial bind is required using a user name and password in order to
|
||||
* search the tree and find the DN of the user. A second bind is then required to
|
||||
* check the credentials of the user by binding directly to their DN.
|
||||
*
|
||||
* @author Stuart Lewis, Chris Yates, Alex Barbieri, Flavio Botelho, Reuben Pasquini, Samuel Ottenhoff, Ivan Masár
|
||||
* @version $Revision$
|
||||
* @author Stuart Lewis
|
||||
* @author Chris Yates
|
||||
* @author Alex Barbieri
|
||||
* @author Flavio Botelho
|
||||
* @author Reuben Pasquini
|
||||
* @author Samuel Ottenhoff
|
||||
* @author Ivan Masár
|
||||
*/
|
||||
public class LDAPAuthentication
|
||||
implements AuthenticationMethod {
|
||||
@@ -61,13 +71,17 @@ public class LDAPAuthentication
|
||||
/**
|
||||
* log4j category
|
||||
*/
|
||||
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(LDAPAuthentication.class);
|
||||
|
||||
protected AuthenticationService authenticationService = AuthenticateServiceFactory.getInstance()
|
||||
.getAuthenticationService();
|
||||
protected EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
|
||||
protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService();
|
||||
private static final Logger log
|
||||
= org.apache.logging.log4j.LogManager.getLogger(LDAPAuthentication.class);
|
||||
|
||||
protected AuthenticationService authenticationService
|
||||
= AuthenticateServiceFactory.getInstance().getAuthenticationService();
|
||||
protected EPersonService ePersonService
|
||||
= EPersonServiceFactory.getInstance().getEPersonService();
|
||||
protected GroupService groupService
|
||||
= EPersonServiceFactory.getInstance().getGroupService();
|
||||
protected ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
|
||||
/**
|
||||
* Let a real auth method return true if it wants.
|
||||
@@ -80,7 +94,7 @@ public class LDAPAuthentication
|
||||
String username)
|
||||
throws SQLException {
|
||||
// Looks to see if autoregister is set or not
|
||||
return ConfigurationManager.getBooleanProperty("authentication-ldap", "autoregister");
|
||||
return configurationService.getBooleanProperty("authentication-ldap.autoregister");
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -128,7 +142,7 @@ public class LDAPAuthentication
|
||||
// ensures they are LDAP users
|
||||
try {
|
||||
if (!context.getCurrentUser().getNetid().equals("")) {
|
||||
String groupName = ConfigurationManager.getProperty("authentication-ldap", "login.specialgroup");
|
||||
String groupName = configurationService.getProperty("authentication-ldap.login.specialgroup");
|
||||
if ((groupName != null) && (!groupName.trim().equals(""))) {
|
||||
Group ldapGroup = groupService.findByName(context, groupName);
|
||||
if (ldapGroup == null) {
|
||||
@@ -142,7 +156,7 @@ public class LDAPAuthentication
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception npe) {
|
||||
} catch (SQLException ex) {
|
||||
// The user is not an LDAP user, so we don't need to worry about them
|
||||
}
|
||||
return Collections.EMPTY_LIST;
|
||||
@@ -205,11 +219,11 @@ public class LDAPAuthentication
|
||||
SpeakerToLDAP ldap = new SpeakerToLDAP(log);
|
||||
|
||||
// Get the DN of the user
|
||||
boolean anonymousSearch = ConfigurationManager.getBooleanProperty("authentication-ldap", "search.anonymous");
|
||||
String adminUser = ConfigurationManager.getProperty("authentication-ldap", "search.user");
|
||||
String adminPassword = ConfigurationManager.getProperty("authentication-ldap", "search.password");
|
||||
String objectContext = ConfigurationManager.getProperty("authentication-ldap", "object_context");
|
||||
String idField = ConfigurationManager.getProperty("authentication-ldap", "id_field");
|
||||
boolean anonymousSearch = configurationService.getBooleanProperty("authentication-ldap.search.anonymous");
|
||||
String adminUser = configurationService.getProperty("authentication-ldap.search.user");
|
||||
String adminPassword = configurationService.getProperty("authentication-ldap.search.password");
|
||||
String objectContext = configurationService.getProperty("authentication-ldap.object_context");
|
||||
String idField = configurationService.getProperty("authentication-ldap.id_field");
|
||||
String dn = "";
|
||||
|
||||
// If adminUser is blank and anonymous search is not allowed, then we can't search so construct the DN
|
||||
@@ -263,9 +277,8 @@ public class LDAPAuthentication
|
||||
if (StringUtils.isEmpty(email)) {
|
||||
// If no email, check if we have a "netid_email_domain". If so, append it to the netid to create
|
||||
// email
|
||||
if (StringUtils
|
||||
.isNotEmpty(ConfigurationManager.getProperty("authentication-ldap", "netid_email_domain"))) {
|
||||
email = netid + ConfigurationManager.getProperty("authentication-ldap", "netid_email_domain");
|
||||
if (configurationService.hasProperty("authentication-ldap.netid_email_domain")) {
|
||||
email = netid + configurationService.getProperty("authentication-ldap.netid_email_domain");
|
||||
} else {
|
||||
// We don't have a valid email address. We'll default it to 'netid' but log a warning
|
||||
log.warn(LogManager.getHeader(context, "autoregister",
|
||||
@@ -310,7 +323,8 @@ public class LDAPAuthentication
|
||||
eperson.setLastName(context, ldap.ldapSurname);
|
||||
}
|
||||
if (StringUtils.isNotEmpty(ldap.ldapPhone)) {
|
||||
ePersonService.setMetadata(context, eperson, "phone", ldap.ldapPhone);
|
||||
ePersonService.setMetadataSingleValue(context, eperson,
|
||||
MD_PHONE, ldap.ldapPhone, null);
|
||||
}
|
||||
eperson.setNetid(netid.toLowerCase());
|
||||
eperson.setCanLogIn(true);
|
||||
@@ -365,21 +379,34 @@ public class LDAPAuthentication
|
||||
/**
|
||||
* LDAP settings
|
||||
*/
|
||||
String ldap_provider_url = ConfigurationManager.getProperty("authentication-ldap", "provider_url");
|
||||
String ldap_id_field = ConfigurationManager.getProperty("authentication-ldap", "id_field");
|
||||
String ldap_search_context = ConfigurationManager.getProperty("authentication-ldap", "search_context");
|
||||
String ldap_search_scope = ConfigurationManager.getProperty("authentication-ldap", "search_scope");
|
||||
final String ldap_provider_url;
|
||||
final String ldap_id_field;
|
||||
final String ldap_search_context;
|
||||
final String ldap_search_scope;
|
||||
|
||||
String ldap_email_field = ConfigurationManager.getProperty("authentication-ldap", "email_field");
|
||||
String ldap_givenname_field = ConfigurationManager.getProperty("authentication-ldap", "givenname_field");
|
||||
String ldap_surname_field = ConfigurationManager.getProperty("authentication-ldap", "surname_field");
|
||||
String ldap_phone_field = ConfigurationManager.getProperty("authentication-ldap", "phone_field");
|
||||
String ldap_group_field = ConfigurationManager.getProperty("authentication-ldap", "login.groupmap.attribute");
|
||||
final String ldap_email_field;
|
||||
final String ldap_givenname_field;
|
||||
final String ldap_surname_field;
|
||||
final String ldap_phone_field;
|
||||
final String ldap_group_field;
|
||||
|
||||
boolean useTLS = ConfigurationManager.getBooleanProperty("authentication-ldap", "starttls", false);
|
||||
final boolean useTLS;
|
||||
|
||||
SpeakerToLDAP(Logger thelog) {
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
log = thelog;
|
||||
|
||||
ldap_provider_url = configurationService.getProperty("authentication-ldap.provider_url");
|
||||
ldap_id_field = configurationService.getProperty("authentication-ldap.id_field");
|
||||
ldap_search_context = configurationService.getProperty("authentication-ldap.search_context");
|
||||
ldap_search_scope = configurationService.getProperty("authentication-ldap.search_scope");
|
||||
ldap_email_field = configurationService.getProperty("authentication-ldap.email_field");
|
||||
ldap_givenname_field = configurationService.getProperty("authentication-ldap.givenname_field");
|
||||
ldap_surname_field = configurationService.getProperty("authentication-ldap.surname_field");
|
||||
ldap_phone_field = configurationService.getProperty("authentication-ldap.phone_field");
|
||||
ldap_group_field = configurationService.getProperty("authentication-ldap.login.groupmap.attribute");
|
||||
useTLS = configurationService.getBooleanProperty("authentication-ldap.starttls", false);
|
||||
}
|
||||
|
||||
protected String getDNOfUser(String adminUser, String adminPassword, Context context, String netid) {
|
||||
@@ -399,7 +426,8 @@ public class LDAPAuthentication
|
||||
}
|
||||
|
||||
// Set up environment for creating initial context
|
||||
Hashtable<String, String> env = new Hashtable<String, String>();
|
||||
@SuppressWarnings("UseOfObsoleteCollectionType")
|
||||
Hashtable<String, String> env = new Hashtable<>();
|
||||
env.put(javax.naming.Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory");
|
||||
env.put(javax.naming.Context.PROVIDER_URL, ldap_provider_url);
|
||||
|
||||
@@ -447,7 +475,7 @@ public class LDAPAuthentication
|
||||
SearchControls ctrls = new SearchControls();
|
||||
ctrls.setSearchScope(ldap_search_scope_value);
|
||||
|
||||
String searchName = "";
|
||||
String searchName;
|
||||
if (useTLS) {
|
||||
searchName = ldap_search_context;
|
||||
} else {
|
||||
@@ -555,7 +583,8 @@ public class LDAPAuthentication
|
||||
|
||||
|
||||
// Set up environment for creating initial context
|
||||
Hashtable<String, String> env = new Hashtable<String, String>();
|
||||
@SuppressWarnings("UseOfObsoleteCollectionType")
|
||||
Hashtable<String, String> env = new Hashtable<>();
|
||||
env.put(javax.naming.Context.INITIAL_CONTEXT_FACTORY,
|
||||
"com.sun.jndi.ldap.LdapCtxFactory");
|
||||
env.put(javax.naming.Context.PROVIDER_URL, ldap_provider_url);
|
||||
@@ -652,7 +681,7 @@ public class LDAPAuthentication
|
||||
if (StringUtils.isNotBlank(dn)) {
|
||||
System.out.println("dn:" + dn);
|
||||
int i = 1;
|
||||
String groupMap = ConfigurationManager.getProperty("authentication-ldap", "login.groupmap." + i);
|
||||
String groupMap = configurationService.getProperty("authentication-ldap", "login.groupmap." + i);
|
||||
|
||||
boolean cmp;
|
||||
|
||||
@@ -692,7 +721,7 @@ public class LDAPAuthentication
|
||||
}
|
||||
}
|
||||
|
||||
groupMap = ConfigurationManager.getProperty("authentication-ldap", "login.groupmap." + ++i);
|
||||
groupMap = configurationService.getProperty("authentication-ldap", "login.groupmap." + ++i);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -24,11 +24,14 @@ import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.authenticate.factory.AuthenticateServiceFactory;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.MetadataField;
|
||||
import org.dspace.content.MetadataFieldName;
|
||||
import org.dspace.content.MetadataSchema;
|
||||
import org.dspace.content.MetadataSchemaEnum;
|
||||
import org.dspace.content.NonUniqueMetadataException;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.MetadataFieldService;
|
||||
@@ -65,13 +68,12 @@ import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
* @author <a href="mailto:bliong@melcoe.mq.edu.au">Bruc Liong, MELCOE</a>
|
||||
* @author <a href="mailto:kli@melcoe.mq.edu.au">Xiang Kevin Li, MELCOE</a>
|
||||
* @author <a href="http://www.scottphillips.com">Scott Phillips</a>
|
||||
* @version $Revision$
|
||||
*/
|
||||
public class ShibAuthentication implements AuthenticationMethod {
|
||||
/**
|
||||
* log4j category
|
||||
*/
|
||||
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(ShibAuthentication.class);
|
||||
private static final Logger log = LogManager.getLogger(ShibAuthentication.class);
|
||||
|
||||
/**
|
||||
* Additional metadata mappings
|
||||
@@ -843,25 +845,24 @@ public class ShibAuthentication implements AuthenticationMethod {
|
||||
|
||||
// Truncate values
|
||||
if (value == null) {
|
||||
log.warn(
|
||||
"Unable to update the eperson's '" + field + "' metadata because the header '" + header + "' does" +
|
||||
" not exist.");
|
||||
log.warn("Unable to update the eperson's '{}' metadata"
|
||||
+ " because the header '{}' does not exist.", field, header);
|
||||
continue;
|
||||
} else if ("phone".equals(field) && value.length() > PHONE_MAX_SIZE) {
|
||||
log.warn(
|
||||
"Truncating eperson phone metadata because it is longer than " + PHONE_MAX_SIZE + ": '" + value +
|
||||
"'");
|
||||
log.warn("Truncating eperson phone metadata because it is longer than {}: '{}'",
|
||||
PHONE_MAX_SIZE, value);
|
||||
value = value.substring(0, PHONE_MAX_SIZE);
|
||||
} else if (value.length() > METADATA_MAX_SIZE) {
|
||||
log.warn(
|
||||
"Truncating eperson " + field + " metadata because it is longer than " + METADATA_MAX_SIZE + ": " +
|
||||
"'" + value + "'");
|
||||
log.warn("Truncating eperson {} metadata because it is longer than {}: '{}'",
|
||||
field, METADATA_MAX_SIZE, value);
|
||||
value = value.substring(0, METADATA_MAX_SIZE);
|
||||
}
|
||||
|
||||
ePersonService.setMetadata(context, eperson, field, value);
|
||||
log.debug(
|
||||
"Updated the eperson's '" + field + "' metadata using header: '" + header + "' = '" + value + "'.");
|
||||
String[] nameParts = MetadataFieldName.parse(field);
|
||||
ePersonService.setMetadataSingleValue(context, eperson,
|
||||
nameParts[0], nameParts[1], nameParts[2], value, null);
|
||||
log.debug("Updated the eperson's '{}' metadata using header: '{}' = '{}'.",
|
||||
field, header, value);
|
||||
}
|
||||
ePersonService.update(context, eperson);
|
||||
context.dispatchEvents();
|
||||
@@ -889,10 +890,8 @@ public class ShibAuthentication implements AuthenticationMethod {
|
||||
protected int swordCompatibility(Context context, String username, String password, HttpServletRequest request)
|
||||
throws SQLException {
|
||||
|
||||
EPerson eperson = null;
|
||||
|
||||
log.debug("Shibboleth Sword compatibility activated.");
|
||||
eperson = ePersonService.findByEmail(context, username.toLowerCase());
|
||||
EPerson eperson = ePersonService.findByEmail(context, username.toLowerCase());
|
||||
|
||||
if (eperson == null) {
|
||||
// lookup failed.
|
||||
@@ -951,7 +950,7 @@ public class ShibAuthentication implements AuthenticationMethod {
|
||||
}
|
||||
|
||||
|
||||
HashMap<String, String> map = new HashMap<String, String>();
|
||||
HashMap<String, String> map = new HashMap<>();
|
||||
|
||||
String[] mappingString = configurationService.getArrayProperty("authentication-shibboleth.eperson.metadata");
|
||||
boolean autoCreate = configurationService
|
||||
@@ -990,19 +989,19 @@ public class ShibAuthentication implements AuthenticationMethod {
|
||||
|
||||
if (valid) {
|
||||
// The eperson field is fine, we can use it.
|
||||
log.debug("Loading additional eperson metadata mapping for: '" + header + "' = '" + name + "'");
|
||||
log.debug("Loading additional eperson metadata mapping for: '{}' = '{}'",
|
||||
header, name);
|
||||
map.put(header, name);
|
||||
} else {
|
||||
// The field doesn't exist, and we can't use it.
|
||||
log.error(
|
||||
"Skipping the additional eperson metadata mapping for: '" + header + "' = '" + name + "' because " +
|
||||
"the field is not supported by the current configuration.");
|
||||
log.error("Skipping the additional eperson metadata mapping for: '{}' = '{}'"
|
||||
+ " because the field is not supported by the current configuration.",
|
||||
header, name);
|
||||
}
|
||||
} // foreach metadataStringList
|
||||
|
||||
|
||||
metadataHeaderMap = map;
|
||||
return;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1020,12 +1019,8 @@ public class ShibAuthentication implements AuthenticationMethod {
|
||||
return false;
|
||||
}
|
||||
|
||||
// The phone is a predefined field
|
||||
if ("phone".equals(metadataName)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
MetadataField metadataField = metadataFieldService.findByElement(context, "eperson", metadataName, null);
|
||||
MetadataField metadataField = metadataFieldService.findByElement(context,
|
||||
MetadataSchemaEnum.EPERSON.getName(), metadataName, null);
|
||||
return metadataField != null;
|
||||
}
|
||||
|
||||
@@ -1063,10 +1058,7 @@ public class ShibAuthentication implements AuthenticationMethod {
|
||||
try {
|
||||
context.turnOffAuthorisationSystem();
|
||||
metadataField = metadataFieldService.create(context, epersonSchema, metadataName, null, null);
|
||||
} catch (AuthorizeException e) {
|
||||
log.error(e.getMessage(), e);
|
||||
return false;
|
||||
} catch (NonUniqueMetadataException e) {
|
||||
} catch (AuthorizeException | NonUniqueMetadataException e) {
|
||||
log.error(e.getMessage(), e);
|
||||
return false;
|
||||
} finally {
|
||||
@@ -1211,7 +1203,7 @@ public class ShibAuthentication implements AuthenticationMethod {
|
||||
// Shibboleth attributes are separated by semicolons (and semicolons are
|
||||
// escaped with a backslash). So here we will scan through the string and
|
||||
// split on any unescaped semicolons.
|
||||
List<String> valueList = new ArrayList<String>();
|
||||
List<String> valueList = new ArrayList<>();
|
||||
int idx = 0;
|
||||
do {
|
||||
idx = values.indexOf(';', idx);
|
||||
|
@@ -12,6 +12,7 @@ import java.net.MalformedURLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.solr.client.solrj.SolrClient;
|
||||
import org.apache.solr.client.solrj.SolrQuery;
|
||||
@@ -21,7 +22,8 @@ import org.apache.solr.client.solrj.response.FacetField;
|
||||
import org.apache.solr.client.solrj.response.QueryResponse;
|
||||
import org.apache.solr.common.SolrInputDocument;
|
||||
import org.dspace.authority.indexer.AuthorityIndexingService;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/**
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
@@ -31,7 +33,7 @@ import org.dspace.core.ConfigurationManager;
|
||||
*/
|
||||
public class AuthoritySolrServiceImpl implements AuthorityIndexingService, AuthoritySearchService {
|
||||
|
||||
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthoritySolrServiceImpl.class);
|
||||
private static final Logger log = LogManager.getLogger(AuthoritySolrServiceImpl.class);
|
||||
|
||||
protected AuthoritySolrServiceImpl() {
|
||||
|
||||
@@ -46,7 +48,9 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho
|
||||
throws MalformedURLException, SolrServerException, IOException {
|
||||
if (solr == null) {
|
||||
|
||||
String solrService = ConfigurationManager.getProperty("solr.authority.server");
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
String solrService = configurationService.getProperty("solr.authority.server");
|
||||
|
||||
log.debug("Solr authority URL: " + solrService);
|
||||
|
||||
@@ -153,7 +157,7 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho
|
||||
|
||||
QueryResponse response = getSolr().query(solrQuery);
|
||||
|
||||
List<String> results = new ArrayList<String>();
|
||||
List<String> results = new ArrayList<>();
|
||||
FacetField facetField = response.getFacetField("field");
|
||||
if (facetField != null) {
|
||||
List<FacetField.Count> values = facetField.getValues();
|
||||
|
@@ -7,20 +7,22 @@
|
||||
*/
|
||||
package org.dspace.authority;
|
||||
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
|
||||
/**
|
||||
* This class contains a list of active authority types.
|
||||
* It can be used to created a new instance of a specific type.
|
||||
* However if you need to make a new instance to store it in solr, you need to use AuthorityValueGenerator.
|
||||
* To create an instance from a solr record, use AuthorityValue#fromSolr(SolrDocument).
|
||||
* However if you need to make a new instance to store it in Solr, you need to use {@link AuthorityValueGenerator}.
|
||||
* To create an instance from a Solr record, use {@link AuthorityValue#fromSolr(SolrDocument)}.
|
||||
*
|
||||
* This class is instantiated in spring and accessed by a static method in AuthorityValue.
|
||||
* This class is instantiated in Spring and accessed by a static method in AuthorityValue.
|
||||
*
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
* @author Kevin Van de Velde (kevin at atmire dot com)
|
||||
@@ -32,11 +34,11 @@ public class AuthorityTypes {
|
||||
/**
|
||||
* log4j logger
|
||||
*/
|
||||
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthorityTypes.class);
|
||||
private static final Logger log = LogManager.getLogger(AuthorityTypes.class);
|
||||
|
||||
protected List<AuthorityValue> types = new ArrayList<AuthorityValue>();
|
||||
protected List<AuthorityValue> types = new ArrayList<>();
|
||||
|
||||
protected Map<String, AuthorityValue> fieldDefaults = new HashMap<String, AuthorityValue>();
|
||||
protected Map<String, AuthorityValue> fieldDefaults = new HashMap<>();
|
||||
|
||||
|
||||
public List<AuthorityValue> getTypes() {
|
||||
@@ -60,10 +62,10 @@ public class AuthorityTypes {
|
||||
for (AuthorityValue authorityValue : types) {
|
||||
if (authorityValue.getAuthorityType().equals(type)) {
|
||||
try {
|
||||
result = authorityValue.getClass().newInstance();
|
||||
} catch (InstantiationException e) {
|
||||
log.error("Error", e);
|
||||
} catch (IllegalAccessException e) {
|
||||
result = authorityValue.getClass().getDeclaredConstructor().newInstance();
|
||||
} catch (InstantiationException | IllegalAccessException
|
||||
| NoSuchMethodException | SecurityException
|
||||
| IllegalArgumentException | InvocationTargetException e) {
|
||||
log.error("Error", e);
|
||||
}
|
||||
}
|
||||
|
@@ -8,6 +8,7 @@
|
||||
package org.dspace.authority;
|
||||
|
||||
import java.io.PrintWriter;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Iterator;
|
||||
@@ -15,19 +16,22 @@ import java.util.List;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.authority.factory.AuthorityServiceFactory;
|
||||
import org.dspace.authority.service.AuthorityValueService;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.MetadataValue;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/**
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
@@ -40,21 +44,23 @@ public class UpdateAuthorities {
|
||||
/**
|
||||
* log4j logger
|
||||
*/
|
||||
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(UpdateAuthorities.class);
|
||||
private static final Logger log = LogManager.getLogger(UpdateAuthorities.class);
|
||||
|
||||
protected PrintWriter print = null;
|
||||
|
||||
private Context context;
|
||||
private final Context context;
|
||||
private List<String> selectedIDs;
|
||||
|
||||
protected final ItemService itemService;
|
||||
protected final AuthorityValueService authorityValueService;
|
||||
protected final ConfigurationService configurationService;
|
||||
|
||||
public UpdateAuthorities(Context context) {
|
||||
print = new PrintWriter(System.out);
|
||||
this.context = context;
|
||||
this.authorityValueService = AuthorityServiceFactory.getInstance().getAuthorityValueService();
|
||||
this.itemService = ContentServiceFactory.getInstance().getItemService();
|
||||
this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
}
|
||||
|
||||
public static void main(String[] args) throws ParseException {
|
||||
@@ -78,7 +84,7 @@ public class UpdateAuthorities {
|
||||
}
|
||||
|
||||
protected static int processArgs(String[] args, UpdateAuthorities UpdateAuthorities) throws ParseException {
|
||||
CommandLineParser parser = new PosixParser();
|
||||
CommandLineParser parser = new DefaultParser();
|
||||
Options options = createCommandLineOptions();
|
||||
CommandLine line = parser.parse(options, args);
|
||||
|
||||
@@ -102,7 +108,7 @@ public class UpdateAuthorities {
|
||||
}
|
||||
|
||||
private void setSelectedIDs(String b) {
|
||||
this.selectedIDs = new ArrayList<String>();
|
||||
this.selectedIDs = new ArrayList<>();
|
||||
String[] orcids = b.split(",");
|
||||
for (String orcid : orcids) {
|
||||
this.selectedIDs.add(orcid.trim());
|
||||
@@ -125,7 +131,7 @@ public class UpdateAuthorities {
|
||||
List<AuthorityValue> authorities;
|
||||
|
||||
if (selectedIDs != null && !selectedIDs.isEmpty()) {
|
||||
authorities = new ArrayList<AuthorityValue>();
|
||||
authorities = new ArrayList<>();
|
||||
for (String selectedID : selectedIDs) {
|
||||
AuthorityValue byUID = authorityValueService.findByUID(context, selectedID);
|
||||
authorities.add(byUID);
|
||||
@@ -149,7 +155,7 @@ public class UpdateAuthorities {
|
||||
protected void followUp(AuthorityValue authority) {
|
||||
print.println("Updated: " + authority.getValue() + " - " + authority.getId());
|
||||
|
||||
boolean updateItems = ConfigurationManager.getBooleanProperty("solrauthority", "auto-update-items");
|
||||
boolean updateItems = configurationService.getBooleanProperty("solrauthority.auto-update-items");
|
||||
if (updateItems) {
|
||||
updateItems(authority);
|
||||
}
|
||||
@@ -169,7 +175,7 @@ public class UpdateAuthorities {
|
||||
print.println("Updated item with handle " + next.getHandle());
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
} catch (SQLException | AuthorizeException e) {
|
||||
log.error("Error updating item", e);
|
||||
print.println("Error updating item. " + Arrays.toString(e.getStackTrace()));
|
||||
}
|
||||
|
@@ -460,7 +460,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
|
||||
groupService.allMemberGroups(c, e),
|
||||
Constants.ADMIN, Constants.COLLECTION);
|
||||
|
||||
if (CollectionUtils.isNotEmpty(policies)) {
|
||||
if (CollectionUtils.isNotEmpty(policies) || isCommunityAdmin(c, e)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
@@ -614,6 +614,12 @@ public class AuthorizeServiceImpl implements AuthorizeService {
|
||||
resourcePolicyService.removeDsoEPersonPolicies(c, o, e);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void removeAllEPersonPolicies(Context c, EPerson e)
|
||||
throws SQLException, AuthorizeException {
|
||||
resourcePolicyService.removeAllEPersonPolicies(c, e);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Group> getAuthorizedGroups(Context c, DSpaceObject o,
|
||||
int actionID) throws java.sql.SQLException {
|
||||
|
@@ -114,6 +114,11 @@ public class ResourcePolicyServiceImpl implements ResourcePolicyService {
|
||||
return resourcePolicyDAO.findByEPersonGroupTypeIdAction(c, e, groups, action, type_id);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ResourcePolicy> find(Context context, EPerson ePerson) throws SQLException {
|
||||
return resourcePolicyDAO.findByEPerson(context, ePerson);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ResourcePolicy> findByTypeGroupActionExceptId(Context context, DSpaceObject dso, Group group,
|
||||
int action, int notPolicyID)
|
||||
@@ -246,6 +251,11 @@ public class ResourcePolicyServiceImpl implements ResourcePolicyService {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void removeAllEPersonPolicies(Context context, EPerson ePerson) throws SQLException, AuthorizeException {
|
||||
resourcePolicyDAO.deleteByEPerson(context, ePerson);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void removeGroupPolicies(Context c, Group group) throws SQLException {
|
||||
resourcePolicyDAO.deleteByGroup(c, group);
|
||||
|
@@ -33,6 +33,8 @@ public interface ResourcePolicyDAO extends GenericDAO<ResourcePolicy> {
|
||||
public List<ResourcePolicy> findByDsoAndType(Context context, DSpaceObject dSpaceObject, String type)
|
||||
throws SQLException;
|
||||
|
||||
public List<ResourcePolicy> findByEPerson(Context context, EPerson ePerson) throws SQLException;
|
||||
|
||||
public List<ResourcePolicy> findByGroup(Context context, Group group) throws SQLException;
|
||||
|
||||
public List<ResourcePolicy> findByDSoAndAction(Context context, DSpaceObject dso, int actionId) throws SQLException;
|
||||
@@ -66,6 +68,15 @@ public interface ResourcePolicyDAO extends GenericDAO<ResourcePolicy> {
|
||||
|
||||
public void deleteByDsoEPersonPolicies(Context context, DSpaceObject dso, EPerson ePerson) throws SQLException;
|
||||
|
||||
/**
|
||||
* Deletes all policies that belong to an EPerson
|
||||
*
|
||||
* @param context DSpace context object
|
||||
* @param ePerson ePerson whose policies to delete
|
||||
* @throws SQLException if database error
|
||||
*/
|
||||
public void deleteByEPerson(Context context, EPerson ePerson) throws SQLException;
|
||||
|
||||
public void deleteByDsoAndTypeNotEqualsTo(Context c, DSpaceObject o, String type) throws SQLException;
|
||||
|
||||
/**
|
||||
@@ -101,7 +112,7 @@ public interface ResourcePolicyDAO extends GenericDAO<ResourcePolicy> {
|
||||
* @return total resource policies of the ePerson
|
||||
* @throws SQLException if database error
|
||||
*/
|
||||
public int countByEPerson(Context context, EPerson eperson) throws SQLException;
|
||||
public int countByEPerson(Context context, EPerson ePerson) throws SQLException;
|
||||
|
||||
/**
|
||||
* Return a paginated list of policies related to a resourceUuid belong to an ePerson
|
||||
|
@@ -63,6 +63,16 @@ public class ResourcePolicyDAOImpl extends AbstractHibernateDAO<ResourcePolicy>
|
||||
return list(context, criteriaQuery, false, ResourcePolicy.class, -1, -1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ResourcePolicy> findByEPerson(Context context, EPerson ePerson) throws SQLException {
|
||||
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
|
||||
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, ResourcePolicy.class);
|
||||
Root<ResourcePolicy> resourcePolicyRoot = criteriaQuery.from(ResourcePolicy.class);
|
||||
criteriaQuery.select(resourcePolicyRoot);
|
||||
criteriaQuery.where(criteriaBuilder.equal(resourcePolicyRoot.get(ResourcePolicy_.eperson), ePerson));
|
||||
return list(context, criteriaQuery, false, ResourcePolicy.class, -1, -1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ResourcePolicy> findByGroup(Context context, Group group) throws SQLException {
|
||||
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
|
||||
@@ -194,6 +204,15 @@ public class ResourcePolicyDAOImpl extends AbstractHibernateDAO<ResourcePolicy>
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteByEPerson(Context context, EPerson ePerson) throws SQLException {
|
||||
String queryString = "delete from ResourcePolicy where eperson= :eperson";
|
||||
Query query = createQuery(context, queryString);
|
||||
query.setParameter("eperson", ePerson);
|
||||
query.executeUpdate();
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteByDsoAndTypeNotEqualsTo(Context context, DSpaceObject dso, String type) throws SQLException {
|
||||
|
||||
@@ -247,10 +266,10 @@ public class ResourcePolicyDAOImpl extends AbstractHibernateDAO<ResourcePolicy>
|
||||
}
|
||||
|
||||
@Override
|
||||
public int countByEPerson(Context context, EPerson eperson) throws SQLException {
|
||||
public int countByEPerson(Context context, EPerson ePerson) throws SQLException {
|
||||
Query query = createQuery(context,
|
||||
"SELECT count(*) FROM " + ResourcePolicy.class.getSimpleName() + " WHERE eperson_id = (:epersonUuid) ");
|
||||
query.setParameter("epersonUuid", eperson.getID());
|
||||
query.setParameter("epersonUuid", ePerson.getID());
|
||||
return count(query);
|
||||
}
|
||||
|
||||
|
@@ -449,6 +449,16 @@ public interface AuthorizeService {
|
||||
*/
|
||||
public void removeEPersonPolicies(Context c, DSpaceObject o, EPerson e) throws SQLException, AuthorizeException;
|
||||
|
||||
/**
|
||||
* Removes all policies from an eperson that belong to an EPerson.
|
||||
*
|
||||
* @param c current context
|
||||
* @param e the eperson
|
||||
* @throws SQLException if there's a database problem
|
||||
* @throws AuthorizeException if authorization error
|
||||
*/
|
||||
public void removeAllEPersonPolicies(Context c, EPerson e) throws SQLException, AuthorizeException;
|
||||
|
||||
/**
|
||||
* Returns all groups authorized to perform an action on an object. Returns
|
||||
* empty array if no matches.
|
||||
|
@@ -39,6 +39,16 @@ public interface ResourcePolicyService extends DSpaceCRUDService<ResourcePolicy>
|
||||
|
||||
public List<ResourcePolicy> find(Context context, Group group) throws SQLException;
|
||||
|
||||
/**
|
||||
* Retrieve a list of ResourcePolicies by EPerson
|
||||
*
|
||||
* @param c context
|
||||
* @param ePerson the EPerson for which to look up the resource policies
|
||||
* @return a list of ResourcePolicies for the provided EPerson
|
||||
* @throws SQLException if there's a database problem
|
||||
*/
|
||||
public List<ResourcePolicy> find(Context c, EPerson ePerson) throws SQLException;
|
||||
|
||||
public List<ResourcePolicy> find(Context c, EPerson e, List<Group> groups, int action, int type_id)
|
||||
throws SQLException;
|
||||
|
||||
@@ -72,6 +82,16 @@ public interface ResourcePolicyService extends DSpaceCRUDService<ResourcePolicy>
|
||||
public void removeDsoEPersonPolicies(Context context, DSpaceObject dso, EPerson ePerson)
|
||||
throws SQLException, AuthorizeException;
|
||||
|
||||
/**
|
||||
* Removes all ResourcePolicies related to an EPerson
|
||||
*
|
||||
* @param context context
|
||||
* @param ePerson the EPerson for which the ResourcePolicies will be deleted
|
||||
* @throws SQLException if there's a database problem
|
||||
* @throws AuthorizeException when the current user is not authorized
|
||||
*/
|
||||
public void removeAllEPersonPolicies(Context context, EPerson ePerson) throws SQLException, AuthorizeException;
|
||||
|
||||
public void removeGroupPolicies(Context c, Group group) throws SQLException;
|
||||
|
||||
public void removeDsoAndTypeNotEqualsToPolicies(Context c, DSpaceObject o, String type)
|
||||
|
@@ -7,8 +7,11 @@
|
||||
*/
|
||||
package org.dspace.browse;
|
||||
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/**
|
||||
* Factory class to generate DAOs based on the configuration
|
||||
@@ -33,16 +36,21 @@ public class BrowseDAOFactory {
|
||||
*/
|
||||
public static BrowseDAO getInstance(Context context)
|
||||
throws BrowseException {
|
||||
String className = ConfigurationManager.getProperty("browseDAO.class");
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
String className = configurationService.getProperty("browseDAO.class");
|
||||
if (className == null) {
|
||||
// SOLR implementation is the default since DSpace 4.0
|
||||
return new SolrBrowseDAO(context);
|
||||
}
|
||||
try {
|
||||
return (BrowseDAO) Class
|
||||
.forName(ConfigurationManager.getProperty("browseDAO.class"))
|
||||
.forName(configurationService.getProperty("browseDAO.class"))
|
||||
.getConstructor(Context.class).newInstance(context);
|
||||
} catch (Exception e) {
|
||||
} catch (ClassNotFoundException | IllegalAccessException
|
||||
| IllegalArgumentException | InstantiationException
|
||||
| NoSuchMethodException | SecurityException |
|
||||
InvocationTargetException e) {
|
||||
throw new BrowseException("The configuration for browseDAO is invalid: " + className, e);
|
||||
}
|
||||
}
|
||||
|
@@ -13,7 +13,8 @@ import java.util.StringTokenizer;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.dspace.sort.SortException;
|
||||
import org.dspace.sort.SortOption;
|
||||
|
||||
@@ -414,6 +415,7 @@ public final class BrowseIndex {
|
||||
* @return the name of the table
|
||||
* @deprecated 1.5
|
||||
*/
|
||||
@Deprecated
|
||||
public static String getTableName(int number, boolean isCommunity, boolean isCollection, boolean isDistinct,
|
||||
boolean isMap) {
|
||||
return BrowseIndex.getTableName(makeTableBaseName(number), isCommunity, isCollection, isDistinct, isMap);
|
||||
@@ -462,6 +464,7 @@ public final class BrowseIndex {
|
||||
* @return the name of the table
|
||||
* @deprecated 1.5
|
||||
*/
|
||||
@Deprecated
|
||||
public String getTableName(boolean isCommunity, boolean isCollection, boolean isDistinct, boolean isMap) {
|
||||
if (isDistinct || isMap) {
|
||||
return BrowseIndex.getTableName(number, isCommunity, isCollection, isDistinct, isMap);
|
||||
@@ -482,6 +485,7 @@ public final class BrowseIndex {
|
||||
* @return the name of the table
|
||||
* @deprecated 1.5
|
||||
*/
|
||||
@Deprecated
|
||||
public String getTableName(boolean isCommunity, boolean isCollection) {
|
||||
return getTableName(isCommunity, isCollection, false, false);
|
||||
}
|
||||
@@ -514,6 +518,7 @@ public final class BrowseIndex {
|
||||
* @return table name
|
||||
* @deprecated 1.5
|
||||
*/
|
||||
@Deprecated
|
||||
public String getTableName(boolean isDistinct, boolean isCommunity, boolean isCollection) {
|
||||
return getTableName(isCommunity, isCollection, isDistinct, false);
|
||||
}
|
||||
@@ -649,6 +654,7 @@ public final class BrowseIndex {
|
||||
* @throws BrowseException if browse error
|
||||
* @deprecated
|
||||
*/
|
||||
@Deprecated
|
||||
public static String[] tables()
|
||||
throws BrowseException {
|
||||
BrowseIndex[] bis = getBrowseIndices();
|
||||
@@ -670,13 +676,14 @@ public final class BrowseIndex {
|
||||
throws BrowseException {
|
||||
int idx = 1;
|
||||
String definition;
|
||||
ArrayList<BrowseIndex> browseIndices = new ArrayList<BrowseIndex>();
|
||||
ArrayList<BrowseIndex> browseIndices = new ArrayList<>();
|
||||
|
||||
while (((definition = ConfigurationManager.getProperty("webui.browse.index." + idx))) != null) {
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
while (((definition = configurationService.getProperty("webui.browse.index." + idx))) != null) {
|
||||
BrowseIndex bi = new BrowseIndex(definition, idx);
|
||||
bi.displayFrequencies = Boolean.valueOf(ConfigurationManager
|
||||
.getBooleanProperty("webui.browse.metadata.show-freq."
|
||||
+ idx, true));
|
||||
bi.displayFrequencies = configurationService
|
||||
.getBooleanProperty("webui.browse.metadata.show-freq." + idx, true);
|
||||
|
||||
browseIndices.add(bi);
|
||||
idx++;
|
||||
@@ -804,8 +811,8 @@ public final class BrowseIndex {
|
||||
* @return true or false
|
||||
*/
|
||||
public boolean isTagCloudEnabled() {
|
||||
|
||||
return ConfigurationManager.getBooleanProperty("webui.browse.index.tagcloud." + number);
|
||||
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
return configurationService.getBooleanProperty("webui.browse.index.tagcloud." + number);
|
||||
}
|
||||
}
|
||||
|
@@ -475,6 +475,7 @@ public class BrowseInfo {
|
||||
* @return an empty array of Item.
|
||||
* @deprecated
|
||||
*/
|
||||
@Deprecated
|
||||
public Item[] getItemResults() {
|
||||
return new Item[0];
|
||||
}
|
||||
|
@@ -10,7 +10,8 @@ package org.dspace.browse;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/**
|
||||
* Class to represent the configuration of the cross-linking between browse
|
||||
@@ -23,7 +24,7 @@ public class CrossLinks {
|
||||
/**
|
||||
* a map of the desired links
|
||||
*/
|
||||
private Map<String, String> links = new HashMap<String, String>();
|
||||
private Map<String, String> links = new HashMap<>();
|
||||
|
||||
/**
|
||||
* Construct a new object which will obtain the configuration for itself.
|
||||
@@ -35,7 +36,9 @@ public class CrossLinks {
|
||||
int i = 1;
|
||||
while (true) {
|
||||
String field = "webui.browse.link." + i;
|
||||
String config = ConfigurationManager.getProperty(field);
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
String config = configurationService.getProperty(field);
|
||||
if (config == null) {
|
||||
break;
|
||||
}
|
||||
|
@@ -7,8 +7,11 @@
|
||||
*/
|
||||
package org.dspace.browse;
|
||||
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/**
|
||||
* Factory class to allow us to load the correct DAO for registering
|
||||
@@ -38,16 +41,22 @@ public class ItemCountDAOFactory {
|
||||
/** Log4j logger */
|
||||
ItemCountDAO dao = null;
|
||||
|
||||
String className = ConfigurationManager.getProperty("ItemCountDAO.class");
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
String className = configurationService.getProperty("ItemCountDAO.class");
|
||||
|
||||
// SOLR implementation is the default since DSpace 4.0
|
||||
if (className == null) {
|
||||
dao = new ItemCountDAOSolr();
|
||||
} else {
|
||||
try {
|
||||
dao = (ItemCountDAO) Class
|
||||
.forName(className.trim()).newInstance();
|
||||
} catch (Exception e) {
|
||||
dao = (ItemCountDAO) Class.forName(className.trim())
|
||||
.getDeclaredConstructor()
|
||||
.newInstance();
|
||||
} catch (ClassNotFoundException | IllegalAccessException
|
||||
| InstantiationException | NoSuchMethodException
|
||||
| SecurityException | IllegalArgumentException
|
||||
| InvocationTargetException e) {
|
||||
throw new ItemCountException("The configuration for ItemCountDAO is invalid: " + className, e);
|
||||
}
|
||||
}
|
||||
|
@@ -12,8 +12,10 @@ import java.util.Locale;
|
||||
import com.ibm.icu.text.CollationElementIterator;
|
||||
import com.ibm.icu.text.Collator;
|
||||
import com.ibm.icu.text.RuleBasedCollator;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.dspace.text.filter.TextFilter;
|
||||
|
||||
/**
|
||||
@@ -32,7 +34,7 @@ import org.dspace.text.filter.TextFilter;
|
||||
* @author Graham Triggs
|
||||
*/
|
||||
public class LocaleOrderingFilter implements TextFilter {
|
||||
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(LocaleOrderingFilter.class);
|
||||
private static final Logger log = LogManager.getLogger(LocaleOrderingFilter.class);
|
||||
|
||||
/**
|
||||
* Uses a Locale dependent Collator to generate a sort string
|
||||
@@ -47,7 +49,7 @@ public class LocaleOrderingFilter implements TextFilter {
|
||||
// Have we got a collator?
|
||||
if (collator != null) {
|
||||
int element;
|
||||
StringBuffer buf = new StringBuffer();
|
||||
StringBuilder buf = new StringBuilder();
|
||||
|
||||
// Iterate through the elements of the collator
|
||||
CollationElementIterator iter = collator.getCollationElementIterator(str);
|
||||
@@ -107,7 +109,9 @@ public class LocaleOrderingFilter implements TextFilter {
|
||||
Locale theLocale = null;
|
||||
|
||||
// Get a Locale configuration from the dspace.cfg
|
||||
String locale = ConfigurationManager.getProperty("webui.browse.sort.locale");
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
String locale = configurationService.getProperty("webui.browse.sort.locale");
|
||||
|
||||
if (locale != null) {
|
||||
// Attempt to create Locale for the configured value
|
||||
|
@@ -17,17 +17,19 @@ import javax.mail.MessagingException;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.checker.factory.CheckerServiceFactory;
|
||||
import org.dspace.checker.service.SimpleReporterService;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.Email;
|
||||
import org.dspace.core.Utils;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
@@ -44,7 +46,7 @@ public class DailyReportEmailer {
|
||||
/**
|
||||
* log4j logger.
|
||||
*/
|
||||
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(DailyReportEmailer.class);
|
||||
private static final Logger log = LogManager.getLogger(DailyReportEmailer.class);
|
||||
|
||||
/**
|
||||
* Default constructor.
|
||||
@@ -63,14 +65,16 @@ public class DailyReportEmailer {
|
||||
public void sendReport(File attachment, int numberOfBitstreams)
|
||||
throws IOException, javax.mail.MessagingException {
|
||||
if (numberOfBitstreams > 0) {
|
||||
String hostname = Utils.getHostName(ConfigurationManager.getProperty("dspace.ui.url"));
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
String hostname = Utils.getHostName(configurationService.getProperty("dspace.ui.url"));
|
||||
Email email = new Email();
|
||||
email.setSubject(
|
||||
"Checksum checker Report - " + numberOfBitstreams + " Bitstreams found with POSSIBLE issues on " +
|
||||
hostname);
|
||||
email.setSubject(String.format(
|
||||
"Checksum checker Report - %d Bitstreams found with POSSIBLE issues on %s",
|
||||
numberOfBitstreams, hostname));
|
||||
email.setContent("Checker Report", "report is attached ...");
|
||||
email.addAttachment(attachment, "checksum_checker_report.txt");
|
||||
email.addRecipient(ConfigurationManager.getProperty("mail.admin"));
|
||||
email.addRecipient(configurationService.getProperty("mail.admin"));
|
||||
email.send();
|
||||
}
|
||||
}
|
||||
@@ -98,7 +102,7 @@ public class DailyReportEmailer {
|
||||
*/
|
||||
public static void main(String[] args) {
|
||||
// set up command line parser
|
||||
CommandLineParser parser = new PosixParser();
|
||||
CommandLineParser parser = new DefaultParser();
|
||||
CommandLine line = null;
|
||||
|
||||
// create an options object and populate it
|
||||
@@ -164,7 +168,9 @@ public class DailyReportEmailer {
|
||||
int numBitstreams = 0;
|
||||
|
||||
// create a temporary file in the log directory
|
||||
String dirLocation = ConfigurationManager.getProperty("log.report.dir");
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
String dirLocation = configurationService.getProperty("log.report.dir");
|
||||
File directory = new File(dirLocation);
|
||||
|
||||
if (directory.exists() && directory.isDirectory()) {
|
||||
@@ -247,7 +253,7 @@ public class DailyReportEmailer {
|
||||
if (writer != null) {
|
||||
try {
|
||||
writer.close();
|
||||
} catch (Exception e) {
|
||||
} catch (IOException e) {
|
||||
log.fatal("Could not close writer", e);
|
||||
}
|
||||
}
|
||||
|
@@ -19,12 +19,14 @@ import java.util.Properties;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.checker.factory.CheckerServiceFactory;
|
||||
import org.dspace.checker.service.ChecksumHistoryService;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.Utils;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/**
|
||||
* Manages the deletion of results from the checksum history. It uses the
|
||||
@@ -40,7 +42,7 @@ public final class ResultsPruner {
|
||||
/**
|
||||
* Default logger.
|
||||
*/
|
||||
private static final Logger LOG = org.apache.logging.log4j.LogManager.getLogger(ResultsPruner.class);
|
||||
private static final Logger LOG = LogManager.getLogger(ResultsPruner.class);
|
||||
|
||||
/**
|
||||
* Factory method for the default results pruner configuration using
|
||||
@@ -51,12 +53,13 @@ public final class ResultsPruner {
|
||||
*/
|
||||
public static ResultsPruner getDefaultPruner(Context context) {
|
||||
try {
|
||||
return getPruner(context, ConfigurationManager.getProperties());
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
return getPruner(context, configurationService.getProperties());
|
||||
} catch (FileNotFoundException e) {
|
||||
throw new IllegalStateException(
|
||||
"VeryExceptionalException - config file not there! ", e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
@@ -7,11 +7,14 @@
|
||||
*/
|
||||
package org.dspace.content;
|
||||
|
||||
import static org.dspace.content.service.DSpaceObjectService.MD_LICENSE;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.persistence.Cacheable;
|
||||
import javax.persistence.CascadeType;
|
||||
import javax.persistence.Column;
|
||||
@@ -46,7 +49,6 @@ import org.hibernate.proxy.HibernateProxyHelper;
|
||||
* effect.
|
||||
*
|
||||
* @author Robert Tansley
|
||||
* @version $Revision$
|
||||
*/
|
||||
@Entity
|
||||
@Table(name = "collection")
|
||||
@@ -87,23 +89,11 @@ public class Collection extends DSpaceObject implements DSpaceObjectLegacySuppor
|
||||
joinColumns = {@JoinColumn(name = "collection_id")},
|
||||
inverseJoinColumns = {@JoinColumn(name = "community_id")}
|
||||
)
|
||||
private Set<Community> communities = new HashSet<>();
|
||||
private final Set<Community> communities = new HashSet<>();
|
||||
|
||||
@Transient
|
||||
private transient CollectionService collectionService;
|
||||
|
||||
// Keys for accessing Collection metadata
|
||||
@Transient
|
||||
public static final String COPYRIGHT_TEXT = "copyright_text";
|
||||
@Transient
|
||||
public static final String INTRODUCTORY_TEXT = "introductory_text";
|
||||
@Transient
|
||||
public static final String SHORT_DESCRIPTION = "short_description";
|
||||
@Transient
|
||||
public static final String SIDEBAR_TEXT = "side_bar_text";
|
||||
@Transient
|
||||
public static final String PROVENANCE_TEXT = "provenance_description";
|
||||
|
||||
/**
|
||||
* Protected constructor, create object using:
|
||||
* {@link org.dspace.content.service.CollectionService#create(Context, Community)}
|
||||
@@ -207,10 +197,17 @@ public class Collection extends DSpaceObject implements DSpaceObjectLegacySuppor
|
||||
* Get the license that users must grant before submitting to this
|
||||
* collection.
|
||||
*
|
||||
* @return the license for this collection
|
||||
* @return the license for this collection. Never null.
|
||||
*/
|
||||
@Nonnull
|
||||
public String getLicenseCollection() {
|
||||
return getCollectionService().getMetadata(this, "license");
|
||||
String license = getCollectionService()
|
||||
.getMetadataFirstValue(this, CollectionService.MD_LICENSE, Item.ANY);
|
||||
if (null == license) {
|
||||
return "";
|
||||
} else {
|
||||
return license;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -222,7 +219,7 @@ public class Collection extends DSpaceObject implements DSpaceObjectLegacySuppor
|
||||
* @throws SQLException if database error
|
||||
*/
|
||||
public void setLicense(Context context, String license) throws SQLException {
|
||||
getCollectionService().setMetadata(context, this, "license", license);
|
||||
getCollectionService().setMetadataSingleValue(context, this, MD_LICENSE, Item.ANY, license);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -36,7 +36,6 @@ import org.dspace.content.service.CollectionService;
|
||||
import org.dspace.content.service.CommunityService;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.content.service.WorkspaceItemService;
|
||||
import org.dspace.core.ConfigurationManager;
|
||||
import org.dspace.core.Constants;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.I18nUtil;
|
||||
@@ -55,9 +54,11 @@ import org.dspace.eperson.service.SubscribeService;
|
||||
import org.dspace.event.Event;
|
||||
import org.dspace.harvest.HarvestedCollection;
|
||||
import org.dspace.harvest.service.HarvestedCollectionService;
|
||||
import org.dspace.identifier.IdentifierException;
|
||||
import org.dspace.identifier.service.IdentifierService;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.workflow.factory.WorkflowServiceFactory;
|
||||
import org.dspace.xmlworkflow.WorkflowConfigurationException;
|
||||
import org.dspace.xmlworkflow.XmlWorkflowFactoryImpl;
|
||||
import org.dspace.xmlworkflow.factory.XmlWorkflowFactory;
|
||||
import org.dspace.xmlworkflow.state.Workflow;
|
||||
import org.dspace.xmlworkflow.storedcomponents.CollectionRole;
|
||||
@@ -93,6 +94,8 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
||||
protected CommunityService communityService;
|
||||
@Autowired(required = true)
|
||||
protected GroupService groupService;
|
||||
@Autowired(required = true)
|
||||
protected IdentifierService identifierService;
|
||||
|
||||
@Autowired(required = true)
|
||||
protected LicenseService licenseService;
|
||||
@@ -112,6 +115,9 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
||||
@Autowired(required = true)
|
||||
protected SearchService searchService;
|
||||
|
||||
@Autowired(required = true)
|
||||
protected ConfigurationService configurationService;
|
||||
|
||||
protected CollectionServiceImpl() {
|
||||
super();
|
||||
}
|
||||
@@ -132,13 +138,6 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
||||
//Add our newly created collection to our community, authorization checks occur in THIS method
|
||||
communityService.addCollection(context, community, newCollection);
|
||||
|
||||
//Update our community so we have a collection identifier
|
||||
if (handle == null) {
|
||||
handleService.createHandle(context, newCollection);
|
||||
} else {
|
||||
handleService.createHandle(context, newCollection, handle);
|
||||
}
|
||||
|
||||
// create the default authorization policy for collections
|
||||
// of 'anonymous' READ
|
||||
Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS);
|
||||
@@ -151,6 +150,18 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
||||
authorizeService
|
||||
.createResourcePolicy(context, newCollection, anonymousGroup, null, Constants.DEFAULT_BITSTREAM_READ, null);
|
||||
|
||||
collectionDAO.save(context, newCollection);
|
||||
|
||||
//Update our collection so we have a collection identifier
|
||||
try {
|
||||
if (handle == null) {
|
||||
identifierService.register(context, newCollection);
|
||||
} else {
|
||||
identifierService.register(context, newCollection, handle);
|
||||
}
|
||||
} catch (IllegalStateException | IdentifierException ex) {
|
||||
throw new IllegalStateException(ex);
|
||||
}
|
||||
|
||||
context.addEvent(new Event(Event.CREATE, Constants.COLLECTION,
|
||||
newCollection.getID(), newCollection.getHandle(),
|
||||
@@ -160,7 +171,6 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
||||
"collection_id=" + newCollection.getID())
|
||||
+ ",handle=" + newCollection.getHandle());
|
||||
|
||||
collectionDAO.save(context, newCollection);
|
||||
return newCollection;
|
||||
}
|
||||
|
||||
@@ -190,7 +200,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
||||
|
||||
@Override
|
||||
public List<Collection> findAuthorizedOptimized(Context context, int actionID) throws SQLException {
|
||||
if (!ConfigurationManager
|
||||
if (!configurationService
|
||||
.getBooleanProperty("org.dspace.content.Collection.findAuthorizedPerformanceOptimize", false)) {
|
||||
// Fallback to legacy query if config says so. The rationale could be that a site found a bug.
|
||||
return findAuthorized(context, null, actionID);
|
||||
@@ -291,9 +301,10 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setMetadata(Context context, Collection collection, String field, String value)
|
||||
throws MissingResourceException, SQLException {
|
||||
if ((field.trim()).equals("name") && (value == null || value.trim().equals(""))) {
|
||||
public void setMetadataSingleValue(Context context, Collection collection,
|
||||
MetadataFieldName field, String language, String value)
|
||||
throws MissingResourceException, SQLException {
|
||||
if (field.equals(MD_NAME) && (value == null || value.trim().equals(""))) {
|
||||
try {
|
||||
value = I18nUtil.getMessage("org.dspace.workflow.WorkflowManager.untitled");
|
||||
} catch (MissingResourceException e) {
|
||||
@@ -301,21 +312,19 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
||||
}
|
||||
}
|
||||
|
||||
String[] MDValue = getMDValueByLegacyField(field);
|
||||
|
||||
/*
|
||||
* Set metadata field to null if null
|
||||
* and trim strings to eliminate excess
|
||||
* whitespace.
|
||||
*/
|
||||
if (value == null) {
|
||||
clearMetadata(context, collection, MDValue[0], MDValue[1], MDValue[2], Item.ANY);
|
||||
clearMetadata(context, collection, field.SCHEMA, field.ELEMENT, field.QUALIFIER, Item.ANY);
|
||||
collection.setMetadataModified();
|
||||
} else {
|
||||
setMetadataSingleValue(context, collection, MDValue[0], MDValue[1], MDValue[2], null, value);
|
||||
super.setMetadataSingleValue(context, collection, field, null, value);
|
||||
}
|
||||
|
||||
collection.addDetails(field);
|
||||
collection.addDetails(field.toString());
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -387,7 +396,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
||||
log.error(LogManager.getHeader(context, "setWorkflowGroup",
|
||||
"collection_id=" + collection.getID() + " " + e.getMessage()), e);
|
||||
}
|
||||
if (!StringUtils.equals(XmlWorkflowFactoryImpl.LEGACY_WORKFLOW_NAME, workflow.getID())) {
|
||||
if (!StringUtils.equals(workflowFactory.getDefaultWorkflow().getID(), workflow.getID())) {
|
||||
throw new IllegalArgumentException(
|
||||
"setWorkflowGroup can be used only on collection with the default basic dspace workflow. "
|
||||
+ "Instead, the collection: "
|
||||
@@ -455,22 +464,6 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the value of a metadata field
|
||||
*
|
||||
* @param collection which collection to operate on
|
||||
* @param field the name of the metadata field to get
|
||||
* @return the value of the metadata field
|
||||
* @throws IllegalArgumentException if the requested metadata field doesn't exist
|
||||
*/
|
||||
@Override
|
||||
@Deprecated
|
||||
public String getMetadata(Collection collection, String field) {
|
||||
String[] MDValue = getMDValueByLegacyField(field);
|
||||
String value = getMetadataFirstValue(collection, MDValue[0], MDValue[1], MDValue[2], Item.ANY);
|
||||
return value == null ? "" : value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Group createSubmitters(Context context, Collection collection) throws SQLException, AuthorizeException {
|
||||
// Check authorisation - Must be an Admin to create Submitters Group
|
||||
@@ -551,7 +544,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
||||
|
||||
@Override
|
||||
public String getLicense(Collection collection) {
|
||||
String license = getMetadata(collection, "license");
|
||||
String license = getMetadataFirstValue(collection, CollectionService.MD_LICENSE, Item.ANY);
|
||||
|
||||
if (license == null || license.trim().equals("")) {
|
||||
// Fallback to site-wide default
|
||||
@@ -923,7 +916,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
||||
public List<Collection> findCollectionsWithSubmit(String q, Context context, Community community,
|
||||
int offset, int limit) throws SQLException, SearchServiceException {
|
||||
|
||||
List<Collection> collections = new ArrayList<Collection>();
|
||||
List<Collection> collections = new ArrayList<>();
|
||||
DiscoverQuery discoverQuery = new DiscoverQuery();
|
||||
discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE);
|
||||
discoverQuery.setStart(offset);
|
||||
|
@@ -63,13 +63,13 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
|
||||
joinColumns = {@JoinColumn(name = "parent_comm_id")},
|
||||
inverseJoinColumns = {@JoinColumn(name = "child_comm_id")}
|
||||
)
|
||||
private Set<Community> subCommunities = new HashSet<>();
|
||||
private final Set<Community> subCommunities = new HashSet<>();
|
||||
|
||||
@ManyToMany(fetch = FetchType.LAZY, mappedBy = "subCommunities")
|
||||
private Set<Community> parentCommunities = new HashSet<>();
|
||||
private final Set<Community> parentCommunities = new HashSet<>();
|
||||
|
||||
@ManyToMany(fetch = FetchType.LAZY, mappedBy = "communities", cascade = {CascadeType.PERSIST})
|
||||
private Set<Collection> collections = new HashSet<>();
|
||||
private final Set<Collection> collections = new HashSet<>();
|
||||
|
||||
@OneToOne
|
||||
@JoinColumn(name = "admin")
|
||||
@@ -83,12 +83,6 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
|
||||
@JoinColumn(name = "logo_bitstream_id")
|
||||
private Bitstream logo = null;
|
||||
|
||||
// Keys for accessing Community metadata
|
||||
public static final String COPYRIGHT_TEXT = "copyright_text";
|
||||
public static final String INTRODUCTORY_TEXT = "introductory_text";
|
||||
public static final String SHORT_DESCRIPTION = "short_description";
|
||||
public static final String SIDEBAR_TEXT = "side_bar_text";
|
||||
|
||||
@Transient
|
||||
protected transient CommunityService communityService;
|
||||
|
||||
|
@@ -37,6 +37,8 @@ import org.dspace.core.LogManager;
|
||||
import org.dspace.eperson.Group;
|
||||
import org.dspace.eperson.service.GroupService;
|
||||
import org.dspace.event.Event;
|
||||
import org.dspace.identifier.IdentifierException;
|
||||
import org.dspace.identifier.service.IdentifierService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
@@ -51,7 +53,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
|
||||
/**
|
||||
* log4j category
|
||||
*/
|
||||
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(CommunityServiceImpl.class);
|
||||
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(CommunityServiceImpl.class);
|
||||
|
||||
@Autowired(required = true)
|
||||
protected CommunityDAO communityDAO;
|
||||
@@ -69,6 +71,8 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
|
||||
protected BitstreamService bitstreamService;
|
||||
@Autowired(required = true)
|
||||
protected SiteService siteService;
|
||||
@Autowired(required = true)
|
||||
protected IdentifierService identifierService;
|
||||
|
||||
protected CommunityServiceImpl() {
|
||||
super();
|
||||
@@ -90,17 +94,6 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
|
||||
|
||||
Community newCommunity = communityDAO.create(context, new Community());
|
||||
|
||||
try {
|
||||
if (handle == null) {
|
||||
handleService.createHandle(context, newCommunity);
|
||||
} else {
|
||||
handleService.createHandle(context, newCommunity, handle);
|
||||
}
|
||||
} catch (IllegalStateException ie) {
|
||||
//If an IllegalStateException is thrown, then an existing object is already using this handle
|
||||
throw ie;
|
||||
}
|
||||
|
||||
if (parent != null) {
|
||||
parent.addSubCommunity(newCommunity);
|
||||
newCommunity.addParentCommunity(parent);
|
||||
@@ -115,14 +108,24 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
|
||||
|
||||
communityDAO.save(context, newCommunity);
|
||||
|
||||
try {
|
||||
if (handle == null) {
|
||||
identifierService.register(context, newCommunity);
|
||||
} else {
|
||||
identifierService.register(context, newCommunity, handle);
|
||||
}
|
||||
} catch (IllegalStateException | IdentifierException ex) {
|
||||
throw new IllegalStateException(ex);
|
||||
}
|
||||
|
||||
context.addEvent(new Event(Event.CREATE, Constants.COMMUNITY, newCommunity.getID(), newCommunity.getHandle(),
|
||||
getIdentifiers(context, newCommunity)));
|
||||
getIdentifiers(context, newCommunity)));
|
||||
|
||||
// if creating a top-level Community, simulate an ADD event at the Site.
|
||||
if (parent == null) {
|
||||
context.addEvent(new Event(Event.ADD, Constants.SITE, siteService.findSite(context).getID(),
|
||||
Constants.COMMUNITY, newCommunity.getID(), newCommunity.getHandle(),
|
||||
getIdentifiers(context, newCommunity)));
|
||||
Constants.COMMUNITY, newCommunity.getID(), newCommunity.getHandle(),
|
||||
getIdentifiers(context, newCommunity)));
|
||||
}
|
||||
|
||||
log.info(LogManager.getHeader(context, "create_community",
|
||||
@@ -175,17 +178,10 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getMetadata(Community community, String field) {
|
||||
String[] MDValue = getMDValueByLegacyField(field);
|
||||
String value = getMetadataFirstValue(community, MDValue[0], MDValue[1], MDValue[2], Item.ANY);
|
||||
return value == null ? "" : value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setMetadata(Context context, Community community, String field, String value)
|
||||
throws MissingResourceException, SQLException {
|
||||
if ((field.trim()).equals("name")
|
||||
&& (value == null || value.trim().equals(""))) {
|
||||
public void setMetadataSingleValue(Context context, Community community,
|
||||
MetadataFieldName field, String language, String value)
|
||||
throws MissingResourceException, SQLException {
|
||||
if (field.equals(MD_NAME) && (value == null || value.trim().equals(""))) {
|
||||
try {
|
||||
value = I18nUtil.getMessage("org.dspace.workflow.WorkflowManager.untitled");
|
||||
} catch (MissingResourceException e) {
|
||||
@@ -193,19 +189,19 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
|
||||
}
|
||||
}
|
||||
|
||||
String[] MDValue = getMDValueByLegacyField(field);
|
||||
|
||||
/*
|
||||
* Set metadata field to null if null
|
||||
* and trim strings to eliminate excess
|
||||
* whitespace.
|
||||
*/
|
||||
if (value == null) {
|
||||
clearMetadata(context, community, MDValue[0], MDValue[1], MDValue[2], Item.ANY);
|
||||
clearMetadata(context, community, field.SCHEMA, field.ELEMENT, field.QUALIFIER, Item.ANY);
|
||||
community.setMetadataModified();
|
||||
} else {
|
||||
setMetadataSingleValue(context, community, MDValue[0], MDValue[1], MDValue[2], null, value);
|
||||
super.setMetadataSingleValue(context, community, field, null, value);
|
||||
}
|
||||
community.addDetails(field);
|
||||
|
||||
community.addDetails(field.toString());
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -310,7 +306,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
|
||||
|
||||
@Override
|
||||
public List<Community> getAllParents(Context context, Community community) throws SQLException {
|
||||
List<Community> parentList = new ArrayList<Community>();
|
||||
List<Community> parentList = new ArrayList<>();
|
||||
Community parent = (Community) getParentObject(context, community);
|
||||
while (parent != null) {
|
||||
parentList.add(parent);
|
||||
@@ -332,7 +328,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
|
||||
|
||||
@Override
|
||||
public List<Collection> getAllCollections(Context context, Community community) throws SQLException {
|
||||
List<Collection> collectionList = new ArrayList<Collection>();
|
||||
List<Collection> collectionList = new ArrayList<>();
|
||||
List<Community> subCommunities = community.getSubcommunities();
|
||||
for (Community subCommunity : subCommunities) {
|
||||
addCollectionList(subCommunity, collectionList);
|
||||
@@ -629,6 +625,10 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
|
||||
case Constants.DELETE:
|
||||
if (AuthorizeConfiguration.canCommunityAdminPerformSubelementDeletion()) {
|
||||
adminObject = getParentObject(context, community);
|
||||
if (adminObject == null) {
|
||||
//top-level community, has to be admin of the current community
|
||||
adminObject = community;
|
||||
}
|
||||
}
|
||||
break;
|
||||
case Constants.ADD:
|
||||
|
@@ -131,7 +131,7 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
|
||||
@Override
|
||||
public List<MetadataValue> getMetadata(T dso, String schema, String element, String qualifier, String lang) {
|
||||
// Build up list of matching values
|
||||
List<MetadataValue> values = new ArrayList<MetadataValue>();
|
||||
List<MetadataValue> values = new ArrayList<>();
|
||||
for (MetadataValue dcv : dso.getMetadata()) {
|
||||
if (match(schema, element, qualifier, lang, dcv)) {
|
||||
values.add(dcv);
|
||||
@@ -207,8 +207,8 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
|
||||
List<String> values) throws SQLException {
|
||||
public List<MetadataValue> addMetadata(Context context, T dso, String schema, String element, String qualifier,
|
||||
String lang, List<String> values) throws SQLException {
|
||||
MetadataField metadataField = metadataFieldService.findByElement(context, schema, element, qualifier);
|
||||
if (metadataField == null) {
|
||||
throw new SQLException(
|
||||
@@ -216,12 +216,12 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
|
||||
"exist!");
|
||||
}
|
||||
|
||||
addMetadata(context, dso, metadataField, lang, values);
|
||||
return addMetadata(context, dso, metadataField, lang, values);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
|
||||
List<String> values, List<String> authorities, List<Integer> confidences)
|
||||
public List<MetadataValue> addMetadata(Context context, T dso, String schema, String element, String qualifier,
|
||||
String lang, List<String> values, List<String> authorities, List<Integer> confidences)
|
||||
throws SQLException {
|
||||
// We will not verify that they are valid entries in the registry
|
||||
// until update() is called.
|
||||
@@ -231,15 +231,16 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
|
||||
"bad_dublin_core schema=" + schema + "." + element + "." + qualifier + ". Metadata field does not " +
|
||||
"exist!");
|
||||
}
|
||||
addMetadata(context, dso, metadataField, lang, values, authorities, confidences);
|
||||
return addMetadata(context, dso, metadataField, lang, values, authorities, confidences);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addMetadata(Context context, T dso, MetadataField metadataField, String lang, List<String> values,
|
||||
List<String> authorities, List<Integer> confidences)
|
||||
public List<MetadataValue> addMetadata(Context context, T dso, MetadataField metadataField, String lang,
|
||||
List<String> values, List<String> authorities, List<Integer> confidences)
|
||||
throws SQLException {
|
||||
boolean authorityControlled = metadataAuthorityService.isAuthorityControlled(metadataField);
|
||||
boolean authorityRequired = metadataAuthorityService.isAuthorityRequired(metadataField);
|
||||
List<MetadataValue> newMetadata = new ArrayList<>(values.size());
|
||||
// We will not verify that they are valid entries in the registry
|
||||
// until update() is called.
|
||||
for (int i = 0; i < values.size(); i++) {
|
||||
@@ -250,6 +251,7 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
|
||||
}
|
||||
}
|
||||
MetadataValue metadataValue = metadataValueService.create(context, dso, metadataField);
|
||||
newMetadata.add(metadataValue);
|
||||
//Set place to list length of all metadatavalues for the given schema.element.qualifier combination.
|
||||
// Subtract one to adhere to the 0 as first element rule
|
||||
metadataValue.setPlace(
|
||||
@@ -296,7 +298,6 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
|
||||
}
|
||||
}
|
||||
metadataValue.setValue(String.valueOf(dcvalue));
|
||||
;
|
||||
} else {
|
||||
metadataValue.setValue(null);
|
||||
}
|
||||
@@ -304,37 +305,39 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
|
||||
// metadataValueService.update(context, metadataValue);
|
||||
dso.addDetails(metadataField.toString());
|
||||
}
|
||||
return newMetadata;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value,
|
||||
String authority, int confidence) throws SQLException {
|
||||
addMetadata(context, dso, metadataField, language, Arrays.asList(value), Arrays.asList(authority),
|
||||
Arrays.asList(confidence));
|
||||
public MetadataValue addMetadata(Context context, T dso, MetadataField metadataField, String language,
|
||||
String value, String authority, int confidence) throws SQLException {
|
||||
return addMetadata(context, dso, metadataField, language, Arrays.asList(value), Arrays.asList(authority),
|
||||
Arrays.asList(confidence)).get(0);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
|
||||
String value) throws SQLException {
|
||||
addMetadata(context, dso, schema, element, qualifier, lang, Arrays.asList(value));
|
||||
public MetadataValue addMetadata(Context context, T dso, String schema, String element, String qualifier,
|
||||
String lang, String value) throws SQLException {
|
||||
return addMetadata(context, dso, schema, element, qualifier, lang, Arrays.asList(value)).get(0);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value)
|
||||
public MetadataValue addMetadata(Context context, T dso, MetadataField metadataField, String language, String value)
|
||||
throws SQLException {
|
||||
addMetadata(context, dso, metadataField, language, Arrays.asList(value));
|
||||
return addMetadata(context, dso, metadataField, language, Arrays.asList(value)).get(0);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addMetadata(Context context, T dso, MetadataField metadataField, String language, List<String> values)
|
||||
public List<MetadataValue> addMetadata(Context context, T dso, MetadataField metadataField, String language,
|
||||
List<String> values)
|
||||
throws SQLException {
|
||||
if (metadataField != null) {
|
||||
String fieldKey = metadataAuthorityService
|
||||
.makeFieldKey(metadataField.getMetadataSchema().getName(), metadataField.getElement(),
|
||||
metadataField.getQualifier());
|
||||
if (metadataAuthorityService.isAuthorityControlled(fieldKey)) {
|
||||
List<String> authorities = new ArrayList<String>();
|
||||
List<Integer> confidences = new ArrayList<Integer>();
|
||||
List<String> authorities = new ArrayList<>();
|
||||
List<Integer> confidences = new ArrayList<>();
|
||||
for (int i = 0; i < values.size(); ++i) {
|
||||
if (dso instanceof Item) {
|
||||
getAuthoritiesAndConfidences(fieldKey, ((Item) dso).getOwningCollection(), values, authorities,
|
||||
@@ -343,18 +346,19 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
|
||||
getAuthoritiesAndConfidences(fieldKey, null, values, authorities, confidences, i);
|
||||
}
|
||||
}
|
||||
addMetadata(context, dso, metadataField, language, values, authorities, confidences);
|
||||
return addMetadata(context, dso, metadataField, language, values, authorities, confidences);
|
||||
} else {
|
||||
addMetadata(context, dso, metadataField, language, values, null, null);
|
||||
return addMetadata(context, dso, metadataField, language, values, null, null);
|
||||
}
|
||||
}
|
||||
return new ArrayList<>(0);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
|
||||
String value, String authority, int confidence) throws SQLException {
|
||||
addMetadata(context, dso, schema, element, qualifier, lang, Arrays.asList(value), Arrays.asList(authority),
|
||||
Arrays.asList(confidence));
|
||||
public MetadataValue addMetadata(Context context, T dso, String schema, String element, String qualifier,
|
||||
String lang, String value, String authority, int confidence) throws SQLException {
|
||||
return addMetadata(context, dso, schema, element, qualifier, lang, Arrays.asList(value),
|
||||
Arrays.asList(authority), Arrays.asList(confidence)).get(0);
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -405,6 +409,24 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve first metadata field value
|
||||
*
|
||||
* @param dso The DSpaceObject which we ask for metadata.
|
||||
* @param field {schema, element, qualifier} for the desired field.
|
||||
* @param language the language to match, or <code>Item.ANY</code>
|
||||
* @return the first metadata field value
|
||||
*/
|
||||
@Override
|
||||
public String getMetadataFirstValue(T dso, MetadataFieldName field, String language) {
|
||||
List<MetadataValue> metadataValues
|
||||
= getMetadata(dso, field.SCHEMA, field.ELEMENT, field.QUALIFIER, language);
|
||||
if (CollectionUtils.isNotEmpty(metadataValues)) {
|
||||
return metadataValues.get(0).getValue();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set first metadata field value
|
||||
*
|
||||
@@ -420,6 +442,21 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setMetadataSingleValue(Context context, T dso, MetadataFieldName field,
|
||||
String language, String value)
|
||||
throws SQLException {
|
||||
if (value != null) {
|
||||
clearMetadata(context, dso, field.SCHEMA, field.ELEMENT, field.QUALIFIER,
|
||||
language);
|
||||
|
||||
String newValueLanguage = (Item.ANY.equals(language)) ? null : language;
|
||||
addMetadata(context, dso, field.SCHEMA, field.ELEMENT, field.QUALIFIER,
|
||||
newValueLanguage, value);
|
||||
dso.setMetadataModified();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Utility method for pattern-matching metadata elements. This
|
||||
* method will return <code>true</code> if the given schema,
|
||||
@@ -569,6 +606,7 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
|
||||
//RelationshipMetadataValue instance.
|
||||
//This is done to ensure that the order is correct.
|
||||
metadataValues.sort(new Comparator<MetadataValue>() {
|
||||
@Override
|
||||
public int compare(MetadataValue o1, MetadataValue o2) {
|
||||
int compare = o1.getPlace() - o2.getPlace();
|
||||
if (compare == 0) {
|
||||
@@ -660,33 +698,35 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
|
||||
@Override
|
||||
public void addAndShiftRightMetadata(Context context, T dso, String schema, String element, String qualifier,
|
||||
String lang, String value, String authority, int confidence, int index)
|
||||
throws SQLException {
|
||||
throws SQLException {
|
||||
|
||||
List<MetadataValue> list = getMetadata(dso, schema, element, qualifier);
|
||||
|
||||
clearMetadata(context, dso, schema, element, qualifier, Item.ANY);
|
||||
|
||||
int idx = 0;
|
||||
int place = 0;
|
||||
boolean last = true;
|
||||
for (MetadataValue rr : list) {
|
||||
if (idx == index) {
|
||||
addMetadata(context, dso, schema, element, qualifier,
|
||||
lang, value, authority, confidence);
|
||||
MetadataValue newMetadata = addMetadata(context, dso, schema, element, qualifier,
|
||||
lang, value, authority, confidence);
|
||||
|
||||
moveSingleMetadataValue(context, dso, place, newMetadata);
|
||||
place++;
|
||||
last = false;
|
||||
}
|
||||
addMetadata(context, dso, schema, element, qualifier,
|
||||
rr.getLanguage(), rr.getValue(), rr.getAuthority(), rr.getConfidence());
|
||||
moveSingleMetadataValue(context, dso, place, rr);
|
||||
place++;
|
||||
idx++;
|
||||
}
|
||||
if (last) {
|
||||
addMetadata(context, dso, schema, element, qualifier,
|
||||
lang, value, authority, confidence);
|
||||
lang, value, authority, confidence);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void moveMetadata(Context context, T dso, String schema, String element, String qualifier, int from, int to)
|
||||
throws SQLException, IllegalArgumentException {
|
||||
throws SQLException, IllegalArgumentException {
|
||||
|
||||
if (from == to) {
|
||||
throw new IllegalArgumentException("The \"from\" location MUST be different from \"to\" location");
|
||||
@@ -701,8 +741,6 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
|
||||
"\n Idx from:" + from + " Idx to: " + to);
|
||||
}
|
||||
|
||||
clearMetadata(context, dso, schema, element, qualifier, Item.ANY);
|
||||
|
||||
int idx = 0;
|
||||
MetadataValue moved = null;
|
||||
for (MetadataValue md : list) {
|
||||
@@ -714,49 +752,51 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
|
||||
}
|
||||
|
||||
idx = 0;
|
||||
int place = 0;
|
||||
boolean last = true;
|
||||
for (MetadataValue rr : list) {
|
||||
if (idx == to && to < from) {
|
||||
addMetadata(context, dso, schema, element, qualifier, moved.getLanguage(), moved.getValue(),
|
||||
moved.getAuthority(), moved.getConfidence());
|
||||
moveSingleMetadataValue(context, dso, place, moved);
|
||||
place++;
|
||||
last = false;
|
||||
}
|
||||
if (idx != from) {
|
||||
addMetadata(context, dso, schema, element, qualifier, rr.getLanguage(), rr.getValue(),
|
||||
rr.getAuthority(), rr.getConfidence());
|
||||
moveSingleMetadataValue(context, dso, place, rr);
|
||||
place++;
|
||||
}
|
||||
if (idx == to && to > from) {
|
||||
addMetadata(context, dso, schema, element, qualifier, moved.getLanguage(), moved.getValue(),
|
||||
moved.getAuthority(), moved.getConfidence());
|
||||
moveSingleMetadataValue(context, dso, place, moved);
|
||||
place++;
|
||||
last = false;
|
||||
}
|
||||
idx++;
|
||||
}
|
||||
if (last) {
|
||||
addMetadata(context, dso, schema, element, qualifier, moved.getLanguage(), moved.getValue(),
|
||||
moved.getAuthority(), moved.getConfidence());
|
||||
moveSingleMetadataValue(context, dso, place, moved);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Supports moving metadata by updating the place of the metadata value.
|
||||
*
|
||||
* @param context current DSpace session.
|
||||
* @param dso unused.
|
||||
* @param place ordinal position of the value in the list of that field's values.
|
||||
* @param rr the value to be placed.
|
||||
*/
|
||||
protected void moveSingleMetadataValue(Context context, T dso, int place, MetadataValue rr) {
|
||||
//just move the metadata
|
||||
rr.setPlace(place);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void replaceMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
|
||||
String value, String authority, int confidence, int index) throws SQLException {
|
||||
|
||||
List<MetadataValue> list = getMetadata(dso, schema, element, qualifier);
|
||||
|
||||
clearMetadata(context, dso, schema, element, qualifier, Item.ANY);
|
||||
|
||||
int idx = 0;
|
||||
for (MetadataValue rr : list) {
|
||||
if (idx == index) {
|
||||
addMetadata(context, dso, schema, element, qualifier,
|
||||
lang, value, authority, confidence);
|
||||
} else {
|
||||
addMetadata(context, dso, schema, element, qualifier,
|
||||
rr.getLanguage(), rr.getValue(), rr.getAuthority(), rr.getConfidence());
|
||||
}
|
||||
idx++;
|
||||
}
|
||||
removeMetadataValues(context, dso, Arrays.asList(list.get(index)));
|
||||
addAndShiftRightMetadata(context, dso, schema, element, qualifier, lang, value, authority, confidence, index);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@@ -7,6 +7,7 @@
|
||||
*/
|
||||
package org.dspace.content;
|
||||
|
||||
import java.util.Objects;
|
||||
import javax.persistence.Column;
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.GeneratedValue;
|
||||
@@ -15,6 +16,8 @@ import javax.persistence.Id;
|
||||
import javax.persistence.SequenceGenerator;
|
||||
import javax.persistence.Table;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.lang3.builder.HashCodeBuilder;
|
||||
import org.dspace.core.ReloadableEntity;
|
||||
|
||||
/**
|
||||
@@ -45,7 +48,8 @@ public class EntityType implements ReloadableEntity<Integer> {
|
||||
|
||||
/**
|
||||
* The standard setter for the ID of this EntityType
|
||||
* @param id The ID that this EntityType's ID will be set to
|
||||
*
|
||||
* @param id The ID that this EntityType's ID will be set to
|
||||
*/
|
||||
public void setId(Integer id) {
|
||||
this.id = id;
|
||||
@@ -53,7 +57,8 @@ public class EntityType implements ReloadableEntity<Integer> {
|
||||
|
||||
/**
|
||||
* The standard getter for the label of this EntityType
|
||||
* @return The label for this EntityType
|
||||
*
|
||||
* @return The label for this EntityType
|
||||
*/
|
||||
public String getLabel() {
|
||||
return label;
|
||||
@@ -61,6 +66,7 @@ public class EntityType implements ReloadableEntity<Integer> {
|
||||
|
||||
/**
|
||||
* The standard setter for the label of this EntityType
|
||||
*
|
||||
* @param label The label that this EntityType's label will be set to
|
||||
*/
|
||||
public void setLabel(String label) {
|
||||
@@ -69,9 +75,40 @@ public class EntityType implements ReloadableEntity<Integer> {
|
||||
|
||||
/**
|
||||
* The standard getter for the ID of this EntityType
|
||||
* @return The ID for this EntityType
|
||||
*
|
||||
* @return The ID for this EntityType
|
||||
*/
|
||||
public Integer getID() {
|
||||
return id;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines whether two entity types are equal based on the id and the label
|
||||
* @param obj object to be compared
|
||||
* @return
|
||||
*/
|
||||
public boolean equals(Object obj) {
|
||||
if (!(obj instanceof EntityType)) {
|
||||
return false;
|
||||
}
|
||||
EntityType entityType = (EntityType) obj;
|
||||
|
||||
if (!Objects.equals(this.getID(), entityType.getID())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!StringUtils.equals(this.getLabel(), entityType.getLabel())) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a hash code value for the object.
|
||||
* @return hash code value
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return new HashCodeBuilder().append(getID()).toHashCode();
|
||||
}
|
||||
}
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user