Compare commits

..

1 Commits

Author SHA1 Message Date
Terry Brady
cfc200f2c7 Update README.md 2018-06-22 14:33:35 -07:00
1142 changed files with 19281 additions and 49867 deletions

View File

@@ -1,6 +0,0 @@
.git/
.idea/
.settings/
*/target/
dspace/modules/*/target/
Dockerfile.*

View File

@@ -26,20 +26,19 @@ before_install:
# Skip install stage, as we'll do it below
install: "echo 'Skipping install stage, dependencies will be downloaded during build and test stages.'"
# Build DSpace and run both Unit and Integration Tests
# Two stage Build and Test
# 1. Install & Unit Test APIs
# 2. Assemble DSpace
script:
# Summary of flags used (below):
# license:check => Validate all source code license headers
# -Dmaven.test.skip=false => Enable DSpace Unit Tests
# -DskipITs=false => Enable DSpace Integration Tests
# -P !assembly => Skip assembly of "dspace-installer" directory (as it can be memory intensive)
# -B => Maven batch/non-interactive mode (recommended for CI)
# -V => Display Maven version info before build
# -Dsurefire.rerunFailingTestsCount=2 => try again for flakey tests, and keep track of/report on number of retries
# 1. [Install & Unit Test] Check source code licenses and run source code Unit Tests
# license:check => Validate all source code license headers
# -Dmaven.test.skip=false => Enable DSpace Unit Tests
# -DskipITs=false => Enable DSpace Integration Tests
# -P !assembly => Skip normal assembly (as it can be memory intensive)
# -B => Maven batch/non-interactive mode (recommended for CI)
# -V => Display Maven version info before build
# -Dsurefire.rerunFailingTestsCount=2 => try again for flakey tests, and keep track of/report on number of retries
- "mvn clean install license:check -Dmaven.test.skip=false -DskipITs=false -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"
# After a successful build and test (see 'script'), send code coverage reports to coveralls.io
# These code coverage reports are generated by jacoco-maven-plugin (during test process above).
after_success:
# Run "verify", enabling the "coveralls" profile. This sends our reports to coveralls.io (see coveralls-maven-plugin)
- "cd dspace && mvn verify -P coveralls"
# 2. [Assemble DSpace] Ensure overlay & assembly process works (from [src]/dspace/)
# -P !assembly => SKIP the actual building of [src]/dspace/dspace-installer (as it can be memory intensive)
- "cd dspace && mvn package -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"

View File

@@ -1,24 +0,0 @@
# This image will be published as dspace/dspace-dependencies
# The purpose of this image is to make the build for dspace/dspace run faster
# Step 1 - Run Maven Build
FROM maven:3-jdk-8 as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
RUN useradd dspace \
&& mkdir /home/dspace \
&& chown -Rv dspace: /home/dspace
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg
# Trigger the installation of all maven dependencies
# Clean up the built artifacts in the same step to keep the docker image small
RUN mvn package && mvn clean
# Clear the contents of the /app directory so no artifacts are left when dspace:dspace is built
USER root
RUN rm -rf /app/*

View File

@@ -1,63 +0,0 @@
# This image will be published as dspace/dspace
# See https://dspace-labs.github.io/DSpace-Docker-Images/ for usage details
#
# This version is JDK8 compatible
# - tomcat:8-jre8
# - ANT 1.10.5
# - maven:3-jdk-8
# - note:
# - default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x-jdk8
# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-7_x as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-install directory will be written to /install
RUN mkdir /install \
&& chown -Rv dspace: /install \
&& chown -Rv dspace: /app
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg
# Build DSpace. Copy the dspace-install directory to /install. Clean up the build to keep the docker image small
RUN mvn package && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
# Step 2 - Run Ant Deploy
FROM tomcat:8-jre8 as ant_build
ARG TARGET_DIR=dspace-installer
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.5
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
RUN mkdir $ANT_HOME && \
wget -qO- "https://www.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
RUN ant init_installation update_configs update_code update_webapps update_solr_indexes
# Step 3 - Run tomcat
# Create a new tomcat image that does not retain the the build directory contents
FROM tomcat:8-jre8
ENV DSPACE_INSTALL=/dspace
COPY --from=ant_build /dspace $DSPACE_INSTALL
EXPOSE 8080 8009
ENV JAVA_OPTS=-Xmx2000m
RUN ln -s $DSPACE_INSTALL/webapps/solr /usr/local/tomcat/webapps/solr && \
ln -s $DSPACE_INSTALL/webapps/spring-rest /usr/local/tomcat/webapps/spring-rest && \
ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest && \
ln -s $DSPACE_INSTALL/webapps/oai /usr/local/tomcat/webapps/oai && \
ln -s $DSPACE_INSTALL/webapps/rdf /usr/local/tomcat/webapps/rdf && \
ln -s $DSPACE_INSTALL/webapps/sword /usr/local/tomcat/webapps/sword && \
ln -s $DSPACE_INSTALL/webapps/swordv2 /usr/local/tomcat/webapps/swordv2

View File

@@ -1,69 +0,0 @@
# This image will be published as dspace/dspace
# See https://dspace-labs.github.io/DSpace-Docker-Images/ for usage details
#
# This version is JDK8 compatible
# - tomcat:8-jre8
# - ANT 1.10.5
# - maven:3-jdk-8
# - note:
# - default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x-jdk8-test
# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-7_x as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-install directory will be written to /install
RUN mkdir /install \
&& chown -Rv dspace: /install \
&& chown -Rv dspace: /app
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg
# Build DSpace. Copy the dspace-install directory to /install. Clean up the build to keep the docker image small
RUN mvn package && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
# Step 2 - Run Ant Deploy
FROM tomcat:8-jre8 as ant_build
ARG TARGET_DIR=dspace-installer
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.5
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
RUN mkdir $ANT_HOME && \
wget -qO- "https://www.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
RUN ant init_installation update_configs update_code update_webapps update_solr_indexes
# Step 3 - Run tomcat
# Create a new tomcat image that does not retain the the build directory contents
FROM tomcat:8-jre8
ENV DSPACE_INSTALL=/dspace
COPY --from=ant_build /dspace $DSPACE_INSTALL
EXPOSE 8080 8009
ENV JAVA_OPTS=-Xmx2000m
RUN ln -s $DSPACE_INSTALL/webapps/solr /usr/local/tomcat/webapps/solr && \
ln -s $DSPACE_INSTALL/webapps/spring-rest /usr/local/tomcat/webapps/spring-rest && \
ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest && \
ln -s $DSPACE_INSTALL/webapps/oai /usr/local/tomcat/webapps/oai && \
ln -s $DSPACE_INSTALL/webapps/rdf /usr/local/tomcat/webapps/rdf && \
ln -s $DSPACE_INSTALL/webapps/sword /usr/local/tomcat/webapps/sword && \
ln -s $DSPACE_INSTALL/webapps/swordv2 /usr/local/tomcat/webapps/swordv2
COPY dspace/src/main/docker/test/solr_web.xml $DSPACE_INSTALL/webapps/solr/WEB-INF/web.xml
COPY dspace/src/main/docker/test/rest_web.xml $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml
RUN sed -i -e "s|\${dspace.dir}|$DSPACE_INSTALL|" $DSPACE_INSTALL/webapps/solr/WEB-INF/web.xml && \
sed -i -e "s|\${dspace.dir}|$DSPACE_INSTALL|" $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml

View File

@@ -366,6 +366,7 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* FindBugs-Annotations (com.google.code.findbugs:annotations:3.0.1u2 - http://findbugs.sourceforge.net/)
* MaxMind GeoIP Legacy API (com.maxmind.geoip:geoip-api:1.3.0 - https://github.com/maxmind/geoip-api-java)
* JHighlight (com.uwyn:jhighlight:1.0 - https://jhighlight.dev.java.net/)
* DSpace TM-Extractors Dependency (org.dspace.dependencies:dspace-tm-extractors:1.0.1 - http://projects.dspace.org/dspace-pom/dspace-tm-extractors)
* A Hibernate O/RM Module (org.hibernate:hibernate-core:4.2.21.Final - http://hibernate.org)
* A Hibernate O/RM Module (org.hibernate:hibernate-ehcache:4.2.21.Final - http://hibernate.org)
* Hibernate Commons Annotations (org.hibernate.common:hibernate-commons-annotations:4.0.2.Final - http://hibernate.org)

View File

@@ -1,6 +1,9 @@
# DSpace
## NOTE: The rest-tutorial branch has been created to support the [DSpace 7 REST documentation](https://dspace-labs.github.io/DSpace7RestTutorial/walkthrough/intro)
- This branch provides stable, referencable line numbers in code
[![Build Status](https://travis-ci.org/DSpace/DSpace.png?branch=master)](https://travis-ci.org/DSpace/DSpace)
[DSpace Documentation](https://wiki.duraspace.org/display/DSDOC/) |
@@ -40,9 +43,6 @@ Please be aware that, as a Java web application, DSpace requires a database (Pos
and a servlet container (usually Tomcat) in order to function.
More information about these and all other prerequisites can be found in the Installation instructions above.
## Dockerfile Usage
See the [DSpace Docker Tutorial](https://dspace-labs.github.io/DSpace-Docker-Images/).
## Contributing
DSpace is a community built and supported project. We do not have a centralized development or support team,
@@ -79,57 +79,6 @@ install, upgrade, customize or host DSpace, then we recommend getting in touch w
The DSpace Issue Tracker can be found at: https://jira.duraspace.org/projects/DS/summary
## Testing
### Running Tests
By default, in DSpace, Unit Tests and Integration Tests are disabled. However, they are
run automatically by [Travis CI](https://travis-ci.org/DSpace/DSpace/) for all Pull Requests and code commits.
* How to run both Unit Tests (via `maven-surefire-plugin`) and Integration Tests (via `maven-failsafe-plugin`):
```
# NOTE: while "mvn test" runs Unit Tests,
# Integration Tests only run for "verify" or "install" phases
mvn clean install -Dmaven.test.skip=false -DskipITs=false
```
* How to run just Unit Tests:
```
mvn clean test -Dmaven.test.skip=false
```
* How to run a *single* Unit Test
```
# Run all tests in a specific test class
# NOTE: testClassName is just the class name, do not include package
mvn clean test -Dmaven.test.skip=false -Dtest=[testClassName]
# Run one test method in a specific test class
mvn clean test -Dmaven.test.skip=false -Dtest=[testClassName]#[testMethodName]
```
* How to run Integration Tests (requires running Unit tests too)
```
mvn clean verify -Dmaven.test.skip=false -DskipITs=false
```
* How to run a *single* Integration Test (requires running Unit tests too)
```
# Run all integration tests in a specific test class
# NOTE: Integration Tests only run for "verify" or "install" phases
# NOTE: testClassName is just the class name, do not include package
mvn clean verify -Dmaven.test.skip=false -DskipITs=false -Dit.test=[testClassName]
# Run one test method in a specific test class
mvn clean verify -Dmaven.test.skip=false -DskipITs=false -Dit.test=[testClassName]#[testMethodName]
```
* How to run only tests of a specific DSpace module
```
# Before you can run only one module's tests, other modules may need installing into your ~/.m2
cd [dspace-src]
mvn clean install
# Then, move into a module subdirectory, and run the test command
cd [dspace-src]/dspace-spring-rest
# Choose your test command from the lists above
```
## License
DSpace source code is freely available under a standard [BSD 3-Clause license](https://opensource.org/licenses/BSD-3-Clause).

View File

@@ -1,4 +1,5 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.dspace</groupId>
<artifactId>dspace-api</artifactId>
@@ -12,7 +13,7 @@
<parent>
<groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId>
<version>7.0-preview-1</version>
<version>7.0-SNAPSHOT</version>
<relativePath>..</relativePath>
</parent>
@@ -50,10 +51,6 @@
<configuration>
<debug>true</debug>
<showDeprecation>true</showDeprecation>
<compilerArguments>
<processor>org.hibernate.jpamodelgen.JPAMetaModelEntityProcessor</processor>
</compilerArguments>
</configuration>
</plugin>
<plugin>
@@ -84,7 +81,6 @@
<exclude>**/src/test/resources/**</exclude>
<exclude>**/src/test/data/**</exclude>
<exclude>**/.gitignore</exclude>
<exclude>**/src/main/resources/rebel.xml</exclude>
<exclude>src/test/data/dspaceFolder/config/spiders/**</exclude>
<exclude>src/main/java/org/apache/solr/handler/extraction/ExtractingParams.java</exclude>
</excludes>
@@ -311,10 +307,6 @@
<groupId>org.hibernate</groupId>
<artifactId>hibernate-ehcache</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-jpamodelgen</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-validator-cdi</artifactId>
@@ -342,6 +334,10 @@
<groupId>org.dspace</groupId>
<artifactId>mets</artifactId>
</dependency>
<dependency>
<groupId>org.dspace.dependencies</groupId>
<artifactId>dspace-tm-extractors</artifactId>
</dependency>
<dependency>
<groupId>org.apache.jena</groupId>
<artifactId>apache-jena-libs</artifactId>
@@ -354,14 +350,6 @@
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
@@ -373,6 +361,10 @@
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
@@ -391,8 +383,8 @@
<artifactId>commons-io</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
@@ -408,7 +400,7 @@
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<artifactId>servlet-api</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
@@ -426,16 +418,8 @@
<artifactId>jdom</artifactId>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</dependency>
<dependency>
<groupId>oro</groupId>
@@ -547,10 +531,6 @@
<exclusion>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
@@ -568,10 +548,6 @@
<exclusion>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
@@ -586,7 +562,7 @@
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr-solrj</artifactId>
<version>${solr.client.version}</version>
<version>${solr.version}</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
@@ -600,8 +576,8 @@
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-configuration2</artifactId>
<groupId>commons-configuration</groupId>
<artifactId>commons-configuration</artifactId>
</dependency>
<dependency>
<groupId>com.maxmind.geoip2</groupId>
@@ -621,7 +597,7 @@
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId>
<version>${solr.client.version}</version>
<version>4.10.4</version>
</dependency>
<dependency>
@@ -640,6 +616,7 @@
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>19.0</version>
</dependency>
@@ -742,19 +719,20 @@
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- For ORCID v2 integration -->
<!-- S3 also wanted jackson... -->
<dependency>
<groupId>org.dspace</groupId>
<artifactId>orcid-jaxb-api</artifactId>
<version>2.1.0</version>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</dependency>
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>20180130</version>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
</dependencies>

View File

@@ -17,7 +17,7 @@ import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections.CollectionUtils;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Community;
import org.dspace.content.factory.ContentServiceFactory;
@@ -180,9 +180,13 @@ public class CommunityFiliator {
// second test - circularity: parent's parents can't include proposed
// child
List<Community> parentDads = parent.getParentCommunities();
if (parentDads.contains(child)) {
System.out.println("Error, circular parentage - child is parent of parent");
System.exit(1);
for (int i = 0; i < parentDads.size(); i++) {
if (parentDads.get(i).getID().equals(child.getID())) {
System.out
.println("Error, circular parentage - child is parent of parent");
System.exit(1);
}
}
// everthing's OK
@@ -206,15 +210,26 @@ public class CommunityFiliator {
throws SQLException, AuthorizeException, IOException {
// verify that child is indeed a child of parent
List<Community> parentKids = parent.getSubcommunities();
if (!parentKids.contains(child)) {
System.out.println("Error, child community not a child of parent community");
boolean isChild = false;
for (int i = 0; i < parentKids.size(); i++) {
if (parentKids.get(i).getID().equals(child.getID())) {
isChild = true;
break;
}
}
if (!isChild) {
System.out
.println("Error, child community not a child of parent community");
System.exit(1);
}
// OK remove the mappings - but leave the community, which will become
// top-level
child.removeParentCommunity(parent);
parent.removeSubCommunity(child);
child.getParentCommunities().remove(parent);
parent.getSubcommunities().remove(child);
communityService.update(c, child);
communityService.update(c, parent);

View File

@@ -15,7 +15,7 @@ import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang.StringUtils;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;

View File

@@ -21,7 +21,6 @@ import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.NonUniqueMetadataException;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataFieldService;
@@ -249,7 +248,7 @@ public class MetadataImporter {
// If the schema is not provided default to DC
if (schema == null) {
schema = MetadataSchemaEnum.DC.getName();
schema = MetadataSchema.DC_SCHEMA;
}

View File

@@ -17,7 +17,7 @@ import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.BitstreamFormat;
@@ -47,7 +47,7 @@ public class RegistryLoader {
/**
* log4j category
*/
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(RegistryLoader.class);
private static Logger log = Logger.getLogger(RegistryLoader.class);
protected static BitstreamFormatService bitstreamFormatService = ContentServiceFactory.getInstance()
.getBitstreamFormatService();

View File

@@ -21,10 +21,8 @@ import javax.xml.transform.TransformerException;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
@@ -47,7 +45,6 @@ import org.xml.sax.SAXException;
* an XML file.
*
* The XML file structure needs to be:
* <p>
* {@code
* <import_structure>
* <community>
@@ -59,31 +56,29 @@ import org.xml.sax.SAXException;
* </community>
* </import_structure>
* }
* <p>
* It can be arbitrarily deep, and supports all the metadata elements
* it can be arbitrarily deep, and supports all the metadata elements
* that make up the community and collection metadata. See the system
* documentation for more details.
* documentation for more details
*
* @author Richard Jones
*/
public class StructBuilder {
/**
* The output XML document which will contain updated information about the
* imported structure.
* the output xml document which will contain updated information about the
* imported structure
*/
private static final org.jdom.Document xmlOutput
= new org.jdom.Document(new Element("imported_structure"));
private static org.jdom.Document xmlOutput = new org.jdom.Document(new Element("imported_structure"));
/**
* A hash table to hold metadata for the collection being worked on.
* a hashtable to hold metadata for the collection being worked on
*/
private static final Map<String, String> collectionMap = new HashMap<>();
private static Map<String, String> collectionMap = new HashMap<String, String>();
/**
* A hash table to hold metadata for the community being worked on.
* a hashtable to hold metadata for the community being worked on
*/
private static final Map<String, String> communityMap = new HashMap<>();
private static Map<String, String> communityMap = new HashMap<String, String>();
protected static CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
protected static CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
@@ -106,34 +101,19 @@ public class StructBuilder {
* with the handle for each imported item added as an attribute.
*
* @param argv the command line arguments given
* @throws ParserConfigurationException passed through.
* @throws SQLException passed through.
* @throws Exception if an error occurs
*/
public static void main(String[] argv)
throws ParserConfigurationException, SQLException {
CommandLineParser parser = new DefaultParser();
throws Exception {
CommandLineParser parser = new PosixParser();
Options options = new Options();
options.addOption("h", "help", false, "help");
options.addOption("?", "help");
options.addOption("f", "file", true, "input structure document");
options.addOption("f", "file", true, "file");
options.addOption("e", "eperson", true, "eperson");
options.addOption("o", "output", true, "output structure document");
options.addOption("o", "output", true, "output");
CommandLine line = null;
try {
line = parser.parse(options, argv);
} catch (ParseException ex) {
System.err.println(ex.getMessage());
usage(options);
System.exit(1);
}
if (line.hasOption('h') || line.hasOption('?')) {
usage(options);
System.exit(0);
}
CommandLine line = parser.parse(options, argv);
String file = null;
String eperson = null;
@@ -152,41 +132,22 @@ public class StructBuilder {
}
if (output == null || eperson == null || file == null) {
usage(options);
System.exit(1);
usage();
System.exit(0);
}
// create a context
Context context = new Context();
// set the context
try {
context.setCurrentUser(ePersonService.findByEmail(context, eperson));
} catch (SQLException ex) {
System.err.format("That user could not be found: %s%n", ex.getMessage());
System.exit(1);
}
context.setCurrentUser(ePersonService.findByEmail(context, eperson));
// load the XML
Document document = null;
try {
document = loadXML(file);
} catch (IOException ex) {
System.err.format("The input document could not be read: %s%n", ex.getMessage());
System.exit(1);
} catch (SAXException ex) {
System.err.format("The input document could not be parsed: %s%n", ex.getMessage());
System.exit(1);
}
Document document = loadXML(file);
// run the preliminary validation, to be sure that the the XML document
// is properly structured
try {
validate(document);
} catch (TransformerException ex) {
System.err.format("The input document is invalid: %s%n", ex.getMessage());
System.exit(1);
}
validate(document);
// load the mappings into the member variable hashmaps
communityMap.put("name", "name");
@@ -203,69 +164,60 @@ public class StructBuilder {
collectionMap.put("license", "license");
collectionMap.put("provenance", "provenance_description");
Element[] elements = new Element[]{};
try {
// get the top level community list
NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");
// get the top level community list
NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");
// run the import starting with the top level communities
elements = handleCommunities(context, first, null);
} catch (TransformerException ex) {
System.err.format("Input content not understood: %s%n", ex.getMessage());
System.exit(1);
} catch (AuthorizeException ex) {
System.err.format("Not authorized: %s%n", ex.getMessage());
System.exit(1);
}
// run the import starting with the top level communities
Element[] elements = handleCommunities(context, first, null);
// generate the output
Element root = xmlOutput.getRootElement();
for (Element element : elements) {
root.addContent(element);
for (int i = 0; i < elements.length; i++) {
root.addContent(elements[i]);
}
// finally write the string into the output file
try (BufferedWriter out = new BufferedWriter(new FileWriter(output));) {
try {
BufferedWriter out = new BufferedWriter(new FileWriter(output));
out.write(new XMLOutputter().outputString(xmlOutput));
out.close();
} catch (IOException e) {
System.out.println("Unable to write to output file " + output);
System.exit(1);
System.exit(0);
}
context.complete();
}
/**
* Output the usage information.
* Output the usage information
*/
private static void usage(Options options) {
HelpFormatter helper = new HelpFormatter();
helper.printHelp("java StructBuilder -f <source XML file> -o <output file> -e <eperson email>",
"Load community/collection structure from a file.",
options,
"Communities will be created from the top level,"
+ " and a map of communities to handles will be returned"
+ " in the output file.");
private static void usage() {
System.out.println("Usage: java StructBuilder -f <source XML file> -o <output file> -e <eperson email>");
System.out.println(
"Communities will be created from the top level, and a map of communities to handles will be returned in " +
"the output file");
return;
}
/**
* Validate the XML document. This method returns if the document is valid.
* If validation fails it generates an error and ceases execution.
* Validate the XML document. This method does not return, but if validation
* fails it generates an error and ceases execution
*
* @param document the XML document object
* @throws TransformerException if transformer error
*/
private static void validate(org.w3c.dom.Document document)
throws TransformerException {
StringBuilder err = new StringBuilder();
StringBuffer err = new StringBuffer();
boolean trip = false;
err.append("The following errors were encountered parsing the source XML.\n");
err.append("No changes have been made to the DSpace instance.\n\n");
err.append("The following errors were encountered parsing the source XML\n");
err.append("No changes have been made to the DSpace instance\n\n");
NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");
if (first.getLength() == 0) {
err.append("-There are no top level communities in the source document.");
err.append("-There are no top level communities in the source document");
System.out.println(err.toString());
System.exit(0);
}
@@ -284,7 +236,7 @@ public class StructBuilder {
/**
* Validate the communities section of the XML document. This returns a string
* containing any errors encountered, or null if there were no errors.
* containing any errors encountered, or null if there were no errors
*
* @param communities the NodeList of communities to validate
* @param level the level in the XML document that we are at, for the purposes
@@ -294,7 +246,7 @@ public class StructBuilder {
*/
private static String validateCommunities(NodeList communities, int level)
throws TransformerException {
StringBuilder err = new StringBuilder();
StringBuffer err = new StringBuffer();
boolean trip = false;
String errs = null;
@@ -303,9 +255,8 @@ public class StructBuilder {
NodeList name = XPathAPI.selectNodeList(n, "name");
if (name.getLength() != 1) {
String pos = Integer.toString(i + 1);
err.append("-The level ").append(level)
.append(" community in position ").append(pos)
.append(" does not contain exactly one name field.\n");
err.append("-The level " + level + " community in position " + pos);
err.append(" does not contain exactly one name field\n");
trip = true;
}
@@ -335,7 +286,7 @@ public class StructBuilder {
/**
* validate the collection section of the XML document. This generates a
* string containing any errors encountered, or returns null if no errors.
* string containing any errors encountered, or returns null if no errors
*
* @param collections a NodeList of collections to validate
* @param level the level in the XML document for the purposes of error reporting
@@ -343,7 +294,7 @@ public class StructBuilder {
*/
private static String validateCollections(NodeList collections, int level)
throws TransformerException {
StringBuilder err = new StringBuilder();
StringBuffer err = new StringBuffer();
boolean trip = false;
String errs = null;
@@ -352,9 +303,8 @@ public class StructBuilder {
NodeList name = XPathAPI.selectNodeList(n, "name");
if (name.getLength() != 1) {
String pos = Integer.toString(i + 1);
err.append("-The level ").append(level)
.append(" collection in position ").append(pos)
.append(" does not contain exactly one name field.\n");
err.append("-The level " + level + " collection in position " + pos);
err.append(" does not contain exactly one name field\n");
trip = true;
}
}
@@ -413,7 +363,7 @@ public class StructBuilder {
* created communities (e.g. the handles they have been assigned)
*/
private static Element[] handleCommunities(Context context, NodeList communities, Community parent)
throws TransformerException, SQLException, AuthorizeException {
throws TransformerException, SQLException, Exception {
Element[] elements = new Element[communities.getLength()];
for (int i = 0; i < communities.getLength(); i++) {
@@ -440,10 +390,12 @@ public class StructBuilder {
}
// FIXME: at the moment, if the community already exists by name
// then this will throw an SQLException on a duplicate key
// violation.
// Ideally we'd skip this row and continue to create sub communities
// and so forth where they don't exist, but it's proving difficult
// then this will throw a PSQLException on a duplicate key
// violation
// Ideally we'd skip this row and continue to create sub
// communities
// and so forth where they don't exist, but it's proving
// difficult
// to isolate the community that already exists without hitting
// the database directly.
communityService.update(context, community);
@@ -518,7 +470,7 @@ public class StructBuilder {
* created collections (e.g. the handle)
*/
private static Element[] handleCollections(Context context, NodeList collections, Community parent)
throws TransformerException, SQLException, AuthorizeException {
throws TransformerException, SQLException, AuthorizeException, IOException, Exception {
Element[] elements = new Element[collections.getLength()];
for (int i = 0; i < collections.getLength(); i++) {

View File

@@ -27,7 +27,6 @@ import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityValueService;
@@ -35,7 +34,6 @@ import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataValue;
import org.dspace.content.authority.Choices;
import org.dspace.content.factory.ContentServiceFactory;
@@ -200,24 +198,20 @@ public class DSpaceCSV implements Serializable {
}
// Check that the scheme exists
if (!StringUtils.equals(metadataSchema, MetadataSchemaEnum.RELATION.getName())) {
MetadataSchema foundSchema = metadataSchemaService.find(c, metadataSchema);
if (foundSchema == null) {
throw new MetadataImportInvalidHeadingException(clean[0],
MetadataImportInvalidHeadingException
.SCHEMA,
columnCounter);
}
MetadataSchema foundSchema = metadataSchemaService.find(c, metadataSchema);
if (foundSchema == null) {
throw new MetadataImportInvalidHeadingException(clean[0],
MetadataImportInvalidHeadingException.SCHEMA,
columnCounter);
}
// Check that the metadata element exists in the schema
MetadataField foundField = metadataFieldService
.findByElement(c, foundSchema, metadataElement, metadataQualifier);
if (foundField == null) {
throw new MetadataImportInvalidHeadingException(clean[0],
MetadataImportInvalidHeadingException
.ELEMENT,
columnCounter);
}
// Check that the metadata element exists in the schema
MetadataField foundField = metadataFieldService
.findByElement(c, foundSchema, metadataElement, metadataQualifier);
if (foundField == null) {
throw new MetadataImportInvalidHeadingException(clean[0],
MetadataImportInvalidHeadingException.ELEMENT,
columnCounter);
}
// Store the heading

View File

@@ -15,7 +15,6 @@ import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.UUID;
@@ -26,31 +25,22 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityValueService;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Entity;
import org.dspace.content.EntityType;
import org.dspace.content.Item;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataValue;
import org.dspace.content.Relationship;
import org.dspace.content.RelationshipType;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.authority.Choices;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.EntityService;
import org.dspace.content.service.EntityTypeService;
import org.dspace.content.service.InstallItemService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.RelationshipService;
import org.dspace.content.service.RelationshipTypeService;
import org.dspace.content.service.WorkspaceItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
@@ -60,7 +50,6 @@ import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.util.UUIDUtils;
import org.dspace.workflow.WorkflowItem;
import org.dspace.workflow.WorkflowService;
import org.dspace.workflow.factory.WorkflowServiceFactory;
@@ -103,7 +92,7 @@ public class MetadataImport {
/**
* Logger
*/
protected static final Logger log = org.apache.logging.log4j.LogManager.getLogger(MetadataImport.class);
protected static final Logger log = Logger.getLogger(MetadataImport.class);
protected final AuthorityValueService authorityValueService;
@@ -112,10 +101,6 @@ public class MetadataImport {
protected final CollectionService collectionService;
protected final HandleService handleService;
protected final WorkspaceItemService workspaceItemService;
protected final RelationshipTypeService relationshipTypeService;
protected final RelationshipService relationshipService;
protected final EntityTypeService entityTypeService;
protected final EntityService entityService;
/**
* Create an instance of the metadata importer. Requires a context and an array of CSV lines
@@ -135,10 +120,6 @@ public class MetadataImport {
handleService = HandleServiceFactory.getInstance().getHandleService();
authorityValueService = AuthorityServiceFactory.getInstance().getAuthorityValueService();
workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService();
relationshipService = ContentServiceFactory.getInstance().getRelationshipService();
relationshipTypeService = ContentServiceFactory.getInstance().getRelationshipTypeService();
entityTypeService = ContentServiceFactory.getInstance().getEntityTypeService();
entityService = ContentServiceFactory.getInstance().getEntityService();
}
/**
@@ -355,30 +336,16 @@ public class MetadataImport {
item = wsItem.getItem();
// Add the metadata to the item
List<BulkEditMetadataValue> relationships = new LinkedList<>();
for (BulkEditMetadataValue dcv : whatHasChanged.getAdds()) {
if (StringUtils.equals(dcv.getSchema(), MetadataSchemaEnum.RELATION.getName())) {
if (!StringUtils.equals(dcv.getElement(), "type")) {
relationships.add(dcv);
} else {
handleRelationshipMetadataValueFromBulkEditMetadataValue(item, dcv);
}
} else {
itemService.addMetadata(c, item, dcv.getSchema(),
dcv.getElement(),
dcv.getQualifier(),
dcv.getLanguage(),
dcv.getValue(),
dcv.getAuthority(),
dcv.getConfidence());
}
itemService.addMetadata(c, item, dcv.getSchema(),
dcv.getElement(),
dcv.getQualifier(),
dcv.getLanguage(),
dcv.getValue(),
dcv.getAuthority(),
dcv.getConfidence());
}
for (BulkEditMetadataValue relationship : relationships) {
handleRelationshipMetadataValueFromBulkEditMetadataValue(item, relationship);
}
// Should the workflow be used?
if (useWorkflow) {
WorkflowService workflowService = WorkflowServiceFactory.getInstance().getWorkflowService();
@@ -429,27 +396,6 @@ public class MetadataImport {
return changes;
}
/**
* This method handles the BulkEditMetadataValue objects that correspond to Relationship metadata values
* @param item The item to which this metadatavalue will belong
* @param dcv The BulkEditMetadataValue to be processed
* @throws SQLException If something goes wrong
* @throws AuthorizeException If something goes wrong
*/
private void handleRelationshipMetadataValueFromBulkEditMetadataValue(Item item, BulkEditMetadataValue dcv)
    throws SQLException, AuthorizeException {
    // The relation-metadata handler operates on parallel lists of values,
    // authorities and confidences; wrap this single bulk-edit value accordingly.
    LinkedList<String> singleValue = new LinkedList<>();
    singleValue.add(dcv.getValue());
    LinkedList<String> singleAuthority = new LinkedList<>();
    singleAuthority.add(dcv.getAuthority());
    LinkedList<Integer> singleConfidence = new LinkedList<>();
    singleConfidence.add(dcv.getConfidence());
    // Delegate to the general relation-metadata dispatcher with singleton lists.
    handleRelationMetadata(c, item, dcv.getSchema(), dcv.getElement(), dcv.getQualifier(),
                           dcv.getLanguage(), singleValue, singleAuthority, singleConfidence);
}
/**
* Compare an item metadata with a line from CSV, and optionally update the item
*
@@ -637,251 +583,9 @@ public class MetadataImport {
}
}
if (StringUtils.equals(schema, MetadataSchemaEnum.RELATION.getName())) {
List<RelationshipType> relationshipTypeList = relationshipTypeService
.findByLeftOrRightLabel(c, element);
for (RelationshipType relationshipType : relationshipTypeList) {
for (Relationship relationship : relationshipService
.findByItemAndRelationshipType(c, item, relationshipType)) {
relationshipService.delete(c, relationship);
relationshipService.update(c, relationship);
}
}
handleRelationMetadata(c, item, schema, element, qualifier, language, values, authorities, confidences);
} else {
itemService.clearMetadata(c, item, schema, element, qualifier, language);
itemService.addMetadata(c, item, schema, element, qualifier,
language, values, authorities, confidences);
itemService.update(c, item);
}
}
}
/**
* This method decides whether the metadatavalue is of type relation.type or if it corresponds to
* a relationship and handles it accordingly to their respective methods
* @param c The relevant DSpace context
* @param item The item to which this metadatavalue belongs to
* @param schema The schema for the metadatavalue
* @param element The element for the metadatavalue
* @param qualifier The qualifier for the metadatavalue
* @param language The language for the metadatavalue
* @param values The values for the metadatavalue
* @param authorities The authorities for the metadatavalue
* @param confidences The confidences for the metadatavalue
* @throws SQLException If something goes wrong
* @throws AuthorizeException If something goes wrong
*/
private void handleRelationMetadata(Context c, Item item, String schema, String element, String qualifier,
                                    String language, List<String> values, List<String> authorities,
                                    List<Integer> confidences) throws SQLException, AuthorizeException {
    // "relation.type" (element "type" with no qualifier) is special-cased as plain
    // typed metadata; every other relation element describes one relationship per value.
    boolean isRelationType = StringUtils.equals(element, "type") && StringUtils.isBlank(qualifier);
    if (!isRelationType) {
        for (String singleValue : values) {
            handleRelationOtherMetadata(c, item, element, singleValue);
        }
        return;
    }
    handleRelationTypeMetadata(c, item, schema, element, qualifier, language, values, authorities, confidences);
}
/**
* This method takes the item, element and values to determine what relationships should be built
* for these parameters and calls on the method to construct them
* @param c The relevant DSpace context
* @param item The item that the relationships will be made for
* @param element The string determining which relationshiptype is to be used
* @param value The value for the relationship
* @throws SQLException If something goes wrong
* @throws AuthorizeException If something goes wrong
*/
/*
 * NOTE(review): "value" appears to be an item identifier (UUID or legacy id); a value
 * that is not a UUID but resolves as a handle URL is silently skipped — confirm that
 * this early return is the intended contract for handle-valued relation columns.
 */
private void handleRelationOtherMetadata(Context c, Item item, String element, String value)
    throws SQLException, AuthorizeException {
    // Entity wrapping the item being imported (the "this" side of the relationship).
    Entity entity = entityService.findByItemId(c, item.getID());
    // Whether "item" ends up as the left item of the relationship; set by the
    // label-matching helpers inside the loop below.
    boolean left = false;
    List<RelationshipType> acceptableRelationshipTypes = new LinkedList<>();
    String url = handleService.resolveToURL(c, value);
    UUID uuid = UUIDUtils.fromString(value);
    // Skip values that are not UUIDs but do resolve as handles (see NOTE above).
    if (uuid == null && StringUtils.isNotBlank(url)) {
        return;
    }
    // Entity on the other end of the prospective relationship.
    Entity relationEntity = entityService.findByItemId(c, uuid);
    List<RelationshipType> leftRelationshipTypesForEntity = entityService.getLeftRelationshipTypes(c, entity);
    List<RelationshipType> rightRelationshipTypesForEntity = entityService.getRightRelationshipTypes(c, entity);
    // Match the CSV column name (element) against each relationship type's left/right
    // label; the helpers collect valid candidates into acceptableRelationshipTypes
    // and report which side "item" belongs on.
    for (RelationshipType relationshipType : entityService.getAllRelationshipTypes(c, entity)) {
        if (StringUtils.equalsIgnoreCase(relationshipType.getLeftLabel(), element)) {
            left = handleLeftLabelEqualityRelationshipTypeElement(c, entity, relationEntity, left,
                                                                  acceptableRelationshipTypes,
                                                                  leftRelationshipTypesForEntity,
                                                                  relationshipType);
        } else if (StringUtils.equalsIgnoreCase(relationshipType.getRightLabel(), element)) {
            left = handleRightLabelEqualityRelationshipTypeElement(c, entity, relationEntity, left,
                                                                   acceptableRelationshipTypes,
                                                                   rightRelationshipTypesForEntity,
                                                                   relationshipType);
        }
    }
    // Exactly one candidate relationship type is required; anything else is
    // logged and the value is skipped rather than guessed at.
    if (acceptableRelationshipTypes.size() > 1) {
        log.error("Ambiguous relationship_types were found");
        return;
    }
    if (acceptableRelationshipTypes.size() == 0) {
        log.error("no relationship_types were found");
        return;
    }
    //There is exactly one
    buildRelationObject(c, item, value, left, acceptableRelationshipTypes.get(0));
}
/**
* This method creates the relationship for the item and stores it in the database
* @param c The relevant DSpace context
* @param item The item for which this relationship will be constructed
* @param value The value for the relationship
* @param left A boolean indicating whether the item is the leftItem or the rightItem
* @param acceptedRelationshipType The acceptable relationship type
* @throws SQLException If something goes wrong
* @throws AuthorizeException If something goes wrong
*/
private void buildRelationObject(Context c, Item item, String value, boolean left,
                                 RelationshipType acceptedRelationshipType)
    throws SQLException, AuthorizeException {
    // Resolve the other end of the relationship from the CSV cell value
    // (UUID or legacy id), then orient the pair according to "left".
    Item otherItem = itemService.findByIdOrLegacyId(c, value);
    Item leftItem = left ? item : otherItem;
    Item rightItem = left ? otherItem : item;
    // Append the new relationship after the last existing one on each side.
    int leftPlace = relationshipService.findLeftPlaceByLeftItem(c, leftItem) + 1;
    int rightPlace = relationshipService.findRightPlaceByRightItem(c, rightItem) + 1;
    Relationship created = relationshipService.create(c, leftItem, rightItem,
                                                      acceptedRelationshipType, leftPlace, rightPlace);
    relationshipService.update(c, created);
}
/**
* This method will add RelationshipType objects to the acceptableRelationshipTypes list
* if applicable and valid RelationshipType objects are found. It will also return a boolean indicating
* whether we're dealing with a left Relationship or not
* @param c The relevant DSpace context
* @param entity The Entity for which the RelationshipType has to be checked
* @param relationEntity The other Entity of the Relationship
* @param left Boolean indicating whether the Relationship is left or not
* @param acceptableRelationshipTypes The list of RelationshipType objects that will be added to
* @param rightRelationshipTypesForEntity The list of RelationshipType objects that are possible
* for the right entity
* @param relationshipType The RelationshipType object that we want to check whether it's
* valid to be added or not
* @return A boolean indicating whether the relationship is left or right, will
* be false in this case
* @throws SQLException If something goes wrong
*/
private boolean handleRightLabelEqualityRelationshipTypeElement(Context c, Entity entity, Entity relationEntity,
                                                                boolean left,
                                                                List<RelationshipType> acceptableRelationshipTypes,
                                                                List<RelationshipType>
                                                                    rightRelationshipTypesForEntity,
                                                                RelationshipType relationshipType)
    throws SQLException {
    // Accept this relationshipType only when the entity labels line up:
    // "entity" must match the type's right side and "relationEntity" its left side.
    if (StringUtils.equalsIgnoreCase(entityService.getType(c, entity).getLabel(),
                                     relationshipType.getRightType().getLabel()) &&
        StringUtils.equalsIgnoreCase(entityService.getType(c, relationEntity).getLabel(),
                                     relationshipType.getLeftType().getLabel())) {
        // Require that the candidate's left type is reachable from one of the
        // relationship types already possible for this entity's right side.
        for (RelationshipType rightRelationshipType : rightRelationshipTypesForEntity) {
            if (StringUtils.equalsIgnoreCase(rightRelationshipType.getLeftType().getLabel(),
                                             relationshipType.getLeftType().getLabel()) ||
                StringUtils.equalsIgnoreCase(rightRelationshipType.getRightType().getLabel(),
                                             relationshipType.getLeftType().getLabel())) {
                // Right-side match: the caller's item is the right item.
                left = false;
                acceptableRelationshipTypes.add(relationshipType);
            }
        }
    }
    return left;
}
/**
* This method will add RelationshipType objects to the acceptableRelationshipTypes list
* if applicable and valid RelationshipType objects are found. It will also return a boolean indicating
* whether we're dealing with a left Relationship or not
* @param c The relevant DSpace context
* @param entity The Entity for which the RelationshipType has to be checked
* @param relationEntity The other Entity of the Relationship
* @param left Boolean indicating whether the Relationship is left or not
* @param acceptableRelationshipTypes The list of RelationshipType objects that will be added to
* @param leftRelationshipTypesForEntity The list of RelationshipType objects that are possible
* for the left entity
* @param relationshipType The RelationshipType object that we want to check whether it's
* valid to be added or not
* @return A boolean indicating whether the relationship is left or right, will
* be true in this case
* @throws SQLException If something goes wrong
*/
private boolean handleLeftLabelEqualityRelationshipTypeElement(Context c, Entity entity, Entity relationEntity,
                                                               boolean left,
                                                               List<RelationshipType> acceptableRelationshipTypes,
                                                               List<RelationshipType>
                                                                   leftRelationshipTypesForEntity,
                                                               RelationshipType relationshipType)
    throws SQLException {
    // Mirror image of the right-label case: accept this relationshipType only when
    // "entity" matches the type's left side and "relationEntity" its right side.
    if (StringUtils.equalsIgnoreCase(entityService.getType(c, entity).getLabel(),
                                     relationshipType.getLeftType().getLabel()) &&
        StringUtils.equalsIgnoreCase(entityService.getType(c, relationEntity).getLabel(),
                                     relationshipType.getRightType().getLabel())) {
        // Require that the candidate's right type is reachable from one of the
        // relationship types already possible for this entity's left side.
        for (RelationshipType leftRelationshipType : leftRelationshipTypesForEntity) {
            if (StringUtils.equalsIgnoreCase(leftRelationshipType.getRightType().getLabel(),
                                             relationshipType.getRightType().getLabel()) ||
                StringUtils.equalsIgnoreCase(leftRelationshipType.getLeftType().getLabel(),
                                             relationshipType.getRightType().getLabel())) {
                // Left-side match: the caller's item is the left item.
                left = true;
                acceptableRelationshipTypes.add(relationshipType);
            }
        }
    }
    return left;
}
/**
* This method will add the relationship.type metadata to the item if an EntityType can be found for the value in
* the values list.
* @param c The relevant DSpace context
* @param item The item to which this metadatavalue will be added
* @param schema The schema for the metadatavalue to be added
* @param element The element for the metadatavalue to be added
* @param qualifier The qualifier for the metadatavalue to be added
* @param language The language for the metadatavalue to be added
* @param values The value on which we'll search for EntityType object and it's the value
* for the metadatavalue that will be created
* @param authorities The authority for the metadatavalue. This will be filled with the ID
* of the found EntityType for the value if it exists
* @param confidences The confidence for the metadatavalue
* @throws SQLException If something goes wrong
* @throws AuthorizeException If something goes wrong
*/
private void handleRelationTypeMetadata(Context c, Item item, String schema, String element, String qualifier,
                                        String language, List<String> values, List<String> authorities,
                                        List<Integer> confidences)
    throws SQLException, AuthorizeException {
    // Look up the EntityType named by the first value; if none exists, the
    // relation.type metadata is left untouched.
    EntityType entityType = entityTypeService.findByEntityType(c, values.get(0));
    if (entityType != null) {
        // The found EntityType's ID becomes the authority for the stored value.
        authorities.add(String.valueOf(entityType.getID()));
        // Replace any existing relation.type values with the new one.
        // BUG FIX: the previous text contained two identical addMetadata calls,
        // which would have stored every value twice; exactly one call is kept.
        itemService.clearMetadata(c, item, schema, element, qualifier, language);
        itemService.addMetadata(c, item, schema, element, qualifier, language,
                                values, authorities, confidences);
        itemService.update(c, item);
    }
}

View File

@@ -23,8 +23,7 @@ import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
import org.dspace.checker.BitstreamDispatcher;
import org.dspace.checker.CheckerCommand;
import org.dspace.checker.HandleDispatcher;
@@ -49,7 +48,7 @@ import org.dspace.core.Utils;
* @author Nathan Sarr
*/
public final class ChecksumChecker {
private static final Logger LOG = LogManager.getLogger(ChecksumChecker.class);
private static final Logger LOG = Logger.getLogger(ChecksumChecker.class);
private static final BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();

View File

@@ -32,8 +32,8 @@ import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import javax.mail.MessagingException;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.app.itemexport.service.ItemExportService;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
@@ -42,7 +42,7 @@ import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataValue;
import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.CommunityService;
@@ -98,7 +98,7 @@ public class ItemExportServiceImpl implements ItemExportService {
/**
* log4j logger
*/
private Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemExportServiceImpl.class);
private Logger log = Logger.getLogger(ItemExportServiceImpl.class);
protected ItemExportServiceImpl() {
@@ -214,7 +214,7 @@ public class ItemExportServiceImpl implements ItemExportService {
protected void writeMetadata(Context c, String schema, Item i,
File destDir, boolean migrate) throws Exception {
String filename;
if (schema.equals(MetadataSchemaEnum.DC.getName())) {
if (schema.equals(MetadataSchema.DC_SCHEMA)) {
filename = "dublin_core.xml";
} else {
filename = "metadata_" + schema + ".xml";
@@ -271,8 +271,9 @@ public class ItemExportServiceImpl implements ItemExportService {
("date".equals(metadataField.getElement()) && "accessioned".equals(qualifier)) ||
("date".equals(metadataField.getElement()) && "available".equals(qualifier)) ||
("identifier".equals(metadataField.getElement()) && "uri".equals(qualifier) &&
(dcv.getValue() != null && dcv.getValue().startsWith(
handleService.getCanonicalPrefix() + handleService.getPrefix() + "/"))) ||
(dcv.getValue() != null && dcv.getValue().startsWith("http://hdl.handle.net/" +
handleService
.getPrefix() + "/"))) ||
("description".equals(metadataField.getElement()) && "provenance".equals(qualifier)) ||
("format".equals(metadataField.getElement()) && "extent".equals(qualifier)) ||
("format".equals(metadataField.getElement()) && "mimetype".equals(qualifier))))) {
@@ -546,7 +547,7 @@ public class ItemExportServiceImpl implements ItemExportService {
List<Bitstream> bitstreams = bundle.getBitstreams();
for (Bitstream bitstream : bitstreams) {
// add up the size
size += bitstream.getSizeBytes();
size += bitstream.getSize();
}
}
items.add(item.getID());
@@ -573,7 +574,7 @@ public class ItemExportServiceImpl implements ItemExportService {
List<Bitstream> bitstreams = bundle.getBitstreams();
for (Bitstream bitstream : bitstreams) {
// add up the size
size += bitstream.getSizeBytes();
size += bitstream.getSize();
}
}
items.add(item.getID());
@@ -592,7 +593,7 @@ public class ItemExportServiceImpl implements ItemExportService {
List<Bitstream> bitstreams = bundle.getBitstreams();
for (Bitstream bitstream : bitstreams) {
// add up the size
size += bitstream.getSizeBytes();
size += bitstream.getSize();
}
}
ArrayList<UUID> items = new ArrayList<>();

View File

@@ -52,13 +52,13 @@ import gr.ekt.bte.core.TransformationSpec;
import gr.ekt.bte.dataloader.FileDataLoader;
import gr.ekt.bteio.generators.DSpaceOutputGenerator;
import gr.ekt.bteio.loaders.OAIPMHDataLoader;
import org.apache.commons.collections4.ComparatorUtils;
import org.apache.commons.collections.ComparatorUtils;
import org.apache.commons.io.FileDeleteStrategy;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
import org.apache.xpath.XPathAPI;
import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.app.util.LocalSchemaFilenameFilter;
@@ -74,7 +74,6 @@ import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.service.BitstreamFormatService;
import org.dspace.content.service.BitstreamService;
@@ -125,7 +124,7 @@ import org.xml.sax.SAXException;
* allow the registration of files (bitstreams) into DSpace.
*/
public class ItemImportServiceImpl implements ItemImportService, InitializingBean {
private final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemImportServiceImpl.class);
private final Logger log = Logger.getLogger(ItemImportServiceImpl.class);
@Autowired(required = true)
protected AuthorizeService authorizeService;
@@ -678,7 +677,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
Node schemaAttr = metadata.item(0).getAttributes().getNamedItem(
"schema");
if (schemaAttr == null) {
schema = MetadataSchemaEnum.DC.getName();
schema = MetadataSchema.DC_SCHEMA;
} else {
schema = schemaAttr.getNodeValue();
}

View File

@@ -30,7 +30,7 @@ import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
import org.dspace.app.util.LocalSchemaFilenameFilter;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
@@ -47,7 +47,7 @@ import org.w3c.dom.Document;
* Encapsulates the Item in the context of the DSpace Archive Format
*/
public class ItemArchive {
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemArchive.class);
private static final Logger log = Logger.getLogger(ItemArchive.class);
public static final String DUBLIN_CORE_XML = "dublin_core.xml";

View File

@@ -31,12 +31,11 @@ import org.apache.commons.cli.PosixParser;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
/**
* Provides some batch editing capabilities for items in DSpace:
@@ -79,7 +78,6 @@ public class ItemUpdate {
protected static final EPersonService epersonService = EPersonServiceFactory.getInstance().getEPersonService();
protected static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
protected static final HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
static {
filterAliases.put("ORIGINAL", "org.dspace.app.itemupdate.OriginalBitstreamFilter");
@@ -332,7 +330,10 @@ public class ItemUpdate {
iu.setEPerson(context, iu.eperson);
context.turnOffAuthorisationSystem();
HANDLE_PREFIX = handleService.getCanonicalPrefix();
HANDLE_PREFIX = ConfigurationManager.getProperty("handle.canonical.prefix");
if (HANDLE_PREFIX == null || HANDLE_PREFIX.length() == 0) {
HANDLE_PREFIX = "http://hdl.handle.net/";
}
iu.processArchive(context, sourcedir, itemField, metadataIndexName, alterProvenance, isTest);

View File

@@ -28,13 +28,12 @@ import javax.xml.transform.TransformerException;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
@@ -190,7 +189,7 @@ public class MetadataUtilities {
NodeList metadata = XPathAPI.selectNodeList(document, "/dublin_core");
Node schemaAttr = metadata.item(0).getAttributes().getNamedItem("schema");
if (schemaAttr == null) {
schema = MetadataSchemaEnum.DC.getName();
schema = MetadataSchema.DC_SCHEMA;
} else {
schema = schemaAttr.getNodeValue();
}

View File

@@ -11,7 +11,7 @@ import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.IOUtils;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.hssf.extractor.ExcelExtractor;
@@ -36,7 +36,7 @@ import org.dspace.content.Item;
*/
public class ExcelFilter extends MediaFilter {
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(ExcelFilter.class);
private static Logger log = Logger.getLogger(ExcelFilter.class);
public String getFilteredName(String oldFilename) {
return oldFilename + ".txt";

View File

@@ -143,7 +143,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
// PDFs using the CMYK color system can be handled specially if
// profiles are defined
if (cmyk_profile != null && srgb_profile != null) {
Info imageInfo = new Info(f.getAbsolutePath() + s, true);
Info imageInfo = new Info(f.getAbsolutePath(), true);
String imageClass = imageInfo.getImageClass();
if (imageClass.contains("CMYK")) {
op.profile(cmyk_profile);

View File

@@ -22,7 +22,7 @@ import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang.ArrayUtils;
import org.dspace.app.mediafilter.factory.MediaFilterServiceFactory;
import org.dspace.app.mediafilter.service.MediaFilterService;
import org.dspace.content.Collection;

View File

@@ -220,7 +220,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
} catch (Exception e) {
String handle = myItem.getHandle();
List<Bundle> bundles = myBitstream.getBundles();
long size = myBitstream.getSizeBytes();
long size = myBitstream.getSize();
String checksum = myBitstream.getChecksum() + " (" + myBitstream.getChecksumAlgorithm() + ")";
int assetstore = myBitstream.getStoreNumber();
@@ -310,11 +310,12 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
// get bitstream filename, calculate destination filename
String newName = formatFilter.getFilteredName(source.getName());
// check if destination bitstream exists
Bundle existingBundle = null;
Bitstream existingBitstream = null;
Bitstream existingBitstream = null; // is there an existing rendition?
Bundle targetBundle = null; // bundle we're modifying
List<Bundle> bundles = itemService.getBundles(item, formatFilter.getBundleName());
// check if destination bitstream exists
if (bundles.size() > 0) {
// only finds the last match (FIXME?)
for (Bundle bundle : bundles) {
@@ -322,7 +323,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
for (Bitstream bitstream : bitstreams) {
if (bitstream.getName().trim().equals(newName.trim())) {
existingBundle = bundle;
targetBundle = bundle;
existingBitstream = bitstream;
}
}
@@ -344,71 +345,63 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
+ " (item: " + item.getHandle() + ")");
}
System.out.println("File: " + newName);
// start filtering of the bitstream, using try with resource to close all InputStreams properly
try (
// get the source stream
InputStream srcStream = bitstreamService.retrieve(context, source);
// filter the source stream to produce the destination stream
// this is the hard work, check for OutOfMemoryErrors at the end of the try clause.
InputStream destStream = formatFilter.getDestinationStream(item, srcStream, isVerbose);
) {
InputStream destStream;
try {
System.out.println("File: " + newName);
destStream = formatFilter.getDestinationStream(item, bitstreamService.retrieve(context, source), isVerbose);
if (destStream == null) {
if (!isQuiet) {
System.out.println("SKIPPED: bitstream " + source.getID()
+ " (item: " + item.getHandle() + ") because filtering was unsuccessful");
+ " (item: " + item.getHandle() + ") because filtering was unsuccessful");
}
return false;
}
Bundle targetBundle; // bundle we're modifying
if (bundles.size() < 1) {
// create new bundle if needed
targetBundle = bundleService.create(context, item, formatFilter.getBundleName());
} else {
// take the first match as we already looked out for the correct bundle name
targetBundle = bundles.get(0);
}
// create bitstream to store the filter result
Bitstream b = bitstreamService.create(context, targetBundle, destStream);
// set the name, source and description of the bitstream
b.setName(context, newName);
b.setSource(context, "Written by FormatFilter " + formatFilter.getClass().getName() +
" on " + DCDate.getCurrent() + " (GMT).");
b.setDescription(context, formatFilter.getDescription());
// Set the format of the bitstream
BitstreamFormat bf = bitstreamFormatService.findByShortDescription(context,
formatFilter.getFormatString());
bitstreamService.setFormat(context, b, bf);
bitstreamService.update(context, b);
//Set permissions on the derivative bitstream
//- First remove any existing policies
authorizeService.removeAllPolicies(context, b);
//- Determine if this is a public-derivative format
if (publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) {
//- Set derivative bitstream to be publicly accessible
Group anonymous = groupService.findByName(context, Group.ANONYMOUS);
authorizeService.addPolicy(context, b, Constants.READ, anonymous);
} else {
//- Inherit policies from the source bitstream
authorizeService.inheritPolicies(context, source, b);
}
//do post-processing of the generated bitstream
formatFilter.postProcessBitstream(context, item, b);
} catch (OutOfMemoryError oome) {
System.out.println("!!! OutOfMemoryError !!!");
return false;
}
// create new bundle if needed
if (bundles.size() < 1) {
targetBundle = bundleService.create(context, item, formatFilter.getBundleName());
} else {
// take the first match
targetBundle = bundles.get(0);
}
Bitstream b = bitstreamService.create(context, targetBundle, destStream);
// Now set the format and name of the bitstream
b.setName(context, newName);
b.setSource(context, "Written by FormatFilter " + formatFilter.getClass().getName() +
" on " + DCDate.getCurrent() + " (GMT).");
b.setDescription(context, formatFilter.getDescription());
// Find the proper format
BitstreamFormat bf = bitstreamFormatService.findByShortDescription(context,
formatFilter.getFormatString());
bitstreamService.setFormat(context, b, bf);
bitstreamService.update(context, b);
//Set permissions on the derivative bitstream
//- First remove any existing policies
authorizeService.removeAllPolicies(context, b);
//- Determine if this is a public-derivative format
if (publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) {
//- Set derivative bitstream to be publicly accessible
Group anonymous = groupService.findByName(context, Group.ANONYMOUS);
authorizeService.addPolicy(context, b, Constants.READ, anonymous);
} else {
//- Inherit policies from the source bitstream
authorizeService.inheritPolicies(context, source, b);
}
// fixme - set date?
// we are overwriting, so remove old bitstream
if (existingBitstream != null) {
bundleService.removeBitstream(context, existingBundle, existingBitstream);
bundleService.removeBitstream(context, targetBundle, existingBitstream);
}
if (!isQuiet) {
@@ -416,6 +409,9 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
+ " (item: " + item.getHandle() + ") and created '" + newName + "'");
}
//do post-processing of the generated bitstream
formatFilter.postProcessBitstream(context, item, b);
return true;
}

View File

@@ -16,7 +16,7 @@ import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.text.PDFTextStripper;
import org.dspace.content.Item;
@@ -30,7 +30,7 @@ import org.dspace.core.ConfigurationManager;
*/
public class PDFFilter extends MediaFilter {
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PDFFilter.class);
private static Logger log = Logger.getLogger(PDFFilter.class);
@Override
public String getFilteredName(String oldFilename) {

View File

@@ -10,7 +10,7 @@ package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.hslf.extractor.PowerPointExtractor;
@@ -23,7 +23,7 @@ import org.dspace.content.Item;
*/
public class PowerPointFilter extends MediaFilter {
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PowerPointFilter.class);
private static Logger log = Logger.getLogger(PowerPointFilter.class);
@Override
public String getFilteredName(String oldFilename) {

View File

@@ -0,0 +1,93 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import org.apache.log4j.Logger;

import org.dspace.content.Item;

import org.textmining.extraction.TextExtractor;
import org.textmining.extraction.word.WordTextExtractorFactory;
/*
*
* to do: helpful error messages - can't find mediafilter.cfg - can't
* instantiate filter - bitstream format doesn't exist.
*
*/
public class WordFilter extends MediaFilter {

    private static Logger log = Logger.getLogger(WordFilter.class);

    /**
     * Derive the name of the filtered bitstream from the source name.
     *
     * @param oldFilename name of the source bitstream
     * @return the source name with a ".txt" suffix appended
     */
    @Override
    public String getFilteredName(String oldFilename) {
        return oldFilename + ".txt";
    }

    /**
     * @return String bundle name the filtered bitstream is written to
     */
    @Override
    public String getBundleName() {
        return "TEXT";
    }

    /**
     * @return String short description of the target bitstream format
     */
    @Override
    public String getFormatString() {
        return "Text";
    }

    /**
     * @return String description applied to the generated bitstream
     */
    @Override
    public String getDescription() {
        return "Extracted text";
    }

    /**
     * Extract the plain text from a Microsoft Word document.
     *
     * @param currentItem item the bitstream belongs to (unused here)
     * @param source      source input stream containing the Word document
     * @param verbose     if true, echo the extracted text to STDOUT
     * @return InputStream over the extracted text, encoded as UTF-8
     * @throws Exception if the stream is not a recognized Word format,
     *                   or extraction fails for any other reason
     */
    @Override
    public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
        throws Exception {
        try {
            // Hand the raw bitstream to the text-mining extractor, which
            // detects the concrete Word variant and returns the body text.
            WordTextExtractorFactory factory = new WordTextExtractorFactory();
            TextExtractor extractor = factory.textExtractor(source);
            String extractedText = extractor.getText();

            // if verbose flag is set, print out extracted text to STDOUT
            if (verbose) {
                System.out.println(extractedText);
            }

            // Encode with an explicit charset: the no-arg getBytes() uses the
            // platform default encoding and can corrupt non-ASCII text.
            // (The byte array is copied into the stream, so scope is not an issue.)
            byte[] textBytes = extractedText.getBytes(StandardCharsets.UTF_8);
            return new ByteArrayInputStream(textBytes);
        } catch (IOException ioe) {
            System.out.println("Invalid Word Format");
            log.error("Error detected - Word File format not recognized: "
                + ioe.getMessage(), ioe);
            throw ioe;
        }
    }
}

View File

@@ -9,8 +9,8 @@ package org.dspace.app.requestitem;
import java.sql.SQLException;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
@@ -31,7 +31,7 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
private Logger log = org.apache.logging.log4j.LogManager.getLogger(RequestItemHelpdeskStrategy.class);
private Logger log = Logger.getLogger(RequestItemHelpdeskStrategy.class);
@Autowired(required = true)
protected EPersonService ePersonService;

View File

@@ -10,7 +10,7 @@ package org.dspace.app.requestitem;
import java.sql.SQLException;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang.StringUtils;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.service.ItemService;

View File

@@ -10,7 +10,7 @@ package org.dspace.app.requestitem;
import java.sql.SQLException;
import java.util.Date;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
import org.dspace.app.requestitem.dao.RequestItemDAO;
import org.dspace.app.requestitem.service.RequestItemService;
import org.dspace.content.Bitstream;
@@ -28,7 +28,7 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
public class RequestItemServiceImpl implements RequestItemService {
private final Logger log = org.apache.logging.log4j.LogManager.getLogger(RequestItemServiceImpl.class);
private final Logger log = Logger.getLogger(RequestItemServiceImpl.class);
@Autowired(required = true)
protected RequestItemDAO requestItemDAO;

View File

@@ -8,15 +8,13 @@
package org.dspace.app.requestitem.dao.impl;
import java.sql.SQLException;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
import org.dspace.app.requestitem.RequestItem;
import org.dspace.app.requestitem.RequestItem_;
import org.dspace.app.requestitem.dao.RequestItemDAO;
import org.dspace.core.AbstractHibernateDAO;
import org.dspace.core.Context;
import org.hibernate.Criteria;
import org.hibernate.criterion.Restrictions;
/**
* Hibernate implementation of the Database Access Object interface class for the RequestItem object.
@@ -32,12 +30,9 @@ public class RequestItemDAOImpl extends AbstractHibernateDAO<RequestItem> implem
@Override
public RequestItem findByToken(Context context, String token) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, RequestItem.class);
Root<RequestItem> requestItemRoot = criteriaQuery.from(RequestItem.class);
criteriaQuery.select(requestItemRoot);
criteriaQuery.where(criteriaBuilder.equal(requestItemRoot.get(RequestItem_.token), token));
return uniqueResult(context, criteriaQuery, false, RequestItem.class, -1, -1);
Criteria criteria = createCriteria(context, RequestItem.class);
criteria.add(Restrictions.eq("token", token));
return uniqueResult(criteria);
}

View File

@@ -15,8 +15,8 @@ import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.app.sfx.service.SFXFileReaderService;
import org.dspace.content.DCPersonName;
import org.dspace.content.Item;
@@ -58,7 +58,7 @@ public class SFXFileReaderServiceImpl implements SFXFileReaderService {
/**
* log4j logger
*/
private final Logger log = org.apache.logging.log4j.LogManager.getLogger(SFXFileReaderServiceImpl.class);
private final Logger log = Logger.getLogger(SFXFileReaderServiceImpl.class);
protected SFXFileReaderServiceImpl() {
}

View File

@@ -13,7 +13,7 @@ import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang.StringUtils;
import org.dspace.app.util.XMLUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

View File

@@ -7,7 +7,7 @@
*/
package org.dspace.app.sherpa;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
@@ -16,7 +16,7 @@ import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
import org.dspace.core.ConfigurationManager;
public class SHERPAService {
@@ -29,7 +29,7 @@ public class SHERPAService {
/**
* log4j category
*/
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SHERPAService.class);
private static final Logger log = Logger.getLogger(SHERPAService.class);
public SHERPAService() {
HttpClientBuilder builder = HttpClientBuilder.create();

View File

@@ -11,8 +11,8 @@ import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.app.sherpa.SHERPAResponse;
import org.dspace.app.sherpa.SHERPAService;
import org.dspace.content.Item;
@@ -27,7 +27,7 @@ public class SHERPASubmitService {
/**
* log4j logger
*/
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(SHERPASubmitService.class);
private static Logger log = Logger.getLogger(SHERPASubmitService.class);
public void setConfiguration(SHERPASubmitConfigurationService configuration) {
this.configuration = configuration;

View File

@@ -27,9 +27,9 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
@@ -52,7 +52,7 @@ public class GenerateSitemaps {
/**
* Logger
*/
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(GenerateSitemaps.class);
private static Logger log = Logger.getLogger(GenerateSitemaps.class);
private static final CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
private static final CollectionService collectionService =

View File

@@ -28,7 +28,7 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.dspace.content.Item;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
@@ -763,10 +763,9 @@ public class ReportGenerator {
// build the referece
// FIXME: here we have blurred the line between content and presentation
// and it should probably be un-blurred
List<MetadataValue> title = itemService.getMetadata(item, MetadataSchemaEnum.DC.getName(),
"title", null, Item.ANY);
List<MetadataValue> title = itemService.getMetadata(item, MetadataSchema.DC_SCHEMA, "title", null, Item.ANY);
List<MetadataValue> author = itemService
.getMetadata(item, MetadataSchemaEnum.DC.getName(), "contributor", "author", Item.ANY);
.getMetadata(item, MetadataSchema.DC_SCHEMA, "contributor", "author", Item.ANY);
StringBuffer authors = new StringBuffer();
if (author.size() > 0) {

View File

@@ -21,7 +21,7 @@ import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.commons.lang.time.DateUtils;
import org.dspace.core.ConfigurationManager;
/**

View File

@@ -13,8 +13,8 @@ import java.util.Map;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.MetadataSchemaEnum;
import org.apache.commons.lang.StringUtils;
import org.dspace.content.MetadataSchema;
import org.dspace.core.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -63,12 +63,6 @@ public class DCInput {
*/
private String label = null;
/**
* a style instruction to apply to the input. The exact way to use the style value is UI depending that receive the
* value from the REST API as is
*/
private String style = null;
/**
* the input type
*/
@@ -160,7 +154,7 @@ public class DCInput {
// Default the schema to dublin core
dcSchema = fieldMap.get("dc-schema");
if (dcSchema == null) {
dcSchema = MetadataSchemaEnum.DC.getName();
dcSchema = MetadataSchema.DC_SCHEMA;
}
//check if the input have a language tag
@@ -205,7 +199,7 @@ public class DCInput {
typeBind.add(type.trim());
}
}
style = fieldMap.get("style");
}
/**
@@ -268,7 +262,7 @@ public class DCInput {
}
/**
* Get the DC element for this form field.
* Get the DC element for this form row.
*
* @return the DC element
*/
@@ -277,7 +271,7 @@ public class DCInput {
}
/**
* Get the DC namespace prefix for this form field.
* Get the DC namespace prefix for this form row.
*
* @return the DC namespace prefix
*/
@@ -296,7 +290,7 @@ public class DCInput {
}
/**
* Is there a required string for this form field?
* Is there a required string for this form row?
*
* @return true if a required string is set
*/
@@ -305,7 +299,7 @@ public class DCInput {
}
/**
* Get the DC qualifier for this form field.
* Get the DC qualifier for this form row.
*
* @return the DC qualifier
*/
@@ -314,7 +308,7 @@ public class DCInput {
}
/**
* Get the language for this form field.
* Get the language for this form row.
*
* @return the language state
*/
@@ -323,7 +317,7 @@ public class DCInput {
}
/**
* Get the hint for this form field
* Get the hint for this form row, formatted for an HTML table
*
* @return the hints
*/
@@ -332,7 +326,7 @@ public class DCInput {
}
/**
* Get the label for this form field.
* Get the label for this form row.
*
* @return the label
*/
@@ -340,15 +334,6 @@ public class DCInput {
return label;
}
/**
* Get the style for this form field
*
* @return the style
*/
public String getStyle() {
return style;
}
/**
* Get the name of the pairs type
*

View File

@@ -25,26 +25,25 @@ public class DCInputSet {
/**
* the inputs ordered by row position
*/
private DCInput[][] inputs = null;
private DCInput[] inputs = null;
/**
* constructor
*
* @param formName form name
* @param headings
* @param mandatoryFlags
* @param rows the rows
* @param fields fields
* @param listMap map
*/
public DCInputSet(String formName, List<List<Map<String, String>>> rows, Map<String, List<String>> listMap) {
public DCInputSet(String formName,
List<Map<String, String>> fields, Map<String, List<String>> listMap) {
this.formName = formName;
this.inputs = new DCInput[rows.size()][];
this.inputs = new DCInput[fields.size()];
for (int i = 0; i < inputs.length; i++) {
List<Map<String, String>> fields = rows.get(i);
inputs[i] = new DCInput[fields.size()];
for (int j = 0; j < inputs[i].length; j++) {
Map<String, String> field = rows.get(i).get(j);
inputs[i][j] = new DCInput(field, listMap);
}
Map<String, String> field = fields.get(i);
inputs[i] = new DCInput(field, listMap);
}
}
@@ -72,7 +71,7 @@ public class DCInputSet {
* @return an array containing the fields
*/
public DCInput[][] getFields() {
public DCInput[] getFields() {
return inputs;
}
@@ -105,12 +104,10 @@ public class DCInputSet {
*/
public boolean isFieldPresent(String fieldName) {
for (int i = 0; i < inputs.length; i++) {
for (int j = 0; j < inputs[i].length; j++) {
DCInput field = inputs[i][j];
String fullName = field.getFieldName();
if (fullName.equals(fieldName)) {
return true;
}
DCInput field = inputs[i];
String fullName = field.getFieldName();
if (fullName.equals(fieldName)) {
return true;
}
}
return false;
@@ -130,13 +127,11 @@ public class DCInputSet {
documentType = "";
}
for (int i = 0; i < inputs.length; i++) {
for (int j = 0; j < inputs[i].length; j++) {
DCInput field = inputs[i][j];
String fullName = field.getFieldName();
if (fullName.equals(fieldName)) {
if (field.isAllowedFor(documentType)) {
return true;
}
DCInput field = inputs[i];
String fullName = field.getFieldName();
if (fullName.equals(fieldName)) {
if (field.isAllowedFor(documentType)) {
return true;
}
}
}

View File

@@ -21,7 +21,7 @@ import javax.xml.parsers.FactoryConfigurationError;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.Collection;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataSchema;
import org.dspace.core.Utils;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.w3c.dom.Document;
@@ -74,7 +74,7 @@ public class DCInputsReader {
* Reference to the forms definitions map, computed from the forms
* definition file
*/
private Map<String, List<List<Map<String, String>>>> formDefns = null;
private Map<String, List<Map<String, String>>> formDefns = null;
/**
* Reference to the value-pairs map, computed from the forms definition file
@@ -115,7 +115,7 @@ public class DCInputsReader {
private void buildInputs(String fileName)
throws DCInputsReaderException {
formDefns = new HashMap<String, List<List<Map<String, String>>>>();
formDefns = new HashMap<String, List<Map<String, String>>>();
valuePairs = new HashMap<String, List<String>>();
String uri = "file:" + new File(fileName).getAbsolutePath();
@@ -212,7 +212,7 @@ public class DCInputsReader {
return lastInputSet;
}
// cache miss - construct new DCInputSet
List<List<Map<String, String>>> pages = formDefns.get(formName);
List<Map<String, String>> pages = formDefns.get(formName);
if (pages == null) {
throw new DCInputsReaderException("Missing the " + formName + " form");
}
@@ -292,8 +292,8 @@ public class DCInputsReader {
/**
* Process the form-definitions section of the XML file. Each element is
* formed thusly: <form name="formname">...row...</form> Each rows
* subsection is formed: <row> ...fields... </row> Each field
* formed thusly: <form name="formname">...pages...</form> Each pages
* subsection is formed: <page number="#"> ...fields... </page> Each field
* is formed from: dc-element, dc-qualifier, label, hint, input-type name,
* required text, and repeatable flag.
*/
@@ -311,24 +311,26 @@ public class DCInputsReader {
if (formName == null) {
throw new SAXException("form element has no name attribute");
}
List<List<Map<String, String>>> rows = new ArrayList<List<Map<String, String>>>(); // the form
// contains rows of fields
formDefns.put(formName, rows);
List<Map<String, String>> fields = new ArrayList<Map<String, String>>(); // the form contains fields
formDefns.put(formName, fields);
NodeList pl = nd.getChildNodes();
int lenpg = pl.getLength();
for (int j = 0; j < lenpg; j++) {
Node npg = pl.item(j);
if (npg.getNodeName().equals("row")) {
List<Map<String, String>> fields = new ArrayList<Map<String, String>>(); // the fields in the
// row
// process each row definition
processRow(formName, j, npg, fields);
rows.add(fields);
if (npg.getNodeName().equals("field")) {
// process each field definition
Map<String, String> field = new HashMap<String, String>();
processField(formName, npg, field);
fields.add(field);
// we omit the duplicate validation, allowing multiple
// fields definition for
// the same metadata and different visibility/type-bind
}
}
// sanity check number of fields
if (rows.size() < 1) {
throw new DCInputsReaderException("Form " + formName + " has no rows");
if (fields.size() < 1) {
throw new DCInputsReaderException("Form " + formName + " has no fields");
}
}
}
@@ -337,48 +339,6 @@ public class DCInputsReader {
}
}
/**
* Process parts of a row
*/
private void processRow(String formName, int rowIdx, Node n, List<Map<String, String>> fields)
throws SAXException, DCInputsReaderException {
NodeList pl = n.getChildNodes();
int lenpg = pl.getLength();
for (int j = 0; j < lenpg; j++) {
Node npg = pl.item(j);
if (npg.getNodeName().equals("field")) {
// process each field definition
Map<String, String> field = new HashMap<String, String>();
processField(formName, npg, field);
fields.add(field);
String key = field.get(PAIR_TYPE_NAME);
if (StringUtils
.isNotBlank(key)) {
String schema = field.get("dc-schema");
String element = field.get("dc-element");
String qualifier = field
.get("dc-qualifier");
String metadataField = schema + "."
+ element;
if (StringUtils.isNotBlank(qualifier)) {
metadataField += "." + qualifier;
}
}
// we omit the duplicate validation, allowing multiple
// fields definition for
// the same metadata and different visibility/type-bind
}
}
// sanity check number of fields
if (fields.size() < 1) {
throw new DCInputsReaderException("Form " + formName + "row " + rowIdx + " has no fields");
}
}
/**
* Process parts of a field
* At the end, make sure that input-types 'qualdrop_value' and
@@ -464,7 +424,7 @@ public class DCInputsReader {
String elem = field.get("dc-element");
String qual = field.get("dc-qualifier");
if ((schema == null) || (schema.equals(""))) {
schema = MetadataSchemaEnum.DC.getName();
schema = MetadataSchema.DC_SCHEMA;
}
String schemaTest;
@@ -474,7 +434,7 @@ public class DCInputsReader {
Map<String, String> fld = pg.get(j);
if ((fld.get("dc-schema") == null) ||
((fld.get("dc-schema")).equals(""))) {
schemaTest = MetadataSchemaEnum.DC.getName();
schemaTest = MetadataSchema.DC_SCHEMA;
} else {
schemaTest = fld.get("dc-schema");
}
@@ -577,29 +537,26 @@ public class DCInputsReader {
Iterator<String> ki = formDefns.keySet().iterator();
while (ki.hasNext()) {
String idName = ki.next();
List<List<Map<String, String>>> rows = formDefns.get(idName);
for (int j = 0; j < rows.size(); j++) {
List<Map<String, String>> fields = rows.get(j);
for (int i = 0; i < fields.size(); i++) {
Map<String, String> fld = fields.get(i);
// verify reference in certain input types
String type = fld.get("input-type");
if (type.equals("dropdown")
|| type.equals("qualdrop_value")
|| type.equals("list")) {
String pairsName = fld.get(PAIR_TYPE_NAME);
List<String> v = valuePairs.get(pairsName);
if (v == null) {
String errString = "Cannot find value pairs for " + pairsName;
throw new DCInputsReaderException(errString);
}
List<Map<String, String>> fields = formDefns.get(idName);
for (int i = 0; i < fields.size(); i++) {
Map<String, String> fld = fields.get(i);
// verify reference in certain input types
String type = fld.get("input-type");
if (type.equals("dropdown")
|| type.equals("qualdrop_value")
|| type.equals("list")) {
String pairsName = fld.get(PAIR_TYPE_NAME);
List<String> v = valuePairs.get(pairsName);
if (v == null) {
String errString = "Cannot find value pairs for " + pairsName;
throw new DCInputsReaderException(errString);
}
// we omit the "required" and "visibility" validation, provided this must be checked in the
// processing class
// only when it makes sense (if the field isn't visible means that it is not applicable,
// therefore it can't be required)
}
// we omit the "required" and "visibility" validation, provided this must be checked in the
// processing class
// only when it makes sense (if the field isn't visible means that it is not applicable, therefore it
// can't be required)
}
}
}
@@ -682,5 +639,4 @@ public class DCInputsReader {
}
throw new DCInputsReaderException("No field configuration found!");
}
}

View File

@@ -16,14 +16,14 @@ import java.util.Enumeration;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
/**
* Class to initialize / cleanup resources used by DSpace when the web application
* is started or stopped.
*/
public class DSpaceContextListener implements ServletContextListener {
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(DSpaceContextListener.class);
private static Logger log = Logger.getLogger(DSpaceContextListener.class);
/**
* Initialize any resources required by the application.

View File

@@ -0,0 +1,299 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.util;
import java.io.File;
import java.io.IOException;
import java.net.UnknownHostException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import org.apache.commons.lang.time.DateUtils;
import org.apache.log4j.FileAppender;
import org.apache.log4j.helpers.LogLog;
import org.apache.log4j.spi.LoggingEvent;
/**
* Special log appender for log4j. Adds the current date (ie. year-mon) to
* the end of the file name, so that rolling on to the next log is simply
* a case of starting a new one - no renaming of old logs.
*
* This is advisable if you are using Windows, and have multiple applications
* (ie. dspace, dspace-oai, dspace-sword) that all want to write to the same log file,
* as each would otherwise try to rename the old files during rollover.
*
* An example log4j.properties (one log per month, retains three months of logs)
*
* log4j.rootCategory=INFO, A1
* log4j.appender.A1=org.dspace.app.util.DailyFileAppender
* log4j.appender.A1.File=@@log.dir@@/dspace.log
* log4j.appender.A1.DatePattern=yyyy-MM
* log4j.appender.A1.MaxLogs=3
* log4j.appender.A1.layout=org.apache.log4j.PatternLayout
* log4j.appender.A1.layout.ConversionPattern=%d %-5p %c @ %m%n
*/
public class DailyFileAppender extends FileAppender {
/**
* The fixed date pattern to be used if one is not specified.
*/
private static final String DATE_PATTERN = "yyyy-MM-dd";
/**
* The folder under which daily folders are created. This can be a absolute path
* or relative path also.
* e.g. JavaLogs/CPRILog or F:/LogFiles/CPRILog
*/
private String mstrFileName;
/**
* Used internally and contains the name of the date derived from current system date.
*/
private Date mstrDate = new Date(System.currentTimeMillis());
/**
* Holds the user specified DatePattern,
*/
private String mstrDatePattern = DATE_PATTERN;
private boolean mMonthOnly = false;
/**
* The date formatter object used for parsing the user specified DatePattern.
*/
private SimpleDateFormat mobjSDF;
private boolean mWithHostName = false;
private int mMaxLogs = 0;
/**
* Default constructor. This is required as the appender class is dynamically
* loaded.
*/
public DailyFileAppender() {
super();
}
/* (non-Javadoc)
* @see org.apache.log4j.FileAppender#activateOptions()
*/
@Override
public void activateOptions() {
setFileName();
cleanupOldFiles();
super.activateOptions();
}
/*------------------------------------------------------------------------------
* Getters
*----------------------------------------------------------------------------*/
public String getDatePattern() {
return this.mstrDatePattern;
}
@Override
public String getFile() {
return this.mstrFileName;
}
public boolean getWithHost() {
return mWithHostName;
}
public int getMaxLogs() {
return mMaxLogs;
}
/*------------------------------------------------------------------------------
* Setters
*----------------------------------------------------------------------------*/
public void setDatePattern(String pstrPattern) {
this.mstrDatePattern = checkPattern(pstrPattern);
if (mstrDatePattern.contains("dd") || mstrDatePattern.contains("DD")) {
mMonthOnly = false;
} else {
mMonthOnly = true;
}
}
@Override
public void setFile(String file) {
// Trim spaces from both ends. The users probably does not want
// trailing spaces in file names.
String val = file.trim();
mstrFileName = val;
}
public void setWithHost(boolean wh) {
mWithHostName = wh;
}
public void setMaxLogs(int ml) {
mMaxLogs = ml;
}
/*------------------------------------------------------------------------------
* Methods
*----------------------------------------------------------------------------*/
/* (non-Javadoc)
* @see org.apache.log4j.WriterAppender#subAppend(org.apache.log4j.spi.LoggingEvent)
*/
@Override
protected void subAppend(LoggingEvent pobjEvent) {
Date dtNow = new Date(System.currentTimeMillis());
boolean rollover = false;
if (mMonthOnly) {
Calendar now = Calendar.getInstance();
Calendar cur = Calendar.getInstance();
now.setTime(dtNow);
cur.setTime(mstrDate);
rollover = !(now.get(Calendar.YEAR) == cur.get(Calendar.YEAR) && now.get(Calendar.MONTH) == cur
.get(Calendar.MONTH));
} else {
rollover = !(DateUtils.isSameDay(dtNow, mstrDate));
}
if (rollover) {
try {
rollOver(dtNow);
} catch (IOException IOEx) {
LogLog.error("rollOver() failed!", IOEx);
}
}
super.subAppend(pobjEvent);
}
/*------------------------------------------------------------------------------
* Helpers
*----------------------------------------------------------------------------*/
/**
* The helper function to validate the DatePattern.
*
* @param pstrPattern The DatePattern to be validated.
* @return The validated date pattern or defautlt DATE_PATTERN
*/
private String checkPattern(String pstrPattern) {
String strRet = null;
SimpleDateFormat objFmt = new SimpleDateFormat(DATE_PATTERN);
try {
this.mobjSDF = new SimpleDateFormat(pstrPattern);
strRet = pstrPattern;
} catch (NullPointerException NPExIgnore) {
LogLog.error("Invalid DatePattern " + pstrPattern, NPExIgnore);
this.mobjSDF = objFmt;
strRet = DATE_PATTERN;
} catch (IllegalArgumentException IlArgExIgnore) {
LogLog.error("Invalid DatePattern " + pstrPattern, IlArgExIgnore);
this.mobjSDF = objFmt;
strRet = DATE_PATTERN;
} finally {
objFmt = null;
}
return strRet;
}
/**
* This function is responsible for performing the actual file rollover.
*
* @param pstrName The name of the new folder based on current system date.
* @throws IOException if IO error
*/
private static boolean deletingFiles = false;
private void cleanupOldFiles() {
// If we need to delete log files
if (mMaxLogs > 0 && !deletingFiles) {
deletingFiles = true;
// Determine the final file extension with the hostname
String hostFileExt = null;
try {
hostFileExt = "." + java.net.InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
LogLog.error("Unable to retrieve host name");
}
try {
// Array to hold the logs we are going to keep
File[] logsToKeep = new File[mMaxLogs];
// Get a 'master' file handle, and the parent directory from it
File logMaster = new File(mstrFileName);
File logDir = logMaster.getParentFile();
if (logDir.isDirectory()) {
// Iterate all the files in that directory
File[] logArr = logDir.listFiles();
for (File curLog : logArr) {
LogLog.debug("Comparing '" + curLog.getAbsolutePath() + "' to '" + mstrFileName + "'");
String name = curLog.getAbsolutePath();
// First, see if we are not using hostname, or the log file ends with this host
if (!mWithHostName || (hostFileExt != null && name.endsWith(hostFileExt))) {
// Check that the file is indeed one we want (contains the master file name)
if (name.contains(mstrFileName)) {
// Iterate through the array of logs we are keeping
for (int i = 0; curLog != null && i < logsToKeep.length; i++) {
// Have we exhausted the 'to keep' array?
if (logsToKeep[i] == null) {
// Empty space, retain this log file
logsToKeep[i] = curLog;
curLog = null;
} else if (logsToKeep[i].getName().compareTo(curLog.getName()) < 0) {
// If the 'kept' file is older than the current one
// Replace tested entry with current file
File temp = logsToKeep[i];
logsToKeep[i] = curLog;
curLog = temp;
}
}
// If we have a 'current' entry at this point, it's a log we don't want
if (curLog != null) {
LogLog.debug("Deleting log " + curLog.getName());
if (!curLog.delete()) {
LogLog.error("Unable to delete log file");
}
}
}
}
}
}
} catch (Exception e) {
// Don't worry about exceptions
} finally {
deletingFiles = false;
}
}
}
private void rollOver(Date dtNow) throws IOException {
mstrDate = dtNow;
setFileName();
this.setFile(fileName, true, bufferedIO, bufferSize);
cleanupOldFiles();
}
private void setFileName() {
fileName = mstrFileName + "." + mobjSDF.format(mstrDate);
if (mWithHostName) {
try {
fileName += "." + java.net.InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
LogLog.error("Unable to retrieve host name");
}
}
}
}

View File

@@ -12,7 +12,7 @@ import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.factory.ContentServiceFactory;
@@ -24,7 +24,7 @@ import org.dspace.core.Context;
*/
public class GoogleBitstreamComparator implements Comparator<Bitstream> {
private final static Logger log = org.apache.logging.log4j.LogManager.getLogger(GoogleBitstreamComparator.class);
private final static Logger log = Logger.getLogger(GoogleBitstreamComparator.class);
HashMap<String, Integer> priorityMap = new HashMap<>();
@@ -86,7 +86,7 @@ public class GoogleBitstreamComparator implements Comparator<Bitstream> {
if (priority1 > priority2) {
return 1;
} else if (priority1 == priority2) {
if (b1.getSizeBytes() <= b2.getSizeBytes()) {
if (b1.getSize() <= b2.getSize()) {
return 1;
} else {
return -1;

View File

@@ -26,7 +26,7 @@ import java.util.Properties;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
@@ -51,7 +51,7 @@ import org.jdom.Element;
@SuppressWarnings("deprecation")
public class GoogleMetadata {
private final static Logger log = org.apache.logging.log4j.LogManager.getLogger(GoogleMetadata.class);
private final static Logger log = Logger.getLogger(GoogleMetadata.class);
protected static final String GOOGLE_PREFIX = "google.";

View File

@@ -121,24 +121,20 @@ public class IndexVersion {
}
// Open this index directory in Lucene
Directory indexDir = FSDirectory.open(dir.toPath());
Directory indexDir = FSDirectory.open(dir);
// Get info on the Lucene segment file(s) in index directory
SegmentInfos sis;
SegmentInfos sis = new SegmentInfos();
try {
sis = SegmentInfos.readLatestCommit(indexDir);
sis.read(indexDir);
} catch (IOException ie) {
// Wrap default IOException, providing more info about which directory cannot be read
throw new IOException("Could not read Lucene segments files in " + dir.getAbsolutePath(), ie);
}
if (null == sis) {
throw new IOException("Could not read Lucene segments files in " + dir.getAbsolutePath());
}
// If we have a valid Solr index dir, but it has no existing segments
// then just return an empty string. It's a valid but empty index.
if (sis.size() == 0) {
if (sis != null && sis.size() == 0) {
return "";
}

View File

@@ -1,227 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.util;
import java.io.File;
import java.io.IOException;
import java.sql.SQLException;
import java.util.LinkedList;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.EntityType;
import org.dspace.content.RelationshipType;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.EntityTypeService;
import org.dspace.content.service.RelationshipService;
import org.dspace.content.service.RelationshipTypeService;
import org.dspace.core.Context;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
 * This script initializes the database with a set of relationship types that are
 * defined in an XML file given to this script.
 * The XML file needs a proper structure and must define the fields of the RelationshipType object.
 */
public class InitializeEntities {

    private final static Logger log = LogManager.getLogger();

    // Services used to look up / create entity types and relationship types.
    private RelationshipTypeService relationshipTypeService;
    private RelationshipService relationshipService;
    private EntityTypeService entityTypeService;

    private InitializeEntities() {
        relationshipTypeService = ContentServiceFactory.getInstance().getRelationshipTypeService();
        relationshipService = ContentServiceFactory.getInstance().getRelationshipService();
        entityTypeService = ContentServiceFactory.getInstance().getEntityTypeService();
    }

    /**
     * The main method for this script
     *
     * @param argv The commandline arguments given with this command
     * @throws SQLException If something goes wrong with the database
     * @throws AuthorizeException If something goes wrong with permissions
     * @throws ParseException If something goes wrong with the parsing
     */
    public static void main(String[] argv) throws SQLException, AuthorizeException, ParseException {
        InitializeEntities initializeEntities = new InitializeEntities();
        CommandLineParser parser = new PosixParser();
        Options options = createCommandLineOptions();
        CommandLine line = parser.parse(options, argv);
        // Check -h before requiring -f, so "-h" alone prints usage instead of
        // failing with "No file location was entered".
        checkHelpEntered(options, line);
        String fileLocation = getFileLocationFromCommandLine(line);
        initializeEntities.run(fileLocation);
    }

    /**
     * Prints usage and exits when the -h option was given.
     */
    private static void checkHelpEntered(Options options, CommandLine line) {
        if (line.hasOption("h")) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("Intialize Entities", options);
            System.exit(0);
        }
    }

    /**
     * Returns the value of the -f option, exiting with an error when absent.
     */
    private static String getFileLocationFromCommandLine(CommandLine line) {
        String query = line.getOptionValue("f");
        if (StringUtils.isEmpty(query)) {
            System.out.println("No file location was entered");
            log.info("No file location was entered");
            System.exit(1);
        }
        return query;
    }

    protected static Options createCommandLineOptions() {
        Options options = new Options();
        options.addOption("f", "file", true, "the location for the file containing the xml data");
        return options;
    }

    /**
     * Parses the XML file and persists the relationship types it defines,
     * with the authorisation system turned off.
     */
    private void run(String fileLocation) throws SQLException, AuthorizeException {
        Context context = new Context();
        context.turnOffAuthorisationSystem();
        this.parseXMLToRelations(context, fileLocation);
        context.complete();
    }

    /**
     * Reads all &lt;type&gt; elements from the XML file and creates or updates
     * the corresponding RelationshipType rows. Parsing/IO/SQL errors are
     * logged; AuthorizeException propagates.
     */
    private void parseXMLToRelations(Context context, String fileLocation) throws AuthorizeException {
        try {
            File fXmlFile = new File(fileLocation);
            DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
            // Harden the parser against entity-expansion / XXE style attacks;
            // the XML content itself may not be trustworthy.
            dbFactory.setFeature(javax.xml.XMLConstants.FEATURE_SECURE_PROCESSING, true);
            DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
            Document doc = dBuilder.parse(fXmlFile);
            doc.getDocumentElement().normalize();
            NodeList nList = doc.getElementsByTagName("type");
            for (int i = 0; i < nList.getLength(); i++) {
                Node nNode = nList.item(i);
                if (nNode.getNodeType() == Node.ELEMENT_NODE) {
                    Element eElement = (Element) nNode;
                    String leftType = eElement.getElementsByTagName("leftType").item(0).getTextContent();
                    String rightType = eElement.getElementsByTagName("rightType").item(0).getTextContent();
                    String leftLabel = eElement.getElementsByTagName("leftLabel").item(0).getTextContent();
                    String rightLabel = eElement.getElementsByTagName("rightLabel").item(0).getTextContent();
                    NodeList leftCardinalityList = eElement.getElementsByTagName("leftCardinality");
                    NodeList rightCardinalityList = eElement.getElementsByTagName("rightCardinality");
                    String leftCardinalityMin = "";
                    String leftCardinalityMax = "";
                    String rightCardinalityMin = "";
                    String rightCardinalityMax = "";
                    for (int j = 0; j < leftCardinalityList.getLength(); j++) {
                        Node node = leftCardinalityList.item(j);
                        leftCardinalityMin = getString(leftCardinalityMin, (Element) node, "min");
                        leftCardinalityMax = getString(leftCardinalityMax, (Element) node, "max");
                    }
                    for (int j = 0; j < rightCardinalityList.getLength(); j++) {
                        Node node = rightCardinalityList.item(j);
                        rightCardinalityMin = getString(rightCardinalityMin, (Element) node, "min");
                        rightCardinalityMax = getString(rightCardinalityMax, (Element) node, "max");
                    }
                    populateRelationshipType(context, leftType, rightType, leftLabel, rightLabel,
                                             leftCardinalityMin, leftCardinalityMax,
                                             rightCardinalityMin, rightCardinalityMax);
                }
            }
        } catch (ParserConfigurationException | SAXException | IOException | SQLException e) {
            log.error("An error occurred while parsing the XML file to relations", e);
        }
    }

    /**
     * Returns the text content of the first child named {@code minOrMax},
     * or {@code currentValue} unchanged when no such child exists.
     */
    private String getString(String currentValue, Element node, String minOrMax) {
        NodeList matches = node.getElementsByTagName(minOrMax);
        if (matches.getLength() > 0) {
            return matches.item(0).getTextContent();
        }
        return currentValue;
    }

    /**
     * Parses a cardinality bound; a blank string means "not specified" and
     * maps to null (unbounded).
     */
    private Integer toIntegerOrNull(String value) {
        return StringUtils.isNotBlank(value) ? Integer.parseInt(value) : null;
    }

    /**
     * Creates the left/right entity types when missing, then creates the
     * relationship type - or updates its cardinalities when it already exists.
     */
    private void populateRelationshipType(Context context, String leftType, String rightType, String leftLabel,
                                          String rightLabel, String leftCardinalityMin, String leftCardinalityMax,
                                          String rightCardinalityMin, String rightCardinalityMax)
        throws SQLException, AuthorizeException {

        EntityType leftEntityType = entityTypeService.findByEntityType(context, leftType);
        if (leftEntityType == null) {
            leftEntityType = entityTypeService.create(context, leftType);
        }
        EntityType rightEntityType = entityTypeService.findByEntityType(context, rightType);
        if (rightEntityType == null) {
            rightEntityType = entityTypeService.create(context, rightType);
        }
        Integer leftCardinalityMinInteger = toIntegerOrNull(leftCardinalityMin);
        Integer leftCardinalityMaxInteger = toIntegerOrNull(leftCardinalityMax);
        Integer rightCardinalityMinInteger = toIntegerOrNull(rightCardinalityMin);
        Integer rightCardinalityMaxInteger = toIntegerOrNull(rightCardinalityMax);
        RelationshipType relationshipType = relationshipTypeService
            .findbyTypesAndLabels(context, leftEntityType, rightEntityType, leftLabel, rightLabel);
        if (relationshipType == null) {
            relationshipTypeService.create(context, leftEntityType, rightEntityType, leftLabel, rightLabel,
                                           leftCardinalityMinInteger, leftCardinalityMaxInteger,
                                           rightCardinalityMinInteger, rightCardinalityMaxInteger);
        } else {
            relationshipType.setLeftMinCardinality(leftCardinalityMinInteger);
            relationshipType.setLeftMaxCardinality(leftCardinalityMaxInteger);
            relationshipType.setRightMinCardinality(rightCardinalityMinInteger);
            relationshipType.setRightMaxCardinality(rightCardinalityMaxInteger);
            relationshipTypeService.update(context, relationshipType);
        }
    }
}

View File

@@ -14,7 +14,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
import org.dspace.app.util.service.MetadataExposureService;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
@@ -58,7 +58,7 @@ import org.springframework.beans.factory.annotation.Autowired;
* @version $Revision: 3734 $
*/
public class MetadataExposureServiceImpl implements MetadataExposureService {
protected Logger log = org.apache.logging.log4j.LogManager.getLogger(MetadataExposureServiceImpl.class);
protected Logger log = Logger.getLogger(MetadataExposureServiceImpl.class);
protected Map<String, Set<String>> hiddenElementSets = null;
protected Map<String, Map<String, Set<String>>> hiddenElementMaps = null;

View File

@@ -20,13 +20,11 @@ import com.sun.syndication.feed.module.opensearch.OpenSearchModule;
import com.sun.syndication.feed.module.opensearch.entity.OSQuery;
import com.sun.syndication.feed.module.opensearch.impl.OpenSearchModuleImpl;
import com.sun.syndication.io.FeedException;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
import org.dspace.app.util.service.OpenSearchService;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.discovery.IndexableObject;
import org.dspace.handle.service.HandleService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
@@ -35,6 +33,7 @@ import org.jdom.JDOMException;
import org.jdom.Namespace;
import org.jdom.output.DOMOutputter;
import org.jdom.output.XMLOutputter;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.w3c.dom.Document;
@@ -54,14 +53,20 @@ import org.w3c.dom.Document;
*
* @author Richard Rodgers
*/
public class OpenSearchServiceImpl implements OpenSearchService {
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(OpenSearchServiceImpl.class);
public class OpenSearchServiceImpl implements OpenSearchService, InitializingBean {
private static final Logger log = Logger.getLogger(OpenSearchServiceImpl.class);
// are open search queries enabled?
protected boolean enabled = false;
// supported results formats
protected List<String> formats = null;
// Namespaces used
protected final String osNs = "http://a9.com/-/spec/opensearch/1.1/";
@Autowired(required = true)
protected ConfigurationService configurationService;
// base search UI URL
protected String uiUrl = null;
// base search service URL
protected String svcUrl = null;
@Autowired(required = true)
protected HandleService handleService;
@@ -70,35 +75,25 @@ public class OpenSearchServiceImpl implements OpenSearchService {
}
@Override
public List<String> getFormats() {
List<String> formats = new ArrayList<>();
// read formats only if enabled
if (isEnabled()) {
String[] fmts = configurationService.getArrayProperty("websvc.opensearch.formats");
public void afterPropertiesSet() throws Exception {
ConfigurationService config = DSpaceServicesFactory.getInstance().getConfigurationService();
enabled = config.getBooleanProperty("websvc.opensearch.enable");
svcUrl = config.getProperty("dspace.url") + "/" +
config.getProperty("websvc.opensearch.svccontext");
uiUrl = config.getProperty("dspace.url") + "/" +
config.getProperty("websvc.opensearch.uicontext");
// read rest of config info if enabled
formats = new ArrayList<String>();
if (enabled) {
String[] fmts = config.getArrayProperty("websvc.opensearch.formats");
formats = Arrays.asList(fmts);
}
return formats;
}
@Override
public boolean isEnabled() {
return configurationService.getBooleanProperty("websvc.opensearch.enable");
}
/**
* Get base search service URL (websvc.opensearch.svccontext)
*/
protected String getBaseSearchServiceURL() {
return configurationService.getProperty("dspace.url") + "/" +
configurationService.getProperty("websvc.opensearch.svccontext");
}
/**
* Get base search UI URL (websvc.opensearch.uicontext)
*/
protected String getBaseSearchUIURL() {
return configurationService.getProperty("dspace.url") + "/" +
configurationService.getProperty("websvc.opensearch.uicontext");
public List<String> getFormats() {
return formats;
}
@Override
@@ -120,7 +115,7 @@ public class OpenSearchServiceImpl implements OpenSearchService {
@Override
public String getResultsString(Context context, String format, String query, int totalResults, int start,
int pageSize,
IndexableObject scope, List<IndexableObject> results,
DSpaceObject scope, List<DSpaceObject> results,
Map<String, String> labels) throws IOException {
try {
return getResults(context, format, query, totalResults, start, pageSize, scope, results, labels)
@@ -134,7 +129,7 @@ public class OpenSearchServiceImpl implements OpenSearchService {
@Override
public Document getResultsDoc(Context context, String format, String query, int totalResults, int start,
int pageSize,
IndexableObject scope, List<IndexableObject> results, Map<String, String> labels)
DSpaceObject scope, List<DSpaceObject> results, Map<String, String> labels)
throws IOException {
try {
return getResults(context, format, query, totalResults, start, pageSize, scope, results, labels)
@@ -146,8 +141,8 @@ public class OpenSearchServiceImpl implements OpenSearchService {
}
protected SyndicationFeed getResults(Context context, String format, String query, int totalResults, int start,
int pageSize, IndexableObject scope,
List<IndexableObject> results, Map<String, String> labels) {
int pageSize,
DSpaceObject scope, List<DSpaceObject> results, Map<String, String> labels) {
// Encode results in requested format
if ("rss".equals(format)) {
format = "rss_2.0";
@@ -226,13 +221,13 @@ public class OpenSearchServiceImpl implements OpenSearchService {
root.addContent(fav);
}
// service URLs
for (String format : getFormats()) {
for (String format : formats) {
Element url = new Element("Url", ns).setAttribute("type", getContentType(format));
StringBuilder template = new StringBuilder();
if ("html".equals(format)) {
template.append(getBaseSearchUIURL());
template.append(uiUrl);
} else {
template.append(getBaseSearchServiceURL());
template.append(svcUrl);
}
template.append("?query={searchTerms}");
if (!"html".equals(format)) {

View File

@@ -11,7 +11,7 @@ import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
@@ -28,7 +28,7 @@ import org.springframework.util.StopWatch;
* Invocation: dsrun org.dspace.app.util.OptimizeSelectCollection
*/
public class OptimizeSelectCollection {
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(OptimizeSelectCollection.class);
private static final Logger log = Logger.getLogger(OptimizeSelectCollection.class);
private static Context context;
private static ArrayList<EPerson> brokenPeople;

View File

@@ -12,7 +12,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
/**
* Class representing a single Item Submission config definition, organized into
@@ -44,7 +44,7 @@ public class SubmissionConfig implements Serializable {
/**
* log4j logger
*/
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(SubmissionConfig.class);
private static Logger log = Logger.getLogger(SubmissionConfig.class);
/**
* Constructs a new Submission Configuration object, based on the XML

View File

@@ -19,7 +19,7 @@ import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.FactoryConfigurationError;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
@@ -71,7 +71,7 @@ public class SubmissionConfigReader {
/**
* log4j logger
*/
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(SubmissionConfigReader.class);
private static Logger log = Logger.getLogger(SubmissionConfigReader.class);
/**
* The fully qualified pathname of the directory containing the Item Submission Configuration file

View File

@@ -34,9 +34,9 @@ import com.sun.syndication.feed.synd.SyndPerson;
import com.sun.syndication.feed.synd.SyndPersonImpl;
import com.sun.syndication.io.FeedException;
import com.sun.syndication.io.SyndFeedOutput;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
@@ -52,7 +52,6 @@ import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.discovery.IndexableObject;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
@@ -70,7 +69,7 @@ import org.w3c.dom.Document;
* @author Larry Stone
*/
public class SyndicationFeed {
protected final Logger log = org.apache.logging.log4j.LogManager.getLogger(SyndicationFeed.class);
protected final Logger log = Logger.getLogger(SyndicationFeed.class);
/**
@@ -180,12 +179,12 @@ public class SyndicationFeed {
*
* @param request request
* @param context context
* @param dso the scope
* @param dso DSpaceObject
* @param items array of objects
* @param labels label map
*/
public void populate(HttpServletRequest request, Context context, IndexableObject dso,
List<IndexableObject> items, Map<String, String> labels) {
public void populate(HttpServletRequest request, Context context, DSpaceObject dso,
List<? extends DSpaceObject> items, Map<String, String> labels) {
String logoURL = null;
String objectURL = null;
String defaultTitle = null;
@@ -209,7 +208,6 @@ public class SyndicationFeed {
if (cols != null && cols.length() > 1 && cols.contains(col.getHandle())) {
podcastFeed = true;
}
objectURL = resolveURL(request, col);
} else if (dso.getType() == Constants.COMMUNITY) {
Community comm = (Community) dso;
defaultTitle = comm.getName();
@@ -219,9 +217,8 @@ public class SyndicationFeed {
if (comms != null && comms.length() > 1 && comms.contains(comm.getHandle())) {
podcastFeed = true;
}
objectURL = resolveURL(request, comm);
}
objectURL = resolveURL(request, dso);
if (logo != null) {
logoURL = urlOfBitstream(request, logo);
}
@@ -250,11 +247,11 @@ public class SyndicationFeed {
// add entries for items
if (items != null) {
List<SyndEntry> entries = new ArrayList<SyndEntry>();
for (IndexableObject idxObj : items) {
if (idxObj.getType() != Constants.ITEM) {
for (DSpaceObject itemDSO : items) {
if (itemDSO.getType() != Constants.ITEM) {
continue;
}
Item item = (Item) idxObj;
Item item = (Item) itemDSO;
boolean hasDate = false;
SyndEntry entry = new SyndEntryImpl();
entries.add(entry);
@@ -369,7 +366,7 @@ public class SyndicationFeed {
if (ArrayUtils.contains(podcastableMIMETypes, mime)) {
SyndEnclosure enc = new SyndEnclosureImpl();
enc.setType(bit.getFormat(context).getMIMEType());
enc.setLength(bit.getSizeBytes());
enc.setLength(bit.getSize());
enc.setUrl(urlOfBitstream(request, bit));
enclosures.add(enc);
} else {

View File

@@ -22,9 +22,9 @@ import java.util.Set;
import java.util.UUID;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.collections4.ListUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.commons.collections.ListUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
@@ -44,7 +44,7 @@ public class Util {
// cache for source version result
private static String sourceVersion = null;
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(Util.class);
private static Logger log = Logger.getLogger(Util.class);
/**
* Default constructor. Must be protected as org.dspace.xmlworkflow.WorkflowUtils extends it
@@ -418,12 +418,11 @@ public class Util {
List<DCInputSet> inputSets = inputsReader.getInputsByCollectionHandle(col_handle);
// Replace the values of Metadatum[] with the correct ones in case
// of
// controlled vocabularies
String currentField = Utils.standardize(schema, element, qualifier, ".");
for (DCInputSet inputSet : inputSets) {
// Replace the values of Metadatum[] with the correct ones in case
// of
// controlled vocabularies
String currentField = Utils.standardize(schema, element, qualifier, ".");
if (inputSet != null) {
@@ -431,20 +430,19 @@ public class Util {
for (int p = 0; p < fieldsNums; p++) {
DCInput[][] inputs = inputSet.getFields();
DCInput[] inputs = inputSet.getFields();
if (inputs != null) {
for (int i = 0; i < inputs.length; i++) {
for (int j = 0; j < inputs[i].length; j++) {
String inputField = Utils
.standardize(inputs[i][j].getSchema(), inputs[i][j].getElement(),
inputs[i][j].getQualifier(), ".");
if (currentField.equals(inputField)) {
myInputs = inputs[i][j];
myInputsFound = true;
break;
}
String inputField = Utils.standardize(inputs[i].getSchema(), inputs[i].getElement(),
inputs[i].getQualifier(), ".");
if (currentField.equals(inputField)) {
myInputs = inputs[i];
myInputsFound = true;
break;
}
}
}
@@ -473,26 +471,6 @@ public class Util {
return toReturn;
}
/**
* Split a list in an array of i sub-lists uniformly sized
*
* @param idsList the list to split
* @param i the number of sublists to return
*
* @return an array of sub-lists of fixed size
*/
public static <T> List<T>[] splitList(List<T> idsList, int i) {
int setmin = idsList.size() / i;
List<T>[] result = new List[i];
int offset = 0;
for (int idx = 0; idx < i - 1; idx++) {
result[idx] = idsList.subList(offset, offset + setmin);
offset += setmin;
}
result[i - 1] = idsList.subList(offset, idsList.size());
return result;
}
public static List<String> differenceInSubmissionFields(Collection fromCollection, Collection toCollection)
throws DCInputsReaderException {
DCInputsReader reader = new DCInputsReader();
@@ -502,17 +480,13 @@ public class Util {
Set<String> fromFieldName = new HashSet<>();
Set<String> toFieldName = new HashSet<>();
for (DCInputSet ff : from) {
for (DCInput[] fdcrow : ff.getFields()) {
for (DCInput fdc : fdcrow) {
fromFieldName.add(fdc.getFieldName());
}
for (DCInput fdc : ff.getFields()) {
fromFieldName.add(fdc.getFieldName());
}
}
for (DCInputSet tt : to) {
for (DCInput[] tdcrow : tt.getFields()) {
for (DCInput tdc : tdcrow) {
toFieldName.add(tdc.getFieldName());
}
for (DCInput tdc : tt.getFields()) {
toFieldName.add(tdc.getFieldName());
}
}

View File

@@ -18,7 +18,7 @@ import org.apache.http.HttpStatus;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
import org.dspace.app.util.dao.WebAppDAO;
import org.dspace.app.util.service.WebAppService;
import org.dspace.core.Context;
@@ -33,7 +33,7 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
public class WebAppServiceImpl implements WebAppService {
private final Logger log = org.apache.logging.log4j.LogManager.getLogger(WebAppServiceImpl.class);
private final Logger log = Logger.getLogger(WebAppServiceImpl.class);
@Autowired(required = true)
protected WebAppDAO webAppDAO;

View File

@@ -11,7 +11,7 @@ import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang.StringUtils;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;

View File

@@ -14,7 +14,6 @@ import java.util.Map;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import org.dspace.discovery.IndexableObject;
import org.w3c.dom.Document;
/**
@@ -42,13 +41,6 @@ public interface OpenSearchService {
*/
public List<String> getFormats();
/**
* Determine if the module is active
*
* @return boolean indicator if the OpenSearch module is enabled or not
*/
public boolean isEnabled();
/**
* Returns a mime-type associated with passed format
*
@@ -84,7 +76,7 @@ public interface OpenSearchService {
* @param totalResults - the hit count
* @param start - start result index
* @param pageSize - page size
* @param scope - search scope, null or the community/collection
* @param scope - search scope, null or community/collection handle
* @param results the retreived DSpace objects satisfying search
* @param labels labels to apply - format specific
* @return formatted search results
@@ -92,7 +84,7 @@ public interface OpenSearchService {
*/
public String getResultsString(Context context, String format, String query, int totalResults, int start,
int pageSize,
IndexableObject scope, List<IndexableObject> results,
DSpaceObject scope, List<DSpaceObject> results,
Map<String, String> labels) throws IOException;
/**
@@ -104,7 +96,7 @@ public interface OpenSearchService {
* @param totalResults - the hit count
* @param start - start result index
* @param pageSize - page size
* @param scope - search scope, null or the community/collection
* @param scope - search scope, null or community/collection handle
* @param results the retreived DSpace objects satisfying search
* @param labels labels to apply - format specific
* @return formatted search results
@@ -112,7 +104,7 @@ public interface OpenSearchService {
*/
public Document getResultsDoc(Context context, String format, String query, int totalResults, int start,
int pageSize,
IndexableObject scope, List<IndexableObject> results, Map<String, String> labels)
DSpaceObject scope, List<DSpaceObject> results, Map<String, String> labels)
throws IOException;
public DSpaceObject resolveScope(Context context, String scope) throws SQLException;

View File

@@ -210,10 +210,4 @@ public interface AuthenticationMethod {
public String loginPageURL(Context context,
HttpServletRequest request,
HttpServletResponse response);
/**
 * Returns a short name that uniquely identifies this authentication method
 * (e.g. a lowercase token suitable for use as a configuration/lookup key).
 *
 * @return The authentication method name
 */
public String getName();
}

View File

@@ -9,7 +9,6 @@ package org.dspace.authenticate;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -17,15 +16,15 @@ import java.util.UUID;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.logging.log4j.Logger;
import org.apache.commons.collections.ListUtils;
import org.apache.log4j.Logger;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.core.factory.CoreServiceFactory;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.GroupService;
import org.dspace.service.ClientInfoService;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
@@ -50,7 +49,7 @@ public class IPAuthentication implements AuthenticationMethod {
/**
* Our logger
*/
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(IPAuthentication.class);
private static Logger log = Logger.getLogger(IPAuthentication.class);
/**
* Whether to look for x-forwarded headers for logging IP addresses
@@ -68,7 +67,6 @@ public class IPAuthentication implements AuthenticationMethod {
protected List<IPMatcher> ipNegativeMatchers;
protected GroupService groupService;
protected ClientInfoService clientInfoService;
/**
@@ -93,7 +91,6 @@ public class IPAuthentication implements AuthenticationMethod {
ipMatcherGroupIDs = new HashMap<>();
ipMatcherGroupNames = new HashMap<>();
groupService = EPersonServiceFactory.getInstance().getGroupService();
clientInfoService = CoreServiceFactory.getInstance().getClientInfoService();
List<String> propNames = DSpaceServicesFactory.getInstance().getConfigurationService()
.getPropertyKeys("authentication-ip");
@@ -167,12 +164,23 @@ public class IPAuthentication implements AuthenticationMethod {
public List<Group> getSpecialGroups(Context context, HttpServletRequest request)
throws SQLException {
if (request == null) {
return Collections.EMPTY_LIST;
return ListUtils.EMPTY_LIST;
}
List<Group> groups = new ArrayList<Group>();
// Get the user's IP address
String addr = clientInfoService.getClientIp(request);
String addr = request.getRemoteAddr();
if (useProxies == null) {
useProxies = ConfigurationManager.getBooleanProperty("useProxies", false);
}
if (useProxies && request.getHeader("X-Forwarded-For") != null) {
/* This header is a comma delimited list */
for (String xfip : request.getHeader("X-Forwarded-For").split(",")) {
if (!request.getHeader("X-Forwarded-For").contains(addr)) {
addr = xfip.trim();
}
}
}
for (IPMatcher ipm : ipMatchers) {
try {
@@ -268,9 +276,4 @@ public class IPAuthentication implements AuthenticationMethod {
HttpServletResponse response) {
return null;
}
// Unique short identifier for this authentication method; the constant
// "ip" distinguishes IP-address-based authentication from the other methods.
@Override
public String getName() {
return "ip";
}
}

View File

@@ -10,7 +10,7 @@ package org.dspace.authenticate;
import java.net.Inet6Address;
import java.net.UnknownHostException;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
/**
* <p>
@@ -38,7 +38,7 @@ import org.apache.logging.log4j.Logger;
* @version $Revision$
*/
public class IPMatcher {
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(IPMatcher.class);
private static Logger log = Logger.getLogger(IPMatcher.class);
/**
* Network to match

View File

@@ -10,7 +10,6 @@ package org.dspace.authenticate;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Collections;
import java.util.Hashtable;
import java.util.List;
import javax.naming.NamingEnumeration;
@@ -28,8 +27,9 @@ import javax.naming.ldap.StartTlsResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.commons.collections.ListUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.authenticate.factory.AuthenticateServiceFactory;
import org.dspace.authenticate.service.AuthenticationService;
import org.dspace.authorize.AuthorizeException;
@@ -61,7 +61,7 @@ public class LDAPAuthentication
/**
* log4j category
*/
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(LDAPAuthentication.class);
private static Logger log = Logger.getLogger(LDAPAuthentication.class);
protected AuthenticationService authenticationService = AuthenticateServiceFactory.getInstance()
.getAuthenticationService();
@@ -136,7 +136,7 @@ public class LDAPAuthentication
log.warn(LogManager.getHeader(context,
"ldap_specialgroup",
"Group defined in login.specialgroup does not exist"));
return Collections.EMPTY_LIST;
return ListUtils.EMPTY_LIST;
} else {
return Arrays.asList(ldapGroup);
}
@@ -145,7 +145,7 @@ public class LDAPAuthentication
} catch (Exception npe) {
// The user is not an LDAP user, so we don't need to worry about them
}
return Collections.EMPTY_LIST;
return ListUtils.EMPTY_LIST;
}
/*
@@ -639,11 +639,6 @@ public class LDAPAuthentication
return null;
}
// Unique short identifier for this authentication method; the constant
// "ldap" distinguishes LDAP-based authentication from the other methods.
@Override
public String getName() {
return "ldap";
}
/*
* Add authenticated users to the group defined in dspace.cfg by
* the authentication-ldap.login.groupmap.* key.

View File

@@ -9,13 +9,13 @@ package org.dspace.authenticate;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.collections.ListUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.eperson.EPerson;
@@ -49,7 +49,7 @@ public class PasswordAuthentication
/**
* log4j category
*/
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PasswordAuthentication.class);
private static Logger log = Logger.getLogger(PasswordAuthentication.class);
/**
@@ -151,7 +151,7 @@ public class PasswordAuthentication
"password_specialgroup",
"Group defined in modules/authentication-password.cfg login" +
".specialgroup does not exist"));
return Collections.EMPTY_LIST;
return ListUtils.EMPTY_LIST;
} else {
return Arrays.asList(specialGroup);
}
@@ -160,7 +160,7 @@ public class PasswordAuthentication
} catch (Exception e) {
log.error(LogManager.getHeader(context, "getSpecialGroups", ""), e);
}
return Collections.EMPTY_LIST;
return ListUtils.EMPTY_LIST;
}
/**
@@ -242,9 +242,4 @@ public class PasswordAuthentication
HttpServletResponse response) {
return null;
}
// Unique short identifier for this authentication method; the constant
// "password" distinguishes password-based authentication from the other methods.
@Override
public String getName() {
return "password";
}
}

View File

@@ -12,7 +12,6 @@ import java.net.URLEncoder;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
@@ -23,8 +22,9 @@ import java.util.UUID;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.commons.collections.ListUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.authenticate.factory.AuthenticateServiceFactory;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataField;
@@ -70,7 +70,7 @@ public class ShibAuthentication implements AuthenticationMethod {
/**
* log4j category
*/
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(ShibAuthentication.class);
private static Logger log = Logger.getLogger(ShibAuthentication.class);
/**
* Additional metadata mappings
@@ -288,7 +288,7 @@ public class ShibAuthentication implements AuthenticationMethod {
if (request == null ||
context.getCurrentUser() == null ||
request.getSession().getAttribute("shib.authenticated") == null) {
return Collections.EMPTY_LIST;
return ListUtils.EMPTY_LIST;
}
// If we have already calculated the special groups then return them.
@@ -404,7 +404,7 @@ public class ShibAuthentication implements AuthenticationMethod {
return new ArrayList<>(groups);
} catch (Throwable t) {
log.error("Unable to validate any sepcial groups this user may belong too because of an exception.", t);
return Collections.EMPTY_LIST;
return ListUtils.EMPTY_LIST;
}
}
@@ -538,11 +538,6 @@ public class ShibAuthentication implements AuthenticationMethod {
}
}
// Unique short identifier for this authentication method; the constant
// "shibboleth" distinguishes Shibboleth-based authentication from the other methods.
@Override
public String getName() {
return "shibboleth";
}
/**
* Identify an existing EPerson based upon the shibboleth attributes provided on
* the request object. There are three cases where this can occurr, each as

View File

@@ -21,7 +21,6 @@ import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.StringTokenizer;
@@ -29,8 +28,9 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.logging.log4j.Logger;
import org.apache.commons.collections.ListUtils;
import org.apache.commons.lang.ArrayUtils;
import org.apache.log4j.Logger;
import org.dspace.authenticate.factory.AuthenticateServiceFactory;
import org.dspace.authenticate.service.AuthenticationService;
import org.dspace.authorize.AuthorizeException;
@@ -105,7 +105,7 @@ public class X509Authentication implements AuthenticationMethod {
/**
* log4j category
*/
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(X509Authentication.class);
private static Logger log = Logger.getLogger(X509Authentication.class);
/**
* public key of CA to check client certs against.
@@ -442,7 +442,7 @@ public class X509Authentication implements AuthenticationMethod {
public List<Group> getSpecialGroups(Context context, HttpServletRequest request)
throws SQLException {
if (request == null) {
return Collections.EMPTY_LIST;
return ListUtils.EMPTY_LIST;
}
Boolean authenticated = false;
@@ -472,7 +472,7 @@ public class X509Authentication implements AuthenticationMethod {
return groups;
}
return Collections.EMPTY_LIST;
return ListUtils.EMPTY_LIST;
}
/**
@@ -589,9 +589,4 @@ public class X509Authentication implements AuthenticationMethod {
HttpServletResponse response) {
return loginPageURL;
}
// Unique short identifier for this authentication method; the constant
// "x509" distinguishes X.509 client-certificate authentication from the other methods.
@Override
public String getName() {
return "x509";
}
}

View File

@@ -7,7 +7,6 @@
*/
package org.dspace.authority;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.List;
@@ -23,8 +22,7 @@ import org.apache.solr.client.solrj.response.QueryResponse;
*/
public interface AuthoritySearchService {
public QueryResponse search(SolrQuery query)
throws SolrServerException, MalformedURLException, IOException;
public QueryResponse search(SolrQuery query) throws SolrServerException, MalformedURLException;
public List<String> getAllIndexedMetadataFields() throws Exception;

View File

@@ -46,10 +46,16 @@ public class AuthorityServiceImpl implements AuthorityService {
}
for (AuthorityIndexerInterface indexerInterface : indexers) {
List<AuthorityValue> authorityValues = indexerInterface.getAuthorityValues(context , item);
for (AuthorityValue authorityValue : authorityValues) {
indexingService.indexContent(authorityValue);
indexerInterface.init(context, item);
while (indexerInterface.hasMore()) {
AuthorityValue authorityValue = indexerInterface.nextValue();
if (authorityValue != null) {
indexingService.indexContent(authorityValue, true);
}
}
//Close up
indexerInterface.close();
}
//Commit to our server
indexingService.commit();

View File

@@ -12,10 +12,10 @@ import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.response.FacetField;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrInputDocument;
@@ -30,7 +30,7 @@ import org.dspace.core.ConfigurationManager;
*/
public class AuthoritySolrServiceImpl implements AuthorityIndexingService, AuthoritySearchService {
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthoritySolrServiceImpl.class);
private static final Logger log = Logger.getLogger(AuthoritySolrServiceImpl.class);
protected AuthoritySolrServiceImpl() {
@@ -39,17 +39,16 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho
/**
* Non-Static CommonsHttpSolrServer for processing indexing events.
*/
protected HttpSolrClient solr = null;
protected HttpSolrServer solr = null;
protected HttpSolrClient getSolr()
throws MalformedURLException, SolrServerException, IOException {
protected HttpSolrServer getSolr() throws MalformedURLException, SolrServerException {
if (solr == null) {
String solrService = ConfigurationManager.getProperty("solr.authority.server");
log.debug("Solr authority URL: " + solrService);
solr = new HttpSolrClient.Builder(solrService).build();
solr = new HttpSolrServer(solrService);
solr.setBaseURL(solrService);
SolrQuery solrQuery = new SolrQuery().setQuery("*:*");
@@ -61,7 +60,7 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho
}
@Override
public void indexContent(AuthorityValue value) {
public void indexContent(AuthorityValue value, boolean force) {
SolrInputDocument doc = value.getSolrInputDocument();
try {
@@ -130,8 +129,7 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho
}
@Override
public QueryResponse search(SolrQuery query)
throws SolrServerException, MalformedURLException, IOException {
public QueryResponse search(SolrQuery query) throws SolrServerException, MalformedURLException {
return getSolr().query(query);
}

View File

@@ -12,7 +12,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
/**
* This class contains a list of active authority types.
@@ -32,7 +32,7 @@ public class AuthorityTypes {
/**
* log4j logger
*/
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthorityTypes.class);
private static Logger log = Logger.getLogger(AuthorityTypes.class);
protected List<AuthorityValue> types = new ArrayList<AuthorityValue>();

View File

@@ -14,8 +14,8 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrInputDocument;
import org.dspace.authorize.AuthorizeException;
@@ -234,7 +234,7 @@ public class AuthorityValue {
/**
* log4j logger
*/
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthorityValue.class);
private static Logger log = Logger.getLogger(AuthorityValue.class);
@Override
public String toString() {

View File

@@ -13,8 +13,8 @@ import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
@@ -34,7 +34,7 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
public class AuthorityValueServiceImpl implements AuthorityValueService {
private final Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthorityValueServiceImpl.class);
private final Logger log = Logger.getLogger(AuthorityValueServiceImpl.class);
@Autowired(required = true)
protected AuthorityTypes authorityTypes;

View File

@@ -12,8 +12,8 @@ import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrInputDocument;

View File

@@ -1,20 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority;
import java.util.List;
/**
 * Interface for querying an external authority source, returning results as
 * {@link AuthorityValue} objects.
 *
 * @author Jonas Van Goolen (jonas at atmire dot com)
 */
public interface SolrAuthorityInterface {
/**
 * Query the authority source for values matching the given text.
 *
 * @param text the query text
 * @param max maximum number of results to return
 * @return matching authority values (at most {@code max})
 */
List<AuthorityValue> queryAuthorities(String text, int max);
/**
 * Look up a single authority value by its identifier.
 *
 * @param id the authority identifier
 * @return the matching authority value
 */
AuthorityValue queryAuthorityID(String id);
}

View File

@@ -19,7 +19,7 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityValueService;
import org.dspace.content.Item;
@@ -40,7 +40,7 @@ public class UpdateAuthorities {
/**
* log4j logger
*/
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(UpdateAuthorities.class);
private static Logger log = Logger.getLogger(UpdateAuthorities.class);
protected PrintWriter print = null;

View File

@@ -12,7 +12,7 @@ import java.util.HashSet;
import java.util.Set;
import java.util.UUID;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityService;
import org.dspace.content.DSpaceObject;
@@ -33,7 +33,7 @@ import org.dspace.event.Event;
*/
public class AuthorityConsumer implements Consumer {
private final Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthorityConsumer.class);
private final Logger log = Logger.getLogger(AuthorityConsumer.class);
/**
* A set of all item IDs installed which need their authority updated

View File

@@ -8,17 +8,13 @@
package org.dspace.authority.indexer;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.logging.log4j.Logger;
import org.apache.log4j.Logger;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityService;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
/**
@@ -29,7 +25,7 @@ import org.dspace.core.Context;
*/
public class AuthorityIndexClient {
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthorityIndexClient.class);
private static Logger log = Logger.getLogger(AuthorityIndexClient.class);
protected static final AuthorityService authorityService =
AuthorityServiceFactory.getInstance().getAuthorityService();
@@ -37,8 +33,6 @@ public class AuthorityIndexClient {
AuthorityServiceFactory.getInstance().getAuthorityIndexingService();
protected static final List<AuthorityIndexerInterface> indexers =
AuthorityServiceFactory.getInstance().getAuthorityIndexers();
protected static final ItemService itemService =
ContentServiceFactory.getInstance().getItemService();
/**
* Default constructor
@@ -70,17 +64,15 @@ public class AuthorityIndexClient {
for (AuthorityIndexerInterface indexerInterface : indexers) {
log.info("Initialize " + indexerInterface.getClass().getName());
System.out.println("Initialize " + indexerInterface.getClass().getName());
Iterator<Item> allItems = itemService.findAll(context);
Map<String, AuthorityValue> authorityCache = new HashMap<>();
while (allItems.hasNext()) {
Item item = allItems.next();
List<AuthorityValue> authorityValues = indexerInterface.getAuthorityValues(
context, item, authorityCache);
for (AuthorityValue authorityValue : authorityValues) {
indexerInterface.init(context, true);
while (indexerInterface.hasMore()) {
AuthorityValue authorityValue = indexerInterface.nextValue();
if (authorityValue != null) {
toIndexValues.put(authorityValue.getId(), authorityValue);
}
context.uncacheEntity(item);
}
//Close up
indexerInterface.close();
}
@@ -90,7 +82,7 @@ public class AuthorityIndexClient {
log.info("Writing new data");
System.out.println("Writing new data");
for (String id : toIndexValues.keySet()) {
indexingService.indexContent(toIndexValues.get(id));
indexingService.indexContent(toIndexValues.get(id), true);
indexingService.commit();
}

View File

@@ -9,8 +9,6 @@
package org.dspace.authority.indexer;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;
import org.dspace.authority.AuthorityValue;
import org.dspace.authorize.AuthorizeException;
@@ -25,10 +23,17 @@ import org.dspace.core.Context;
*/
public interface AuthorityIndexerInterface {
public List<AuthorityValue> getAuthorityValues(Context context, Item item)
throws SQLException, AuthorizeException;
public List<AuthorityValue> getAuthorityValues(Context context, Item item, Map<String, AuthorityValue> cache)
throws SQLException, AuthorizeException;
public void init(Context context, Item item);
public void init(Context context, boolean useCache);
public void init(Context context);
public AuthorityValue nextValue();
public boolean hasMore() throws SQLException, AuthorizeException;
public void close();
public boolean isConfiguredProperly();
}

View File

@@ -19,7 +19,7 @@ import org.dspace.authority.AuthorityValue;
public interface AuthorityIndexingService {
public void indexContent(AuthorityValue value);
public void indexContent(AuthorityValue value, boolean force);
public void cleanIndex() throws Exception;

View File

@@ -9,12 +9,14 @@ package org.dspace.authority.indexer;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.service.AuthorityValueService;
import org.dspace.authorize.AuthorizeException;
@@ -29,13 +31,12 @@ import org.springframework.beans.factory.annotation.Autowired;
/**
* DSpaceAuthorityIndexer is used in IndexClient, which is called by the AuthorityConsumer and the indexing-script.
* <p>
* The DSpaceAuthorityIndexer will return a list of all authority values for a
* given item. It will return an authority value for all metadata fields defined
* in dspace.conf with 'authority.author.indexer.field'.
* An instance of DSpaceAuthorityIndexer is bound to a list of items.
* This can be one item or all items too depending on the init() method.
* <p>
* You have to call getAuthorityValues for every Item you want to index. But you
* can supply an optional cache, to save the mapping from the metadata value to
* the new authority values for metadata fields without an authority key.
* DSpaceAuthorityIndexer lets you iterate over each metadata value
* for each metadata field defined in dspace.cfg with 'authority.author.indexer.field'
* for each item in the list.
* <p>
*
* @author Antoine Snyers (antoine at atmire.com)
@@ -45,16 +46,25 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
public class DSpaceAuthorityIndexer implements AuthorityIndexerInterface, InitializingBean {
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(DSpaceAuthorityIndexer.class);
private static final Logger log = Logger.getLogger(DSpaceAuthorityIndexer.class);
protected Iterator<Item> itemIterator;
protected Item currentItem;
/**
* The list of metadata fields which are to be indexed *
*/
protected List<String> metadataFields;
protected int currentFieldIndex;
protected int currentMetadataIndex;
protected AuthorityValue nextValue;
protected Context context;
@Autowired(required = true)
protected AuthorityValueService authorityValueService;
@Autowired(required = true)
protected ItemService itemService;
protected boolean useCache;
protected Map<String, AuthorityValue> cache;
@Autowired(required = true)
protected ConfigurationService configurationService;
@@ -70,83 +80,146 @@ public class DSpaceAuthorityIndexer implements AuthorityIndexerInterface, Initia
}
}
@Override
public List<AuthorityValue> getAuthorityValues(Context context, Item item)
throws SQLException, AuthorizeException {
return getAuthorityValues(context, item, null);
public void init(Context context, Item item) {
ArrayList<Item> itemList = new ArrayList<>();
itemList.add(item);
this.itemIterator = itemList.iterator();
currentItem = this.itemIterator.next();
initialize(context);
}
public List<AuthorityValue> getAuthorityValues(Context context, Item item, Map<String, AuthorityValue> cache)
throws SQLException, AuthorizeException {
List<AuthorityValue> values = new ArrayList<>();
for (String metadataField : metadataFields) {
List<MetadataValue> metadataValues = itemService.getMetadataByMetadataString(item, metadataField);
for (MetadataValue metadataValue : metadataValues) {
String content = metadataValue.getValue();
String authorityKey = metadataValue.getAuthority();
// We only want to update our item IF our UUID is not present
// or if we need to generate one.
boolean requiresItemUpdate = StringUtils.isBlank(authorityKey) ||
StringUtils.startsWith(authorityKey, AuthorityValueService.GENERATE);
AuthorityValue value = null;
if (StringUtils.isBlank(authorityKey) && cache != null) {
// This is a value currently without an authority. So query
// the cache, if an authority is found for the exact value.
value = cache.get(content);
}
if (value == null) {
value = getAuthorityValue(context, metadataField, content,authorityKey);
}
if (value != null) {
if (requiresItemUpdate) {
value.updateItem(context, item, metadataValue);
try {
itemService.update(context, item);
} catch (Exception e) {
log.error("Error creating a metadatavalue's authority", e);
}
}
if (cache != null) {
cache.put(content, value);
}
values.add(value);
@Override
public void init(Context context) {
init(context, false);
}
@Override
public void init(Context context, boolean useCache) {
try {
this.itemIterator = itemService.findAll(context);
currentItem = this.itemIterator.next();
} catch (SQLException e) {
log.error("Error while retrieving all items in the metadata indexer");
}
initialize(context);
this.useCache = useCache;
}
protected void initialize(Context context) {
this.context = context;
currentFieldIndex = 0;
currentMetadataIndex = 0;
useCache = false;
cache = new HashMap<>();
}
@Override
public AuthorityValue nextValue() {
return nextValue;
}
/**
 * Advance to the next indexable metadata value, if any, and prepare it as
 * {@link #nextValue}. Traversal is three nested levels deep:
 * item -&gt; configured metadata field -&gt; metadata values of that field.
 *
 * @return true when a next value was prepared, false when iteration is done
 * @throws SQLException       if database error
 * @throws AuthorizeException if authorization error
 */
@Override
public boolean hasMore() throws SQLException, AuthorizeException {
    if (currentItem == null) {
        return false;
    }

    // 1. iterate over the metadata values of the current field
    String metadataField = metadataFields.get(currentFieldIndex);
    List<MetadataValue> values = itemService.getMetadataByMetadataString(currentItem, metadataField);
    if (currentMetadataIndex < values.size()) {
        prepareNextValue(metadataField, values.get(currentMetadataIndex));
        currentMetadataIndex++;
        return true;
    }

    // 2. iterate over the metadata fields
    if ((currentFieldIndex + 1) < metadataFields.size()) {
        currentFieldIndex++;
        // Reset our current metadata index since we are moving to another field
        currentMetadataIndex = 0;
        return hasMore();
    }

    // 3. iterate over the items; when the iterator is exhausted, mark the
    // end of the iteration by clearing currentItem.
    if (itemIterator.hasNext()) {
        currentItem = itemIterator.next();
        // Reset our current field index
        currentFieldIndex = 0;
        // Reset our current metadata index
        currentMetadataIndex = 0;
    } else {
        currentItem = null;
    }
    return hasMore();
}
/**
* This method looks at the authority of a metadata value.
* This method looks at the authority of a metadata.
* If the authority can be found in solr, that value is reused.
* Otherwise a new authority value will be generated that will be indexed in solr.
*
* If the authority starts with AuthorityValueGenerator.GENERATE, a specific type of AuthorityValue will be
* generated.
* Depending on the type this may involve querying an external REST service
*
* @param context Current DSpace context
* @param metadataField Is one of the fields defined in dspace.cfg to be indexed.
* @param metadataContent Content of the current metadata value.
* @param metadataAuthorityKey Existing authority of the metadata value.
* @param value Is one of the values of the given metadataField in one of the items being indexed.
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
*/
private AuthorityValue getAuthorityValue(Context context, String metadataField,
String metadataContent, String metadataAuthorityKey) {
if (StringUtils.isNotBlank(metadataAuthorityKey) &&
!metadataAuthorityKey.startsWith(AuthorityValueService.GENERATE)) {
// !uid.startsWith(AuthorityValueGenerator.GENERATE) is not strictly
// necessary here but it prevents exceptions in solr
AuthorityValue value = authorityValueService.findByUID(context, metadataAuthorityKey);
if (value != null) {
return value;
protected void prepareNextValue(String metadataField, MetadataValue value) throws SQLException, AuthorizeException {
nextValue = null;
String content = value.getValue();
String authorityKey = value.getAuthority();
//We only want to update our item IF our UUID is not present or if we need to generate one.
boolean requiresItemUpdate = StringUtils.isBlank(authorityKey) || StringUtils
.startsWith(authorityKey, AuthorityValueService.GENERATE);
if (StringUtils.isNotBlank(authorityKey) && !authorityKey.startsWith(AuthorityValueService.GENERATE)) {
// !uid.startsWith(AuthorityValueGenerator.GENERATE) is not strictly necessary here but it prevents
// exceptions in solr
nextValue = authorityValueService.findByUID(context, authorityKey);
}
if (nextValue == null && StringUtils.isBlank(authorityKey) && useCache) {
// A metadata without authority is being indexed
// If there is an exact match in the cache, reuse it rather than adding a new one.
AuthorityValue cachedAuthorityValue = cache.get(content);
if (cachedAuthorityValue != null) {
nextValue = cachedAuthorityValue;
}
}
return authorityValueService.generate(context, metadataAuthorityKey,
metadataContent, metadataField.replaceAll("\\.", "_"));
if (nextValue == null) {
nextValue = authorityValueService
.generate(context, authorityKey, content, metadataField.replaceAll("\\.", "_"));
}
if (nextValue != null && requiresItemUpdate) {
nextValue.updateItem(context, currentItem, value);
try {
itemService.update(context, currentItem);
} catch (Exception e) {
log.error("Error creating a metadatavalue's authority", e);
}
}
if (useCache) {
cache.put(content, nextValue);
}
}
/**
 * Release iteration state: drop the item iterator reference and empty the
 * authority cache.
 * NOTE(review): assumes init()/initialize() ran first, otherwise cache could
 * be null here — confirm callers always initialize before closing.
 */
@Override
public void close() {
    itemIterator = null;
    cache.clear();
}
@Override

View File

@@ -0,0 +1,87 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.orcid.model.Bio;
import org.dspace.authority.orcid.model.Work;
import org.dspace.authority.orcid.xml.XMLtoBio;
import org.dspace.authority.orcid.xml.XMLtoWork;
import org.dspace.authority.rest.RestSource;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.w3c.dom.Document;
/**
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
/**
 * REST source for the ORCID (v1) registry: fetches bios and works for a given
 * ORCID id and turns bio search results into authority values.
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public class Orcid extends RestSource {

    /**
     * log4j logger
     */
    private static Logger log = Logger.getLogger(Orcid.class);

    // Lazily-initialized singleton, looked up from the DSpace service manager.
    private static Orcid orcid;

    public static Orcid getOrcid() {
        if (orcid == null) {
            orcid = DSpaceServicesFactory.getInstance().getServiceManager()
                                         .getServiceByName("OrcidSource", Orcid.class);
        }
        return orcid;
    }

    private Orcid(String url) {
        super(url);
    }

    /**
     * Fetch the bio of a single ORCID profile and tag it with its id.
     */
    public Bio getBio(String id) {
        Document response = restConnector.get(id + "/orcid-bio");
        Bio result = new XMLtoBio().convert(response).get(0);
        result.setOrcid(id);
        return result;
    }

    /**
     * Fetch the list of works of a single ORCID profile.
     */
    public List<Work> getWorks(String id) {
        Document response = restConnector.get(id + "/orcid-works");
        return new XMLtoWork().convert(response);
    }

    /**
     * Search ORCID bios by (quoted, exact-phrase) name.
     */
    public List<Bio> queryBio(String name, int start, int rows) {
        String query = "search/orcid-bio?q=" + URLEncoder.encode("\"" + name + "\"")
            + "&start=" + start + "&rows=" + rows;
        Document response = restConnector.get(query);
        return new XMLtoBio().convert(response);
    }

    @Override
    public List<AuthorityValue> queryAuthorities(String text, int max) {
        List<AuthorityValue> authorities = new ArrayList<AuthorityValue>();
        for (Bio bio : queryBio(text, 0, max)) {
            authorities.add(OrcidAuthorityValue.create(bio));
        }
        return authorities;
    }

    @Override
    public AuthorityValue queryAuthorityID(String id) {
        return OrcidAuthorityValue.create(getBio(id));
    }
}

View File

@@ -0,0 +1,328 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrInputDocument;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.AuthorityValueServiceImpl;
import org.dspace.authority.PersonAuthorityValue;
import org.dspace.authority.orcid.model.Bio;
import org.dspace.authority.orcid.model.BioExternalIdentifier;
import org.dspace.authority.orcid.model.BioName;
import org.dspace.authority.orcid.model.BioResearcherUrl;
/**
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
/**
 * Person authority value backed by an ORCID (v1) profile. Besides the person
 * fields inherited from PersonAuthorityValue, it stores the ORCID id and a
 * free-form map of other bio metadata that is indexed in solr as
 * "label_&lt;key&gt;" fields.
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public class OrcidAuthorityValue extends PersonAuthorityValue {

    /**
     * log4j logger
     */
    private static Logger log = Logger.getLogger(OrcidAuthorityValue.class);

    // The ORCID identifier of this person.
    private String orcid_id;

    // Additional bio metadata (country, keyword, external_identifier,
    // researcher_url, biography) keyed by label.
    private Map<String, List<String>> otherMetadata = new HashMap<String, List<String>>();

    // Dirty flag set by updateValue()/updateOtherMetadata() while applying a
    // Bio in setValues(Bio); reset at the end of that method.
    private boolean update; // used in setValues(Bio bio)

    /**
     * Creates an instance of OrcidAuthorityValue with only uninitialized fields.
     * This is meant to be filled in with values from an existing record.
     * To create a brand new OrcidAuthorityValue, use create()
     */
    public OrcidAuthorityValue() {
    }

    public OrcidAuthorityValue(SolrDocument document) {
        super(document);
    }

    public String getOrcid_id() {
        return orcid_id;
    }

    public void setOrcid_id(String orcid_id) {
        this.orcid_id = orcid_id;
    }

    public Map<String, List<String>> getOtherMetadata() {
        return otherMetadata;
    }

    /**
     * Append one data entry under the given label, creating the list on first use.
     */
    public void addOtherMetadata(String label, String data) {
        List<String> strings = otherMetadata.get(label);
        if (strings == null) {
            strings = new ArrayList<String>();
        }
        strings.add(data);
        otherMetadata.put(label, strings);
    }

    /**
     * Build the solr record: inherited person fields plus the orcid_id and all
     * other metadata as multivalued "label_<key>" fields.
     */
    @Override
    public SolrInputDocument getSolrInputDocument() {
        SolrInputDocument doc = super.getSolrInputDocument();
        if (StringUtils.isNotBlank(getOrcid_id())) {
            doc.addField("orcid_id", getOrcid_id());
        }
        for (String t : otherMetadata.keySet()) {
            List<String> data = otherMetadata.get(t);
            for (String data_entry : data) {
                doc.addField("label_" + t, data_entry);
            }
        }
        return doc;
    }

    /**
     * Populate this instance from a solr record, reconstructing otherMetadata
     * from every "label_"-prefixed field.
     */
    @Override
    public void setValues(SolrDocument document) {
        super.setValues(document);
        this.orcid_id = String.valueOf(document.getFieldValue("orcid_id"));
        otherMetadata = new HashMap<String, List<String>>();
        for (String fieldName : document.getFieldNames()) {
            String labelPrefix = "label_";
            if (fieldName.startsWith(labelPrefix)) {
                String label = fieldName.substring(labelPrefix.length());
                List<String> list = new ArrayList<String>();
                Collection<Object> fieldValues = document.getFieldValues(fieldName);
                for (Object o : fieldValues) {
                    list.add(String.valueOf(o));
                }
                otherMetadata.put(label, list);
            }
        }
    }

    /**
     * Create an empty authority with a fresh random id and timestamps.
     */
    public static OrcidAuthorityValue create() {
        OrcidAuthorityValue orcidAuthorityValue = new OrcidAuthorityValue();
        orcidAuthorityValue.setId(UUID.randomUUID().toString());
        orcidAuthorityValue.updateLastModifiedDate();
        orcidAuthorityValue.setCreationDate(new Date());
        return orcidAuthorityValue;
    }

    /**
     * Create an authority based on a given orcid bio
     *
     * @param bio Bio
     * @return OrcidAuthorityValue
     */
    public static OrcidAuthorityValue create(Bio bio) {
        OrcidAuthorityValue authority = OrcidAuthorityValue.create();
        authority.setValues(bio);
        return authority;
    }

    /**
     * Apply a Bio to this authority, recording via the "update" flag whether
     * anything actually changed.
     *
     * @return true when at least one field was added or modified
     */
    public boolean setValues(Bio bio) {
        BioName name = bio.getName();

        if (updateValue(bio.getOrcid(), getOrcid_id())) {
            setOrcid_id(bio.getOrcid());
        }

        if (updateValue(name.getFamilyName(), getLastName())) {
            setLastName(name.getFamilyName());
        }
        if (updateValue(name.getGivenNames(), getFirstName())) {
            setFirstName(name.getGivenNames());
        }

        if (StringUtils.isNotBlank(name.getCreditName())) {
            if (!getNameVariants().contains(name.getCreditName())) {
                addNameVariant(name.getCreditName());
                update = true;
            }
        }
        for (String otherName : name.getOtherNames()) {
            if (!getNameVariants().contains(otherName)) {
                addNameVariant(otherName);
                update = true;
            }
        }

        if (updateOtherMetadata("country", bio.getCountry())) {
            addOtherMetadata("country", bio.getCountry());
        }

        for (String keyword : bio.getKeywords()) {
            if (updateOtherMetadata("keyword", keyword)) {
                addOtherMetadata("keyword", keyword);
            }
        }

        for (BioExternalIdentifier externalIdentifier : bio.getBioExternalIdentifiers()) {
            if (updateOtherMetadata("external_identifier", externalIdentifier.toString())) {
                addOtherMetadata("external_identifier", externalIdentifier.toString());
            }
        }

        for (BioResearcherUrl researcherUrl : bio.getResearcherUrls()) {
            if (updateOtherMetadata("researcher_url", researcherUrl.toString())) {
                addOtherMetadata("researcher_url", researcherUrl.toString());
            }
        }

        if (updateOtherMetadata("biography", bio.getBiography())) {
            addOtherMetadata("biography", bio.getBiography());
        }

        setValue(getName());

        // NOTE(review): update() is inherited (not visible here); presumably
        // refreshes the last-modified timestamp when something changed — confirm.
        if (update) {
            update();
        }
        boolean result = update;
        update = false;
        return result;
    }

    /**
     * Decide whether a label/data pair is new w.r.t. otherMetadata, setting the
     * instance-level "update" flag as a side effect when it is.
     */
    private boolean updateOtherMetadata(String label, String data) {
        List<String> strings = getOtherMetadata().get(label);
        boolean update;
        if (strings == null) {
            update = StringUtils.isNotBlank(data);
        } else {
            update = !strings.contains(data);
        }
        if (update) {
            this.update = true;
        }
        return update;
    }

    /**
     * Decide whether an incoming value should replace the resident one (incoming
     * non-blank and different), setting the instance-level "update" flag as a
     * side effect when it should.
     */
    private boolean updateValue(String incoming, String resident) {
        boolean update = StringUtils.isNotBlank(incoming) && !incoming.equals(resident);
        if (update) {
            this.update = true;
        }
        return update;
    }

    /**
     * Information exposed to the choice UI, extended with the ORCID id.
     */
    @Override
    public Map<String, String> choiceSelectMap() {
        Map<String, String> map = super.choiceSelectMap();
        map.put("orcid", getOrcid_id());
        return map;
    }

    @Override
    public String getAuthorityType() {
        return "orcid";
    }

    /**
     * String that allows this authority type to be recognized and regenerated
     * via newInstance(String).
     */
    @Override
    public String generateString() {
        String generateString = AuthorityValueServiceImpl.GENERATE + getAuthorityType() + AuthorityValueServiceImpl
            .SPLIT;
        if (StringUtils.isNotBlank(getOrcid_id())) {
            generateString += getOrcid_id();
        }
        return generateString;
    }

    /**
     * Build a new authority: query ORCID when an id is supplied, otherwise
     * create an empty one.
     */
    @Override
    public AuthorityValue newInstance(String info) {
        AuthorityValue authorityValue = null;
        if (StringUtils.isNotBlank(info)) {
            Orcid orcid = Orcid.getOrcid();
            authorityValue = orcid.queryAuthorityID(info);
        } else {
            authorityValue = OrcidAuthorityValue.create();
        }
        return authorityValue;
    }

    // Identity is defined by the ORCID id alone (hashCode below is consistent
    // with this).
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        OrcidAuthorityValue that = (OrcidAuthorityValue) o;

        if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
            return false;
        }

        return true;
    }

    @Override
    public int hashCode() {
        return orcid_id != null ? orcid_id.hashCode() : 0;
    }

    /**
     * Unlike equals(), this compares full content (inherited fields, orcid id
     * and all other metadata, order-insensitive per label) — used e.g. to decide
     * whether lastModified should change.
     */
    @Override
    public boolean hasTheSameInformationAs(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        if (!super.hasTheSameInformationAs(o)) {
            return false;
        }

        OrcidAuthorityValue that = (OrcidAuthorityValue) o;

        if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
            return false;
        }

        for (String key : otherMetadata.keySet()) {
            if (otherMetadata.get(key) != null) {
                List<String> metadata = otherMetadata.get(key);
                List<String> otherMetadata = that.otherMetadata.get(key);
                if (otherMetadata == null) {
                    return false;
                } else {
                    HashSet<String> metadataSet = new HashSet<String>(metadata);
                    HashSet<String> otherMetadataSet = new HashSet<String>(otherMetadata);
                    if (!metadataSet.equals(otherMetadataSet)) {
                        return false;
                    }
                }
            } else {
                if (that.otherMetadata.get(key) != null) {
                    return false;
                }
            }
        }

        return true;
    }
}

View File

@@ -1,191 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.SolrAuthorityInterface;
import org.dspace.authority.orcid.xml.XMLtoBio;
import org.dspace.authority.rest.RESTConnector;
import org.json.JSONObject;
import org.orcid.jaxb.model.record_v2.Person;
/**
* @author Jonas Van Goolen (jonas at atmire dot com)
* This class contains all methods for retrieving "Person" objects calling the ORCID (version 2) endpoints.
* Additionally, this can also create AuthorityValues based on these returned Person objects
*/
/**
 * @author Jonas Van Goolen (jonas at atmire dot com)
 * This class contains all methods for retrieving "Person" objects calling the ORCID (version 2) endpoints.
 * Additionally, this can also create AuthorityValues based on these returned Person objects
 */
public class Orcidv2 implements SolrAuthorityInterface {

    private static Logger log = LogManager.getLogger(Orcidv2.class);

    public RESTConnector restConnector;
    private String OAUTHUrl;
    private String clientId;
    private String clientSecret;

    // OAUTH bearer token, obtained by init() and sent on every REST call.
    private String accessToken;

    /**
     * Initialize the accessToken that is required for all subsequent calls to ORCID.
     *
     * @throws java.io.IOException passed through from HTTPclient.
     */
    public void init() throws IOException {
        // Only fetch a token when OAUTH credentials are configured and no token
        // is held yet. (The previous guard required a NON-blank accessToken
        // before fetching, which made the OAUTH call unreachable, since the
        // token is only ever set by this method.)
        if (StringUtils.isBlank(accessToken)
            && StringUtils.isNotBlank(clientId) && StringUtils.isNotBlank(clientSecret)) {
            String authenticationParameters = "?client_id=" + clientId +
                "&client_secret=" + clientSecret +
                "&scope=/read-public&grant_type=client_credentials";
            HttpPost httpPost = new HttpPost(OAUTHUrl + authenticationParameters);
            httpPost.addHeader("Accept", "application/json");
            httpPost.addHeader("Content-Type", "application/x-www-form-urlencoded");

            HttpClient httpClient = HttpClientBuilder.create().build();
            HttpResponse getResponse = httpClient.execute(httpPost);

            JSONObject responseObject = null;
            // try-with-resources: the entity stream/reader were previously
            // never closed (resource leak).
            try (InputStream is = getResponse.getEntity().getContent();
                 BufferedReader streamReader = new BufferedReader(new InputStreamReader(is, "UTF-8"))) {
                String inputStr;
                // The token response is a single-line JSON object; scan for it.
                while ((inputStr = streamReader.readLine()) != null && responseObject == null) {
                    if (inputStr.startsWith("{") && inputStr.endsWith("}") && inputStr.contains("access_token")) {
                        try {
                            responseObject = new JSONObject(inputStr);
                        } catch (Exception e) {
                            //Not as valid as I'd hoped, move along
                            responseObject = null;
                        }
                    }
                }
            }
            if (responseObject != null && responseObject.has("access_token")) {
                accessToken = (String) responseObject.get("access_token");
            }
        }
    }

    /**
     * Makes an instance of the Orcidv2 class based on the provided parameters.
     * This constructor is called through the spring bean initialization
     */
    private Orcidv2(String url, String OAUTHUrl, String clientId, String clientSecret) {
        this.restConnector = new RESTConnector(url);
        this.OAUTHUrl = OAUTHUrl;
        this.clientId = clientId;
        this.clientSecret = clientSecret;
    }

    /**
     * Makes an instance of the Orcidv2 class based on the provided parameters.
     * This constructor is called through the spring bean initialization
     */
    private Orcidv2(String url) {
        this.restConnector = new RESTConnector(url);
    }

    /**
     * Makes an instance of the AuthorityValue with the given information.
     * @param text search string
     * @param max  maximum number of results
     * @return List<AuthorityValue>
     */
    @Override
    public List<AuthorityValue> queryAuthorities(String text, int max) {
        List<Person> bios = queryBio(text, max);
        List<AuthorityValue> result = new ArrayList<>();
        for (Person person : bios) {
            AuthorityValue orcidAuthorityValue = Orcidv2AuthorityValue.create(person);
            if (orcidAuthorityValue != null) {
                result.add(orcidAuthorityValue);
            }
        }
        return result;
    }

    /**
     * Create an AuthorityValue from a Person retrieved using the given orcid identifier.
     * @param id orcid identifier
     * @return AuthorityValue (null when the id is invalid or nothing was found)
     */
    public AuthorityValue queryAuthorityID(String id) {
        Person person = getBio(id);
        return Orcidv2AuthorityValue.create(person);
    }

    /**
     * Retrieve a Person object based on a given orcid identifier.
     * @param id orcid identifier
     * @return Person, or null when the id does not match the ORCID syntax
     */
    public Person getBio(String id) {
        log.debug("getBio called with ID=" + id);
        if (!isValid(id)) {
            return null;
        }
        InputStream bioDocument = restConnector.get(id + ((id.endsWith("/person")) ? "" : "/person"), accessToken);
        XMLtoBio converter = new XMLtoBio();
        return converter.convertSinglePerson(bioDocument);
    }

    /**
     * Retrieve a list of Person objects.
     * @param text  search string
     * @param start offset to use
     * @param rows  how many rows to return (ORCID caps this at 100)
     * @return List<Person>
     */
    public List<Person> queryBio(String text, int start, int rows) {
        if (rows > 100) {
            throw new IllegalArgumentException("The maximum number of results to retrieve cannot exceed 100.");
        }
        // NOTE(review): single-argument URLEncoder.encode is deprecated
        // (platform charset); consider the charset overload.
        String searchPath = "search?q=" + URLEncoder.encode(text) + "&start=" + start + "&rows=" + rows;
        log.debug("queryBio searchPath=" + searchPath + " accessToken=" + accessToken);
        InputStream bioDocument = restConnector.get(searchPath, accessToken);
        XMLtoBio converter = new XMLtoBio();
        return converter.convert(bioDocument);
    }

    /**
     * Retrieve a list of Person objects.
     * @param text search string
     * @param max how many rows to return
     * @return List<Person>
     */
    public List<Person> queryBio(String text, int max) {
        return queryBio(text, 0, max);
    }

    /**
     * Check to see if the provided text has the correct ORCID syntax.
     * Since only searching on ORCID id is allowed, this way, we filter out any queries that would return a
     * blank result anyway
     */
    private boolean isValid(String text) {
        return StringUtils.isNotBlank(text) && text.matches(Orcidv2AuthorityValue.ORCID_ID_SYNTAX);
    }
}

View File

@@ -1,342 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.apache.commons.lang3.StringUtils;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrInputDocument;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.AuthorityValueServiceImpl;
import org.dspace.authority.PersonAuthorityValue;
import org.dspace.utils.DSpace;
import org.orcid.jaxb.model.common_v2.ExternalId;
import org.orcid.jaxb.model.record_v2.ExternalIdentifiers;
import org.orcid.jaxb.model.record_v2.KeywordType;
import org.orcid.jaxb.model.record_v2.NameType;
import org.orcid.jaxb.model.record_v2.Person;
import org.orcid.jaxb.model.record_v2.ResearcherUrlType;
/**
* @author Jonas Van Goolen (jonas at atmire dot com)
*/
/**
 * Person authority value backed by an ORCID (v2) record. Stores the ORCID id
 * plus a free-form metadata map that is indexed in solr as "label_&lt;key&gt;"
 * fields.
 *
 * @author Jonas Van Goolen (jonas at atmire dot com)
 */
public class Orcidv2AuthorityValue extends PersonAuthorityValue {

    /*
     * The ORCID identifier
     */
    private String orcid_id;

    /*
     * Map containing key-value pairs filled in by "setValues(Person person)".
     * This represents all dynamic information of the object.
     */
    private Map<String, List<String>> otherMetadata = new HashMap<String, List<String>>();

    /**
     * The syntax that the ORCID id needs to conform to
     */
    public static final String ORCID_ID_SYNTAX = "\\d{4}-\\d{4}-\\d{4}-(\\d{3}X|\\d{4})";

    /**
     * Creates an instance of Orcidv2AuthorityValue with only uninitialized fields.
     * This is meant to be filled in with values from an existing record.
     * To create a brand new Orcidv2AuthorityValue, use create()
     */
    public Orcidv2AuthorityValue() {
    }

    public Orcidv2AuthorityValue(SolrDocument document) {
        super(document);
    }

    public String getOrcid_id() {
        return orcid_id;
    }

    public void setOrcid_id(String orcid_id) {
        this.orcid_id = orcid_id;
    }

    /**
     * Create an empty authority.
     * @return OrcidAuthorityValue
     */
    public static Orcidv2AuthorityValue create() {
        Orcidv2AuthorityValue orcidAuthorityValue = new Orcidv2AuthorityValue();
        orcidAuthorityValue.setId(UUID.randomUUID().toString());
        orcidAuthorityValue.updateLastModifiedDate();
        orcidAuthorityValue.setCreationDate(new Date());
        return orcidAuthorityValue;
    }

    /**
     * Create an authority based on a given orcid bio
     * @return OrcidAuthorityValue, or null when no person was supplied
     */
    public static Orcidv2AuthorityValue create(Person person) {
        if (person == null) {
            return null;
        }
        Orcidv2AuthorityValue authority = Orcidv2AuthorityValue.create();
        authority.setValues(person);
        return authority;
    }

    /**
     * Initialize this instance based on a Person object.
     * Null-safe on the name and its parts: ORCID records may omit them
     * (previously this threw NullPointerException).
     *
     * @param person Person
     */
    protected void setValues(Person person) {
        NameType name = person.getName();
        if (name != null) {
            if (!StringUtils.equals(name.getPath(), this.getOrcid_id())) {
                this.setOrcid_id(name.getPath());
            }
            if (name.getFamilyName() != null
                && !StringUtils.equals(name.getFamilyName().getValue(), this.getLastName())) {
                this.setLastName(name.getFamilyName().getValue());
            }
            if (name.getGivenNames() != null
                && !StringUtils.equals(name.getGivenNames().getValue(), this.getFirstName())) {
                this.setFirstName(name.getGivenNames().getValue());
            }
            if (name.getCreditName() != null && StringUtils.isNotBlank(name.getCreditName().getValue())) {
                if (!this.getNameVariants().contains(name.getCreditName().getValue())) {
                    this.addNameVariant(name.getCreditName().getValue());
                }
            }
        }
        if (person.getKeywords() != null) {
            for (KeywordType keyword : person.getKeywords().getKeyword()) {
                if (this.isNewMetadata("keyword", keyword.getContent())) {
                    this.addOtherMetadata("keyword", keyword.getContent());
                }
            }
        }
        ExternalIdentifiers externalIdentifiers = person.getExternalIdentifiers();
        if (externalIdentifiers != null) {
            for (ExternalId externalIdentifier : externalIdentifiers.getExternalIdentifier()) {
                if (this.isNewMetadata("external_identifier", externalIdentifier.getExternalIdValue())) {
                    this.addOtherMetadata("external_identifier", externalIdentifier.getExternalIdValue());
                }
            }
        }
        if (person.getResearcherUrls() != null) {
            for (ResearcherUrlType researcherUrl : person.getResearcherUrls().getResearcherUrl()) {
                if (this.isNewMetadata("researcher_url", researcherUrl.getUrl().getValue())) {
                    this.addOtherMetadata("researcher_url", researcherUrl.getUrl().getValue());
                }
            }
        }
        if (person.getBiography() != null) {
            if (this.isNewMetadata("biography", person.getBiography().getContent())) {
                this.addOtherMetadata("biography", person.getBiography().getContent());
            }
        }
        this.setValue(this.getName());
    }

    /**
     * Makes an instance of the AuthorityValue with the given information.
     * @param info string info
     * @return AuthorityValue
     */
    @Override
    public AuthorityValue newInstance(String info) {
        AuthorityValue authorityValue = null;
        if (StringUtils.isNotBlank(info)) {
            Orcidv2 orcid = new DSpace().getServiceManager().getServiceByName("AuthoritySource", Orcidv2.class);
            authorityValue = orcid.queryAuthorityID(info);
        } else {
            // create() is static: call it through the class, not "this".
            authorityValue = Orcidv2AuthorityValue.create();
        }
        return authorityValue;
    }

    @Override
    public void setValue(String value) {
        super.setValue(value);
    }

    /**
     * Check to see if the provided label / data pair is already present in the "otherMetadata" or not
     * */
    public boolean isNewMetadata(String label, String data) {
        List<String> strings = getOtherMetadata().get(label);
        boolean update;
        if (strings == null) {
            update = StringUtils.isNotBlank(data);
        } else {
            update = !strings.contains(data);
        }
        return update;
    }

    /**
     * Add additional metadata to the otherMetadata map*/
    public void addOtherMetadata(String label, String data) {
        List<String> strings = otherMetadata.get(label);
        if (strings == null) {
            strings = new ArrayList<>();
        }
        strings.add(data);
        otherMetadata.put(label, strings);
    }

    public Map<String, List<String>> getOtherMetadata() {
        return otherMetadata;
    }

    /**
     * Generate a solr record from this instance
     * @return SolrInputDocument
     */
    @Override
    public SolrInputDocument getSolrInputDocument() {
        SolrInputDocument doc = super.getSolrInputDocument();
        if (StringUtils.isNotBlank(getOrcid_id())) {
            doc.addField("orcid_id", getOrcid_id());
        }
        for (String t : otherMetadata.keySet()) {
            List<String> data = otherMetadata.get(t);
            for (String data_entry : data) {
                doc.addField("label_" + t, data_entry);
            }
        }
        return doc;
    }

    /**
     * Information that can be used the choice ui
     * @return map
     */
    @Override
    public Map<String, String> choiceSelectMap() {
        Map<String, String> map = super.choiceSelectMap();
        String orcid_id = getOrcid_id();
        if (StringUtils.isNotBlank(orcid_id)) {
            map.put("orcid", orcid_id);
        }
        return map;
    }

    @Override
    public String getAuthorityType() {
        return "orcid";
    }

    /**
     * Provides a string that will allow this AuthorityType to be recognized and provides information to create a new
     * instance to be created using public Orcidv2AuthorityValue newInstance(String info).
     * @return see {@link org.dspace.authority.service.AuthorityValueService#GENERATE AuthorityValueService.GENERATE}
     */
    @Override
    public String generateString() {
        String generateString = AuthorityValueServiceImpl.GENERATE + getAuthorityType() +
            AuthorityValueServiceImpl.SPLIT;
        if (StringUtils.isNotBlank(getOrcid_id())) {
            generateString += getOrcid_id();
        }
        return generateString;
    }

    // Identity is defined by the ORCID id alone (hashCode is consistent).
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        Orcidv2AuthorityValue that = (Orcidv2AuthorityValue) o;
        if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        return orcid_id != null ? orcid_id.hashCode() : 0;
    }

    /**
     * The regular equals() only checks if both AuthorityValues describe the same authority.
     * This method checks if the AuthorityValues have different information
     * E.g. it is used to decide when lastModified should be updated.
     * @param o object
     * @return true or false
     */
    @Override
    public boolean hasTheSameInformationAs(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        if (!super.hasTheSameInformationAs(o)) {
            return false;
        }
        Orcidv2AuthorityValue that = (Orcidv2AuthorityValue) o;
        if (orcid_id != null ? !orcid_id.equals(that.orcid_id) : that.orcid_id != null) {
            return false;
        }
        for (String key : otherMetadata.keySet()) {
            if (otherMetadata.get(key) != null) {
                List<String> metadata = otherMetadata.get(key);
                List<String> otherMetadata = that.otherMetadata.get(key);
                if (otherMetadata == null) {
                    return false;
                } else {
                    HashSet<String> metadataSet = new HashSet<String>(metadata);
                    HashSet<String> otherMetadataSet = new HashSet<String>(otherMetadata);
                    if (!metadataSet.equals(otherMetadataSet)) {
                        return false;
                    }
                }
            } else {
                if (that.otherMetadata.get(key) != null) {
                    return false;
                }
            }
        }
        return true;
    }
}

View File

@@ -0,0 +1,112 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
import java.util.LinkedHashSet;
import java.util.Set;
/**
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
/**
 * Plain data holder for an ORCID (v1) bio: the person's name, country,
 * keywords, external identifiers, researcher URLs and biography text.
 * Collections are LinkedHashSets, so entries are unique and keep insertion order.
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public class Bio {

    protected String orcid;

    protected BioName name;

    protected String country;

    protected Set<String> keywords;

    protected Set<BioExternalIdentifier> bioExternalIdentifiers;

    protected Set<BioResearcherUrl> researcherUrls;

    protected String biography;

    public Bio() {
        this.name = new BioName();
        keywords = new LinkedHashSet<String>();
        bioExternalIdentifiers = new LinkedHashSet<BioExternalIdentifier>();
        researcherUrls = new LinkedHashSet<BioResearcherUrl>();
    }

    public String getOrcid() {
        return orcid;
    }

    public void setOrcid(String orcid) {
        this.orcid = orcid;
    }

    public BioName getName() {
        return name;
    }

    public void setName(BioName name) {
        this.name = name;
    }

    public String getCountry() {
        return country;
    }

    public void setCountry(String country) {
        this.country = country;
    }

    public Set<String> getKeywords() {
        return keywords;
    }

    public void addKeyword(String keyword) {
        this.keywords.add(keyword);
    }

    public Set<BioExternalIdentifier> getBioExternalIdentifiers() {
        return bioExternalIdentifiers;
    }

    public void addExternalIdentifier(BioExternalIdentifier externalReference) {
        bioExternalIdentifiers.add(externalReference);
    }

    public Set<BioResearcherUrl> getResearcherUrls() {
        return researcherUrls;
    }

    public void addResearcherUrl(BioResearcherUrl researcherUrl) {
        researcherUrls.add(researcherUrl);
    }

    public String getBiography() {
        return biography;
    }

    public void setBiography(String biography) {
        this.biography = biography;
    }

    @Override
    public String toString() {
        return "Bio{" +
            "orcid='" + orcid + '\'' +
            ", name=" + name +
            ", country='" + country + '\'' +
            ", keywords=" + keywords +
            ", bioExternalIdentifiers=" + bioExternalIdentifiers +
            ", researcherUrls=" + researcherUrls +
            ", biography='" + biography + '\'' +
            '}';
    }
}

View File

@@ -0,0 +1,108 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
/**
 * Plain data holder for one external identifier attached to an ORCID bio
 * (e.g. a ResearcherID or Scopus id): the owning orcid, the identifier's
 * common name, its reference value and its URL.
 * equals()/hashCode() use all four fields, so instances are safe as set members
 * (Bio stores them in a LinkedHashSet).
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public class BioExternalIdentifier {

    protected String id_orcid;
    protected String id_common_name;
    protected String id_reference;
    protected String id_url;

    public BioExternalIdentifier(String id_orcid, String id_common_name, String id_reference, String id_url) {
        this.id_orcid = id_orcid;
        this.id_common_name = id_common_name;
        this.id_reference = id_reference;
        this.id_url = id_url;
    }

    public String getId_orcid() {
        return id_orcid;
    }

    public void setId_orcid(String id_orcid) {
        this.id_orcid = id_orcid;
    }

    public String getId_common_name() {
        return id_common_name;
    }

    public void setId_common_name(String id_common_name) {
        this.id_common_name = id_common_name;
    }

    public String getId_reference() {
        return id_reference;
    }

    public void setId_reference(String id_reference) {
        this.id_reference = id_reference;
    }

    public String getId_url() {
        return id_url;
    }

    public void setId_url(String id_url) {
        this.id_url = id_url;
    }

    @Override
    public String toString() {
        return "BioExternalIdentifier{" +
            "id_orcid='" + id_orcid + '\'' +
            ", id_common_name='" + id_common_name + '\'' +
            ", id_reference='" + id_reference + '\'' +
            ", id_url='" + id_url + '\'' +
            '}';
    }

    // Null-safe field-by-field comparison over all four identifier parts.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        BioExternalIdentifier that = (BioExternalIdentifier) o;

        if (id_common_name != null ? !id_common_name.equals(that.id_common_name) : that.id_common_name != null) {
            return false;
        }
        if (id_orcid != null ? !id_orcid.equals(that.id_orcid) : that.id_orcid != null) {
            return false;
        }
        if (id_reference != null ? !id_reference.equals(that.id_reference) : that.id_reference != null) {
            return false;
        }
        if (id_url != null ? !id_url.equals(that.id_url) : that.id_url != null) {
            return false;
        }

        return true;
    }

    // Standard 31-multiplier combination over the same fields as equals().
    @Override
    public int hashCode() {
        int result = id_orcid != null ? id_orcid.hashCode() : 0;
        result = 31 * result + (id_common_name != null ? id_common_name.hashCode() : 0);
        result = 31 * result + (id_reference != null ? id_reference.hashCode() : 0);
        result = 31 * result + (id_url != null ? id_url.hashCode() : 0);
        return result;
    }
}

View File

@@ -0,0 +1,114 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
import java.util.ArrayList;
import java.util.List;
/**
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class BioName {
protected String givenNames;
protected String familyName;
protected String creditName;
protected List<String> otherNames;
BioName() {
otherNames = new ArrayList<String>();
}
BioName(String givenNames, String familyName, String creditName, List<String> otherNames) {
this.givenNames = givenNames;
this.familyName = familyName;
this.creditName = creditName;
this.otherNames = otherNames;
}
public String getGivenNames() {
return givenNames;
}
public void setGivenNames(String givenNames) {
this.givenNames = givenNames;
}
public String getFamilyName() {
return familyName;
}
public void setFamilyName(String familyName) {
this.familyName = familyName;
}
public String getCreditName() {
return creditName;
}
public void setCreditName(String creditName) {
this.creditName = creditName;
}
public List<String> getOtherNames() {
return otherNames;
}
public void setOtherNames(List<String> otherNames) {
this.otherNames = otherNames;
}
@Override
public String toString() {
return "BioName{" +
"givenNames='" + givenNames + '\'' +
", familyName='" + familyName + '\'' +
", creditName='" + creditName + '\'' +
", otherNames=" + otherNames +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
BioName bioName = (BioName) o;
if (creditName != null ? !creditName.equals(bioName.creditName) : bioName.creditName != null) {
return false;
}
if (familyName != null ? !familyName.equals(bioName.familyName) : bioName.familyName != null) {
return false;
}
if (givenNames != null ? !givenNames.equals(bioName.givenNames) : bioName.givenNames != null) {
return false;
}
if (otherNames != null ? !otherNames.equals(bioName.otherNames) : bioName.otherNames != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = givenNames != null ? givenNames.hashCode() : 0;
result = 31 * result + (familyName != null ? familyName.hashCode() : 0);
result = 31 * result + (creditName != null ? creditName.hashCode() : 0);
result = 31 * result + (otherNames != null ? otherNames.hashCode() : 0);
return result;
}
}

View File

@@ -0,0 +1,77 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
* @author Antoine Snyers (antoine at atmire.com)
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class BioResearcherUrl {
protected String name;
protected String url;
public BioResearcherUrl(String name, String url) {
this.name = name;
this.url = url;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
@Override
public String toString() {
return "BioResearcherUrl{" +
"name='" + name + '\'' +
", url='" + url + '\'' +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
BioResearcherUrl that = (BioResearcherUrl) o;
if (name != null ? !name.equals(that.name) : that.name != null) {
return false;
}
if (url != null ? !url.equals(that.url) : that.url != null) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = name != null ? name.hashCode() : 0;
result = 31 * result + (url != null ? url.hashCode() : 0);
return result;
}
}

View File

@@ -0,0 +1,49 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
 * A citation string for an ORCID work, paired with the format it is written in.
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public class Citation {

    // Format of the citation text (e.g. BibTeX or a formatted style).
    private CitationType type;
    // The citation text itself.
    private String citation;

    /** Builds a citation from its format and text. */
    public Citation(CitationType type, String citation) {
        this.type = type;
        this.citation = citation;
    }

    public CitationType getType() {
        return type;
    }

    public void setType(CitationType type) {
        this.type = type;
    }

    public String getCitation() {
        return citation;
    }

    public void setCitation(String citation) {
        this.citation = citation;
    }

    /** Debug rendering; nulls print as "null". */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("Citation{");
        text.append("type=").append(type);
        text.append(", citation='").append(citation).append('\'');
        text.append('}');
        return text.toString();
    }
}

View File

@@ -0,0 +1,28 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
 * The format a work citation can be expressed in: raw BibTeX or one of several
 * pre-formatted bibliographic styles, used as the type of a {@code Citation}.
 *
 * NOTE(review): do not rely on constant order — presumably these names mirror
 * the orcid-message citation-type vocabulary; confirm before renaming.
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public enum CitationType {
FORMATTED_UNSPECIFIED,
BIBTEX,
FORMATTED_APA,
FORMATTED_HARVARD,
FORMATTED_IEEE,
FORMATTED_MLA,
FORMATTED_VANCOUVER,
FORMATTED_CHICAGO
}

View File

@@ -0,0 +1,111 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
import java.util.Set;
/**
 * A contributor to an ORCID work: ORCID iD, credit name, e-mail and a set of
 * role/sequence attributes. Any component may be null; equality and hashing
 * cover all four.
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public class Contributor {

    private String orcid;
    private String creditName;
    private String email;
    private Set<ContributorAttribute> contributorAttributes;

    /** Builds a contributor; the attribute set is used as given (not copied). */
    public Contributor(String orcid, String creditName, String email, Set<ContributorAttribute> contributorAttributes) {
        this.orcid = orcid;
        this.creditName = creditName;
        this.email = email;
        this.contributorAttributes = contributorAttributes;
    }

    public String getOrcid() {
        return orcid;
    }

    public void setOrcid(String orcid) {
        this.orcid = orcid;
    }

    public String getCreditName() {
        return creditName;
    }

    public void setCreditName(String creditName) {
        this.creditName = creditName;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public Set<ContributorAttribute> getContributorAttributes() {
        return contributorAttributes;
    }

    public void setContributorAttributes(Set<ContributorAttribute> contributorAttributes) {
        this.contributorAttributes = contributorAttributes;
    }

    /** Debug rendering of all components; nulls print as "null". */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("Contributor{");
        text.append("orcid='").append(orcid).append('\'');
        text.append(", creditName='").append(creditName).append('\'');
        text.append(", email='").append(email).append('\'');
        text.append(", contributorAttributes=").append(contributorAttributes);
        text.append('}');
        return text.toString();
    }

    /**
     * Strict same-class equality (getClass comparison) over all four
     * components, null matching null.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        Contributor other = (Contributor) o;
        return same(contributorAttributes, other.contributorAttributes)
                && same(creditName, other.creditName)
                && same(email, other.email)
                && same(orcid, other.orcid);
    }

    /** Null-safe comparison used by equals. */
    private static boolean same(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    /**
     * Classic 31-multiplier combination in declaration order
     * (orcid, creditName, email, contributorAttributes), null as 0.
     */
    @Override
    public int hashCode() {
        int hash = orcid != null ? orcid.hashCode() : 0;
        for (Object part : new Object[] {creditName, email, contributorAttributes}) {
            hash = 31 * hash + (part == null ? 0 : part.hashCode());
        }
        return hash;
    }
}

View File

@@ -0,0 +1,78 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
 * A role/sequence attribute pair describing how a contributor took part in a
 * work. Enum fields are compared by reference, which also handles nulls.
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public class ContributorAttribute {

    private ContributorAttributeRole role;
    private ContributorAttributeSequence sequence;

    /** Builds an attribute pair; nulls are accepted. */
    public ContributorAttribute(ContributorAttributeRole role, ContributorAttributeSequence sequence) {
        this.role = role;
        this.sequence = sequence;
    }

    public ContributorAttributeRole getRole() {
        return role;
    }

    public void setRole(ContributorAttributeRole role) {
        this.role = role;
    }

    public ContributorAttributeSequence getSequence() {
        return sequence;
    }

    public void setSequence(ContributorAttributeSequence sequence) {
        this.sequence = sequence;
    }

    /** Debug rendering; nulls print as "null". */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("ContributorAttribute{");
        text.append("role=").append(role);
        text.append(", sequence=").append(sequence);
        text.append('}');
        return text.toString();
    }

    /**
     * Strict same-class equality (getClass comparison); enum constants are
     * singletons, so reference comparison is exact and null-safe.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        ContributorAttribute other = (ContributorAttribute) o;
        return role == other.role && sequence == other.sequence;
    }

    /** 31-multiplier combination of role then sequence, null as 0. */
    @Override
    public int hashCode() {
        int hash = role != null ? role.hashCode() : 0;
        hash = 31 * hash + (sequence != null ? sequence.hashCode() : 0);
        return hash;
    }
}

View File

@@ -0,0 +1,32 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
 * The role a contributor played in a work, per ORCID's contributor model.
 *
 * http://support.orcid.org/knowledgebase/articles/118843-anatomy-of-a-contributor
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public enum ContributorAttributeRole {
AUTHOR,
ASSIGNEE,
EDITOR,
CHAIR_OR_TRANSLATOR,
CO_INVESTIGATOR,
CO_INVENTOR,
GRADUATE_STUDENT,
OTHER_INVENTOR,
PRINCIPAL_INVESTIGATOR,
POSTDOCTORAL_RESEARCHER,
SUPPORT_STAFF
}

View File

@@ -0,0 +1,23 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
 * Position of a contributor in a work's contributor list: the first listed
 * contributor or an additional one, per ORCID's contributor model.
 *
 * http://support.orcid.org/knowledgebase/articles/118843-anatomy-of-a-contributor
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public enum ContributorAttributeSequence {
FIRST,
ADDITIONAL
}

View File

@@ -0,0 +1,116 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
import java.util.Set;
/**
 * One work (research output) from an ORCID record. Pure value holder: every
 * field is optional (may be null) and exposed through plain accessors; no
 * equality semantics are defined beyond identity.
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public class Work {

    private WorkTitle workTitle;                          // structured title
    private String description;                           // free-text description
    private Citation citation;                            // citation text + format
    private WorkType workType;                            // kind of output
    private String publicationDate;                       // date as a plain string
    private WorkExternalIdentifier workExternalIdentifier; // e.g. DOI/ISBN entry
    private String url;                                   // link to the work
    private Set<Contributor> contributors;                // people involved
    private String workSource;                            // source of the record

    /** @return the structured title of this work. */
    public WorkTitle getWorkTitle() {
        return workTitle;
    }

    public void setWorkTitle(WorkTitle workTitle) {
        this.workTitle = workTitle;
    }

    /** @return the free-text description. */
    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    /** @return the citation (text plus format). */
    public Citation getCitation() {
        return citation;
    }

    public void setCitation(Citation citation) {
        this.citation = citation;
    }

    /** @return the kind of research output. */
    public WorkType getWorkType() {
        return workType;
    }

    public void setWorkType(WorkType workType) {
        this.workType = workType;
    }

    /** @return the publication date, kept as a plain string. */
    public String getPublicationDate() {
        return publicationDate;
    }

    public void setPublicationDate(String publicationDate) {
        this.publicationDate = publicationDate;
    }

    /** @return the external identifier entry of this work. */
    public WorkExternalIdentifier getWorkExternalIdentifier() {
        return workExternalIdentifier;
    }

    public void setWorkExternalIdentifier(WorkExternalIdentifier workExternalIdentifier) {
        this.workExternalIdentifier = workExternalIdentifier;
    }

    /** @return the URL of this work. */
    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    /** @return the set of contributors, as given to the setter (not copied). */
    public Set<Contributor> getContributors() {
        return contributors;
    }

    public void setContributors(Set<Contributor> contributors) {
        this.contributors = contributors;
    }

    /** @return the source this work record came from. */
    public String getWorkSource() {
        return workSource;
    }

    public void setWorkSource(String workSource) {
        this.workSource = workSource;
    }

    /** Debug rendering of all fields; nulls print as "null". */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("Work{");
        text.append("workTitle=").append(workTitle);
        text.append(", description='").append(description).append('\'');
        text.append(", citation=").append(citation);
        text.append(", workType=").append(workType);
        text.append(", publicationDate='").append(publicationDate).append('\'');
        text.append(", workExternalIdentifier=").append(workExternalIdentifier);
        text.append(", url='").append(url).append('\'');
        text.append(", contributors=").append(contributors);
        text.append(", workSource='").append(workSource).append('\'');
        text.append('}');
        return text.toString();
    }
}

View File

@@ -0,0 +1,73 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
 * An external identifier of an ORCID work: the identifier scheme (type) plus
 * the identifier value itself.
 *
 * http://support.orcid.org/knowledgebase/articles/118807
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public class WorkExternalIdentifier {

    private WorkExternalIdentifierType workExternalIdentifierType;
    // Historic misspelling ("Idenfitier") kept for the field and the toString()
    // label so existing output is byte-identical; new accessors use the
    // corrected spelling.
    private String workExternalIdenfitierID;

    /**
     * Builds an identifier entry from its scheme and value; nulls are accepted.
     *
     * @param workExternalIdentifierType the identifier scheme (e.g. DOI)
     * @param workExternalIdenfitierID   the identifier value
     */
    public WorkExternalIdentifier(WorkExternalIdentifierType workExternalIdentifierType,
                                  String workExternalIdenfitierID) {
        this.workExternalIdentifierType = workExternalIdentifierType;
        this.workExternalIdenfitierID = workExternalIdenfitierID;
    }

    public WorkExternalIdentifierType getWorkExternalIdentifierType() {
        return workExternalIdentifierType;
    }

    public void setWorkExternalIdentifierType(WorkExternalIdentifierType workExternalIdentifierType) {
        this.workExternalIdentifierType = workExternalIdentifierType;
    }

    /**
     * Accessor added for consistency with the type field: the identifier value
     * participated in equals/hashCode/toString but could not be read by callers.
     *
     * @return the identifier value, may be null
     */
    public String getWorkExternalIdentifierID() {
        return workExternalIdenfitierID;
    }

    /**
     * Mutator added for consistency with the type field.
     *
     * @param workExternalIdentifierID the new identifier value
     */
    public void setWorkExternalIdentifierID(String workExternalIdentifierID) {
        this.workExternalIdenfitierID = workExternalIdentifierID;
    }

    /** Debug rendering; the label keeps the historic spelling on purpose. */
    @Override
    public String toString() {
        return "WorkExternalIdentifier{" +
            "workExternalIdentifierType=" + workExternalIdentifierType +
            ", workExternalIdenfitierID='" + workExternalIdenfitierID + '\'' +
            '}';
    }

    /**
     * Strict same-class equality (getClass comparison) over the value
     * (null matching null) and the scheme (enum reference comparison).
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        WorkExternalIdentifier that = (WorkExternalIdentifier) o;
        if (workExternalIdenfitierID != null ? !workExternalIdenfitierID
            .equals(that.workExternalIdenfitierID) : that.workExternalIdenfitierID != null) {
            return false;
        }
        if (workExternalIdentifierType != that.workExternalIdentifierType) {
            return false;
        }
        return true;
    }

    /** 31-multiplier combination of scheme then value, null as 0 — consistent with equals. */
    @Override
    public int hashCode() {
        int result = workExternalIdentifierType != null ? workExternalIdentifierType.hashCode() : 0;
        result = 31 * result + (workExternalIdenfitierID != null ? workExternalIdenfitierID.hashCode() : 0);
        return result;
    }
}

View File

@@ -0,0 +1,42 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
 * Identifier schemes a work's external identifier can use (DOI, ISBN, PubMed
 * ID, arXiv, ...), used by {@code WorkExternalIdentifier}.
 *
 * http://support.orcid.org/knowledgebase/articles/118807
 *
 * NOTE(review): do not rely on constant order — presumably these names mirror
 * the orcid-message identifier-type vocabulary; confirm before renaming.
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public enum WorkExternalIdentifierType {
// OTHER_ID,
ARXIV,
ASIN,
ASIN_TLD,
BIBCODE,
DOI,
EID,
ISBN,
ISSN,
JFM,
JSTOR,
LCCN,
MR,
OCLC,
OL,
OSTI,
PMC,
PMID,
RFC,
SSRN,
ZBL
}

View File

@@ -0,0 +1,64 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
import java.util.Map;
/**
 * Title information of an ORCID work: main title, optional subtitle, and
 * translated titles keyed by language code.
 *
 * http://support.orcid.org/knowledgebase/articles/118807
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public class WorkTitle {

    private String title;
    private String subtitle;
    // Translations keyed by language code; may be null when none were supplied.
    private Map<String, String> translatedTitles;

    /**
     * Builds a title; the translations map is used as given (not copied) and
     * may be null.
     */
    public WorkTitle(String title, String subtitle, Map<String, String> translatedTitles) {
        this.title = title;
        this.subtitle = subtitle;
        this.translatedTitles = translatedTitles;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getSubtitle() {
        return subtitle;
    }

    public void setSubtitle(String subtitle) {
        this.subtitle = subtitle;
    }

    /**
     * Looks up the translated title for a language code.
     * Fix: previously threw NullPointerException when the instance was
     * constructed with a null translations map.
     *
     * @param languageCode the language code to look up
     * @return the translation, or null when none is present
     */
    public String getTranslatedTitles(String languageCode) {
        return translatedTitles == null ? null : translatedTitles.get(languageCode);
    }

    /**
     * Stores a translated title for a language code.
     * Fix: lazily creates the map so instances built with a null map work;
     * previously this threw NullPointerException.
     */
    public void setTranslatedTitle(String languageCode, String translatedTitle) {
        if (translatedTitles == null) {
            // Fully qualified to avoid touching this file's import block.
            translatedTitles = new java.util.HashMap<String, String>();
        }
        translatedTitles.put(languageCode, translatedTitle);
    }

    /** Debug rendering; nulls print as "null". */
    @Override
    public String toString() {
        return "WorkTitle{" +
            "title='" + title + '\'' +
            ", subtitle='" + subtitle + '\'' +
            ", translatedTitles=" + translatedTitles +
            '}';
    }
}

View File

@@ -0,0 +1,57 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.model;
/**
 * The kinds of research output an ORCID work can be (publications,
 * conference output, intellectual property, other output types).
 *
 * http://support.orcid.org/knowledgebase/articles/118795
 *
 * NOTE(review): do not rely on constant order — presumably these names mirror
 * the orcid-message work-type vocabulary; confirm before renaming.
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public enum WorkType {
BOOK,
BOOK_CHAPTER,
BOOK_REVIEW,
DICTIONARY_ENTRY,
DISSERTATION,
ENCYCLOPEDIA_ARTICLE,
EDITED_BOOK,
JOURNAL_ARTICLE,
JOURNAL_ISSUE,
MAGAZINE_ARTICLE,
MANUAL,
ONLINE_RESOURCE,
NEWSLETTER_ARTICLE,
NEWSPAPER_ARTICLE,
REPORT,
RESEARCH_TOOL,
SUPERVISED_STUDENT_PUBLICATION,
TEST,
TRANSLATION,
WEBSITE,
CONFERENCE_ABSTRACT,
CONFERENCE_PAPER,
CONFERENCE_POSTER,
DISCLOSURE,
LICENSE,
PATENT,
REGISTERED_COPYRIGHT,
ARTISTIC_PERFORMANCE,
DATA_SET,
INVENTION,
LECTURE_SPEECH,
RESEARCH_TECHNIQUE,
SPIN_OFF_COMPANY,
STANDARDS_AND_POLICY,
TECHNICAL_STANDARD,
OTHER
}

View File

@@ -7,15 +7,8 @@
*/
package org.dspace.authority.orcid.xml;
import java.io.InputStream;
import java.net.URISyntaxException;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import org.apache.logging.log4j.Logger;
import org.xml.sax.SAXException;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
/**
* @param <T> type
@@ -29,17 +22,13 @@ public abstract class Converter<T> {
/**
* log4j logger
*/
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(Converter.class);
private static Logger log = Logger.getLogger(Converter.class);
public abstract T convert(InputStream document);
protected Object unmarshall(InputStream input, Class<?> type) throws SAXException, URISyntaxException {
try {
JAXBContext context = JAXBContext.newInstance(type);
Unmarshaller unmarshaller = context.createUnmarshaller();
return unmarshaller.unmarshal(input);
} catch (JAXBException e) {
throw new RuntimeException("Unable to unmarshall orcid message" + e);
}
protected void processError(Document xml) {
String errorMessage = XMLErrors.getErrorMessage(xml);
log.error("The orcid-message reports an error: " + errorMessage);
}
public abstract T convert(Document document);
}

View File

@@ -0,0 +1,77 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority.orcid.xml;
import javax.xml.xpath.XPathExpressionException;
import org.apache.log4j.Logger;
import org.dspace.authority.util.XMLUtils;
import org.w3c.dom.Document;
/**
 * Static helpers for inspecting an orcid-message XML document for an
 * error description element.
 *
 * @author Antoine Snyers (antoine at atmire.com)
 * @author Kevin Van de Velde (kevin at atmire dot com)
 * @author Ben Bosman (ben at atmire dot com)
 * @author Mark Diggory (markd at atmire dot com)
 */
public class XMLErrors {
/**
 * log4j logger
 */
private static Logger log = Logger.getLogger(XMLErrors.class);
// XPath to the error description element of an orcid-message document.
private static final String ERROR_DESC = "/orcid-message/error-desc";
/**
 * Private constructor: utility class, never instantiated.
 */
private XMLErrors() { }
/**
 * Evaluates whether a given xml document reports an error.
 *
 * @param xml The given xml document
 * @return true when the document is null OR when no error-desc text could be
 *         read from it (including when the XPath lookup itself fails);
 *         false when an error description is present.
 *         NOTE(review): the previous javadoc claimed the opposite ("true if
 *         it contains errors"); the code returns true for the no-error-found
 *         case, and a null document is indistinguishable from an error-free
 *         one — verify which meaning callers rely on before changing either.
 */
public static boolean check(Document xml) {
if (xml == null) {
return true;
}
String textContent = null;
try {
textContent = XMLUtils.getTextContent(xml, ERROR_DESC);
} catch (XPathExpressionException e) {
log.error("Error while checking for errors in orcid message", e);
}
// true when no error-desc text was found (or the lookup failed).
return textContent == null;
}
/**
 * Extracts the error description text from an orcid-message document.
 *
 * @param xml The given xml document
 * @return the error-desc text content; a fixed message when xml is null;
 *         null when no error-desc is present or the XPath lookup fails.
 */
public static String getErrorMessage(Document xml) {
if (xml == null) {
return "Did not receive an XML document.";
}
String textContent = null;
try {
textContent = XMLUtils.getTextContent(xml, ERROR_DESC);
} catch (XPathExpressionException e) {
log.error("Error while checking for errors in orcid message", e);
}
return textContent;
}
}

Some files were not shown because too many files have changed in this diff Show More