Merge branch 'main' into 8320

This commit is contained in:
Mark H. Wood
2022-07-13 12:17:35 -04:00
434 changed files with 52490 additions and 2352 deletions

View File

@@ -11,9 +11,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
env: env:
# Give Maven 1GB of memory to work with # Give Maven 1GB of memory to work with
# Suppress all Maven "downloading" messages in logs (see https://stackoverflow.com/a/35653426) MAVEN_OPTS: "-Xmx1024M"
# This also slightly speeds builds, as there is less logging
MAVEN_OPTS: "-Xmx1024M -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn"
strategy: strategy:
# Create a matrix of two separate configurations for Unit vs Integration Tests # Create a matrix of two separate configurations for Unit vs Integration Tests
# This will ensure those tasks are run in parallel # This will ensure those tasks are run in parallel
@@ -67,7 +65,7 @@ jobs:
- name: Run Maven ${{ matrix.type }} - name: Run Maven ${{ matrix.type }}
env: env:
TEST_FLAGS: ${{ matrix.mvnflags }} TEST_FLAGS: ${{ matrix.mvnflags }}
run: mvn install -B -V -P-assembly -Pcoverage-report $TEST_FLAGS run: mvn --no-transfer-progress -V install -P-assembly -Pcoverage-report $TEST_FLAGS
# If previous step failed, save results of tests to downloadable artifact for this job # If previous step failed, save results of tests to downloadable artifact for this job
# (This artifact is downloadable at the bottom of any job's summary page) # (This artifact is downloadable at the bottom of any job's summary page)

View File

@@ -20,7 +20,7 @@ USER dspace
ADD --chown=dspace . /app/ ADD --chown=dspace . /app/
# Build DSpace (note: this build doesn't include the optional, deprecated "dspace-rest" webapp) # Build DSpace (note: this build doesn't include the optional, deprecated "dspace-rest" webapp)
# Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small # Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small
RUN mvn package && \ RUN mvn --no-transfer-progress package && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \ mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean mvn clean

View File

@@ -19,7 +19,7 @@ USER dspace
# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents) # Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/ ADD --chown=dspace . /app/
# Build DSpace. Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small # Build DSpace. Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small
RUN mvn package && \ RUN mvn --no-transfer-progress package && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \ mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean mvn clean

View File

@@ -22,7 +22,7 @@ USER dspace
ADD --chown=dspace . /app/ ADD --chown=dspace . /app/
# Build DSpace (INCLUDING the optional, deprecated "dspace-rest" webapp) # Build DSpace (INCLUDING the optional, deprecated "dspace-rest" webapp)
# Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small # Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small
RUN mvn package -Pdspace-rest && \ RUN mvn --no-transfer-progress package -Pdspace-rest && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \ mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean mvn clean

View File

@@ -25,24 +25,25 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* AWS Java SDK for AWS KMS (com.amazonaws:aws-java-sdk-kms:1.12.116 - https://aws.amazon.com/sdkforjava) * AWS Java SDK for AWS KMS (com.amazonaws:aws-java-sdk-kms:1.12.116 - https://aws.amazon.com/sdkforjava)
* AWS Java SDK for Amazon S3 (com.amazonaws:aws-java-sdk-s3:1.12.116 - https://aws.amazon.com/sdkforjava) * AWS Java SDK for Amazon S3 (com.amazonaws:aws-java-sdk-s3:1.12.116 - https://aws.amazon.com/sdkforjava)
* JMES Path Query library (com.amazonaws:jmespath-java:1.12.116 - https://aws.amazon.com/sdkforjava) * JMES Path Query library (com.amazonaws:jmespath-java:1.12.116 - https://aws.amazon.com/sdkforjava)
* jcommander (com.beust:jcommander:1.78 - https://jcommander.org)
* HPPC Collections (com.carrotsearch:hppc:0.8.1 - http://labs.carrotsearch.com/hppc.html/hppc) * HPPC Collections (com.carrotsearch:hppc:0.8.1 - http://labs.carrotsearch.com/hppc.html/hppc)
* parso (com.epam:parso:2.0.11 - https://github.com/epam/parso) * com.drewnoakes:metadata-extractor (com.drewnoakes:metadata-extractor:2.16.0 - https://drewnoakes.com/code/exif/)
* parso (com.epam:parso:2.0.14 - https://github.com/epam/parso)
* Esri Geometry API for Java (com.esri.geometry:esri-geometry-api:2.2.0 - https://github.com/Esri/geometry-api-java)
* ClassMate (com.fasterxml:classmate:1.3.0 - http://github.com/cowtowncoder/java-classmate) * ClassMate (com.fasterxml:classmate:1.3.0 - http://github.com/cowtowncoder/java-classmate)
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.12.3 - http://github.com/FasterXML/jackson) * Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.12.6 - http://github.com/FasterXML/jackson)
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.12.3 - https://github.com/FasterXML/jackson-core) * Jackson-core (com.fasterxml.jackson.core:jackson-core:2.12.6 - https://github.com/FasterXML/jackson-core)
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.12.3 - http://github.com/FasterXML/jackson) * jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.12.6.1 - http://github.com/FasterXML/jackson)
* Jackson dataformat: CBOR (com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.12.3 - http://github.com/FasterXML/jackson-dataformats-binary) * Jackson dataformat: CBOR (com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.12.3 - http://github.com/FasterXML/jackson-dataformats-binary)
* Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.11.2 - http://github.com/FasterXML/jackson-dataformats-binary) * Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.12.3 - http://github.com/FasterXML/jackson-dataformats-binary)
* Jackson-dataformat-YAML (com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1 - https://github.com/FasterXML/jackson-dataformats-text) * Jackson-dataformat-YAML (com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1 - https://github.com/FasterXML/jackson-dataformats-text)
* Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.10.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8) * Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8)
* Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.10.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310)
* Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.11.1 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310) * Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.11.1 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310)
* Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.10.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names) * Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310)
* Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names)
* Java UUID Generator (com.fasterxml.uuid:java-uuid-generator:4.0.1 - https://github.com/cowtowncoder/java-uuid-generator) * Java UUID Generator (com.fasterxml.uuid:java-uuid-generator:4.0.1 - https://github.com/cowtowncoder/java-uuid-generator)
* Woodstox (com.fasterxml.woodstox:woodstox-core:5.0.3 - https://github.com/FasterXML/woodstox) * Woodstox (com.fasterxml.woodstox:woodstox-core:6.2.4 - https://github.com/FasterXML/woodstox)
* zjsonpatch (com.flipkart.zjsonpatch:zjsonpatch:0.4.6 - https://github.com/flipkart-incubator/zjsonpatch/) * zjsonpatch (com.flipkart.zjsonpatch:zjsonpatch:0.4.6 - https://github.com/flipkart-incubator/zjsonpatch/)
* Caffeine cache (com.github.ben-manes.caffeine:caffeine:2.8.4 - https://github.com/ben-manes/caffeine) * Caffeine cache (com.github.ben-manes.caffeine:caffeine:2.9.2 - https://github.com/ben-manes/caffeine)
* btf (com.github.java-json-tools:btf:1.3 - https://github.com/java-json-tools/btf) * btf (com.github.java-json-tools:btf:1.3 - https://github.com/java-json-tools/btf)
* jackson-coreutils (com.github.java-json-tools:jackson-coreutils:2.0 - https://github.com/java-json-tools/jackson-coreutils) * jackson-coreutils (com.github.java-json-tools:jackson-coreutils:2.0 - https://github.com/java-json-tools/jackson-coreutils)
* jackson-coreutils-equivalence (com.github.java-json-tools:jackson-coreutils-equivalence:1.0 - https://github.com/java-json-tools/jackson-coreutils) * jackson-coreutils-equivalence (com.github.java-json-tools:jackson-coreutils-equivalence:1.0 - https://github.com/java-json-tools/jackson-coreutils)
@@ -50,28 +51,27 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* json-schema-validator (com.github.java-json-tools:json-schema-validator:2.2.14 - https://github.com/java-json-tools/json-schema-validator) * json-schema-validator (com.github.java-json-tools:json-schema-validator:2.2.14 - https://github.com/java-json-tools/json-schema-validator)
* msg-simple (com.github.java-json-tools:msg-simple:1.2 - https://github.com/java-json-tools/msg-simple) * msg-simple (com.github.java-json-tools:msg-simple:1.2 - https://github.com/java-json-tools/msg-simple)
* uri-template (com.github.java-json-tools:uri-template:0.10 - https://github.com/java-json-tools/uri-template) * uri-template (com.github.java-json-tools:uri-template:0.10 - https://github.com/java-json-tools/uri-template)
* Open JSON (com.github.openjson:openjson:1.0.12 - https://github.com/openjson/openjson)
* JCIP Annotations under Apache License (com.github.stephenc.jcip:jcip-annotations:1.0-1 - http://stephenc.github.com/jcip-annotations) * JCIP Annotations under Apache License (com.github.stephenc.jcip:jcip-annotations:1.0-1 - http://stephenc.github.com/jcip-annotations)
* Google APIs Client Library for Java (com.google.api-client:google-api-client:1.23.0 - https://github.com/google/google-api-java-client/google-api-client) * Google APIs Client Library for Java (com.google.api-client:google-api-client:1.23.0 - https://github.com/google/google-api-java-client/google-api-client)
* Google Analytics API v3-rev145-1.23.0 (com.google.apis:google-api-services-analytics:v3-rev145-1.23.0 - http://nexus.sonatype.org/oss-repository-hosting.html/google-api-services-analytics) * Google Analytics API v3-rev145-1.23.0 (com.google.apis:google-api-services-analytics:v3-rev145-1.23.0 - http://nexus.sonatype.org/oss-repository-hosting.html/google-api-services-analytics)
* FindBugs-jsr305 (com.google.code.findbugs:jsr305:3.0.1 - http://findbugs.sourceforge.net/) * FindBugs-jsr305 (com.google.code.findbugs:jsr305:3.0.1 - http://findbugs.sourceforge.net/)
* Gson (com.google.code.gson:gson:2.8.6 - https://github.com/google/gson/gson) * Gson (com.google.code.gson:gson:2.9.0 - https://github.com/google/gson/gson)
* error-prone annotations (com.google.errorprone:error_prone_annotations:2.3.4 - http://nexus.sonatype.org/oss-repository-hosting.html/error_prone_parent/error_prone_annotations) * error-prone annotations (com.google.errorprone:error_prone_annotations:2.7.1 - http://nexus.sonatype.org/oss-repository-hosting.html/error_prone_parent/error_prone_annotations)
* Guava InternalFutureFailureAccess and InternalFutures (com.google.guava:failureaccess:1.0.1 - https://github.com/google/guava/failureaccess) * Guava InternalFutureFailureAccess and InternalFutures (com.google.guava:failureaccess:1.0.1 - https://github.com/google/guava/failureaccess)
* Guava: Google Core Libraries for Java (com.google.guava:guava:30.0-jre - https://github.com/google/guava/guava) * Guava: Google Core Libraries for Java (com.google.guava:guava:31.0.1-jre - https://github.com/google/guava)
* Guava: Google Core Libraries for Java (JDK5 Backport) (com.google.guava:guava-jdk5:17.0 - http://code.google.com/p/guava-libraries/guava-jdk5) * Guava: Google Core Libraries for Java (JDK5 Backport) (com.google.guava:guava-jdk5:17.0 - http://code.google.com/p/guava-libraries/guava-jdk5)
* Guava ListenableFuture only (com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava - https://github.com/google/guava/listenablefuture) * Guava ListenableFuture only (com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava - https://github.com/google/guava/listenablefuture)
* Google HTTP Client Library for Java (com.google.http-client:google-http-client:1.23.0 - https://github.com/google/google-http-java-client/google-http-client) * Google HTTP Client Library for Java (com.google.http-client:google-http-client:1.23.0 - https://github.com/google/google-http-java-client/google-http-client)
* GSON extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-gson:1.41.7 - https://github.com/googleapis/google-http-java-client/google-http-client-gson)
* Jackson 2 extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-jackson2:1.23.0 - https://github.com/google/google-http-java-client/google-http-client-jackson2) * Jackson 2 extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-jackson2:1.23.0 - https://github.com/google/google-http-java-client/google-http-client-jackson2)
* J2ObjC Annotations (com.google.j2objc:j2objc-annotations:1.3 - https://github.com/google/j2objc/) * J2ObjC Annotations (com.google.j2objc:j2objc-annotations:1.3 - https://github.com/google/j2objc/)
* Google OAuth Client Library for Java (com.google.oauth-client:google-oauth-client:1.32.1 - https://github.com/googleapis/google-oauth-java-client/google-oauth-client) * Google OAuth Client Library for Java (com.google.oauth-client:google-oauth-client:1.33.3 - https://github.com/googleapis/google-oauth-java-client/google-oauth-client)
* ConcurrentLinkedHashMap (com.googlecode.concurrentlinkedhashmap:concurrentlinkedhashmap-lru:1.4.2 - http://code.google.com/p/concurrentlinkedhashmap) * ConcurrentLinkedHashMap (com.googlecode.concurrentlinkedhashmap:concurrentlinkedhashmap-lru:1.4.2 - http://code.google.com/p/concurrentlinkedhashmap)
* JSON.simple (com.googlecode.json-simple:json-simple:1.1.1 - http://code.google.com/p/json-simple/)
* libphonenumber (com.googlecode.libphonenumber:libphonenumber:8.11.1 - https://github.com/google/libphonenumber/) * libphonenumber (com.googlecode.libphonenumber:libphonenumber:8.11.1 - https://github.com/google/libphonenumber/)
* Jackcess (com.healthmarketscience.jackcess:jackcess:3.0.1 - https://jackcess.sourceforge.io) * Jackcess (com.healthmarketscience.jackcess:jackcess:4.0.1 - https://jackcess.sourceforge.io)
* Jackcess Encrypt (com.healthmarketscience.jackcess:jackcess-encrypt:3.0.0 - http://jackcessencrypt.sf.net) * Jackcess Encrypt (com.healthmarketscience.jackcess:jackcess-encrypt:4.0.1 - http://jackcessencrypt.sf.net)
* project ':json-path' (com.jayway.jsonpath:json-path:2.4.0 - https://github.com/jayway/JsonPath) * project ':json-path' (com.jayway.jsonpath:json-path:2.6.0 - https://github.com/jayway/JsonPath)
* project ':json-path-assert' (com.jayway.jsonpath:json-path-assert:2.4.0 - https://github.com/jayway/JsonPath) * project ':json-path-assert' (com.jayway.jsonpath:json-path-assert:2.6.0 - https://github.com/jayway/JsonPath)
* Disruptor Framework (com.lmax:disruptor:3.4.2 - http://lmax-exchange.github.com/disruptor) * Disruptor Framework (com.lmax:disruptor:3.4.2 - http://lmax-exchange.github.com/disruptor)
* builder-commons (com.lyncode:builder-commons:1.0.2 - http://nexus.sonatype.org/oss-repository-hosting.html/builder-commons) * builder-commons (com.lyncode:builder-commons:1.0.2 - http://nexus.sonatype.org/oss-repository-hosting.html/builder-commons)
* MaxMind DB Reader (com.maxmind.db:maxmind-db:1.2.2 - http://dev.maxmind.com/) * MaxMind DB Reader (com.maxmind.db:maxmind-db:1.2.2 - http://dev.maxmind.com/)
@@ -79,12 +79,12 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Nimbus JOSE+JWT (com.nimbusds:nimbus-jose-jwt:7.9 - https://bitbucket.org/connect2id/nimbus-jose-jwt) * Nimbus JOSE+JWT (com.nimbusds:nimbus-jose-jwt:7.9 - https://bitbucket.org/connect2id/nimbus-jose-jwt)
* opencsv (com.opencsv:opencsv:5.2 - http://opencsv.sf.net) * opencsv (com.opencsv:opencsv:5.2 - http://opencsv.sf.net)
* java-libpst (com.pff:java-libpst:0.9.3 - https://github.com/rjohnsondev/java-libpst) * java-libpst (com.pff:java-libpst:0.9.3 - https://github.com/rjohnsondev/java-libpst)
* rome (com.rometools:rome:1.12.2 - http://rometools.com/rome) * rome (com.rometools:rome:1.18.0 - http://rometools.com/rome)
* rome-utils (com.rometools:rome-utils:1.12.2 - http://rometools.com/rome-utils) * rome-modules (com.rometools:rome-modules:1.18.0 - http://rometools.com/rome-modules)
* rome-utils (com.rometools:rome-utils:1.18.0 - http://rometools.com/rome-utils)
* fastinfoset (com.sun.xml.fastinfoset:FastInfoset:1.2.15 - http://fi.java.net) * fastinfoset (com.sun.xml.fastinfoset:FastInfoset:1.2.15 - http://fi.java.net)
* T-Digest (com.tdunning:t-digest:3.1 - https://github.com/tdunning/t-digest) * T-Digest (com.tdunning:t-digest:3.1 - https://github.com/tdunning/t-digest)
* JSON library from Android SDK (com.vaadin.external.google:android-json:0.0.20131108.vaadin1 - http://developer.android.com/sdk) * JSON library from Android SDK (com.vaadin.external.google:android-json:0.0.20131108.vaadin1 - http://developer.android.com/sdk)
* HikariCP (com.zaxxer:HikariCP-java7:2.4.13 - https://github.com/brettwooldridge/HikariCP)
* SparseBitSet (com.zaxxer:SparseBitSet:1.2 - https://github.com/brettwooldridge/SparseBitSet) * SparseBitSet (com.zaxxer:SparseBitSet:1.2 - https://github.com/brettwooldridge/SparseBitSet)
* Apache Commons BeanUtils (commons-beanutils:commons-beanutils:1.9.4 - https://commons.apache.org/proper/commons-beanutils/) * Apache Commons BeanUtils (commons-beanutils:commons-beanutils:1.9.4 - https://commons.apache.org/proper/commons-beanutils/)
* Apache Commons CLI (commons-cli:commons-cli:1.4 - http://commons.apache.org/proper/commons-cli/) * Apache Commons CLI (commons-cli:commons-cli:1.4 - http://commons.apache.org/proper/commons-cli/)
@@ -98,30 +98,24 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Apache Commons Validator (commons-validator:commons-validator:1.5.0 - http://commons.apache.org/proper/commons-validator/) * Apache Commons Validator (commons-validator:commons-validator:1.5.0 - http://commons.apache.org/proper/commons-validator/)
* GeoJson POJOs for Jackson (de.grundid.opendatalab:geojson-jackson:1.14 - https://github.com/opendatalab-de/geojson-jackson) * GeoJson POJOs for Jackson (de.grundid.opendatalab:geojson-jackson:1.14 - https://github.com/opendatalab-de/geojson-jackson)
* Boilerpipe -- Boilerplate Removal and Fulltext Extraction from HTML pages (de.l3s.boilerpipe:boilerpipe:1.1.0 - http://code.google.com/p/boilerpipe/) * Boilerpipe -- Boilerplate Removal and Fulltext Extraction from HTML pages (de.l3s.boilerpipe:boilerpipe:1.1.0 - http://code.google.com/p/boilerpipe/)
* SentimentAnalysisParser (edu.usc.ir:sentiment-analysis-parser:0.1 - https://github.com/USCDataScience/SentimentAnalysisParser)
* OpenAIRE Funders Model (eu.openaire:funders-model:2.0.0 - https://api.openaire.eu) * OpenAIRE Funders Model (eu.openaire:funders-model:2.0.0 - https://api.openaire.eu)
* Metrics Core (io.dropwizard.metrics:metrics-core:4.1.5 - https://metrics.dropwizard.io/metrics-core) * Metrics Core (io.dropwizard.metrics:metrics-core:4.1.5 - https://metrics.dropwizard.io/metrics-core)
* Graphite Integration for Metrics (io.dropwizard.metrics:metrics-graphite:4.1.5 - https://metrics.dropwizard.io/metrics-graphite) * Graphite Integration for Metrics (io.dropwizard.metrics:metrics-graphite:4.1.5 - https://metrics.dropwizard.io/metrics-graphite)
* Metrics Integration for Jetty 9.3 and higher (io.dropwizard.metrics:metrics-jetty9:4.1.5 - https://metrics.dropwizard.io/metrics-jetty9) * Metrics Integration for Jetty 9.3 and higher (io.dropwizard.metrics:metrics-jetty9:4.1.5 - https://metrics.dropwizard.io/metrics-jetty9)
* Metrics Integration with JMX (io.dropwizard.metrics:metrics-jmx:4.1.5 - https://metrics.dropwizard.io/metrics-jmx) * Metrics Integration with JMX (io.dropwizard.metrics:metrics-jmx:4.1.5 - https://metrics.dropwizard.io/metrics-jmx)
* JVM Integration for Metrics (io.dropwizard.metrics:metrics-jvm:4.1.5 - https://metrics.dropwizard.io/metrics-jvm) * JVM Integration for Metrics (io.dropwizard.metrics:metrics-jvm:4.1.5 - https://metrics.dropwizard.io/metrics-jvm)
* Netty (io.netty:netty:3.10.6.Final - http://netty.io/) * micrometer-core (io.micrometer:micrometer-core:1.8.6 - https://github.com/micrometer-metrics/micrometer)
* Netty/Buffer (io.netty:netty-buffer:4.1.50.Final - https://netty.io/netty-buffer/)
* Netty/Buffer (io.netty:netty-buffer:4.1.68.Final - https://netty.io/netty-buffer/) * Netty/Buffer (io.netty:netty-buffer:4.1.68.Final - https://netty.io/netty-buffer/)
* Netty/Codec (io.netty:netty-codec:4.1.50.Final - https://netty.io/netty-codec/)
* Netty/Codec (io.netty:netty-codec:4.1.68.Final - https://netty.io/netty-codec/) * Netty/Codec (io.netty:netty-codec:4.1.68.Final - https://netty.io/netty-codec/)
* Netty/Codec/HTTP (io.netty:netty-codec-http:4.1.53.Final - https://netty.io/netty-codec-http/) * Netty/Codec/HTTP (io.netty:netty-codec-http:4.1.53.Final - https://netty.io/netty-codec-http/)
* Netty/Codec/Socks (io.netty:netty-codec-socks:4.1.53.Final - https://netty.io/netty-codec-socks/) * Netty/Codec/Socks (io.netty:netty-codec-socks:4.1.53.Final - https://netty.io/netty-codec-socks/)
* Netty/Common (io.netty:netty-common:4.1.50.Final - https://netty.io/netty-common/)
* Netty/Common (io.netty:netty-common:4.1.68.Final - https://netty.io/netty-common/) * Netty/Common (io.netty:netty-common:4.1.68.Final - https://netty.io/netty-common/)
* Netty/Handler (io.netty:netty-handler:4.1.50.Final - https://netty.io/netty-handler/)
* Netty/Handler (io.netty:netty-handler:4.1.68.Final - https://netty.io/netty-handler/) * Netty/Handler (io.netty:netty-handler:4.1.68.Final - https://netty.io/netty-handler/)
* Netty/Handler/Proxy (io.netty:netty-handler-proxy:4.1.53.Final - https://netty.io/netty-handler-proxy/) * Netty/Handler/Proxy (io.netty:netty-handler-proxy:4.1.53.Final - https://netty.io/netty-handler-proxy/)
* Netty/Resolver (io.netty:netty-resolver:4.1.50.Final - https://netty.io/netty-resolver/) * Netty/Resolver (io.netty:netty-resolver:4.1.68.Final - https://netty.io/netty-resolver/)
* Netty/Transport (io.netty:netty-transport:4.1.50.Final - https://netty.io/netty-transport/)
* Netty/Transport (io.netty:netty-transport:4.1.68.Final - https://netty.io/netty-transport/) * Netty/Transport (io.netty:netty-transport:4.1.68.Final - https://netty.io/netty-transport/)
* Netty/Transport/Native/Epoll (io.netty:netty-transport-native-epoll:4.1.50.Final - https://netty.io/netty-transport-native-epoll/) * Netty/Transport/Native/Epoll (io.netty:netty-transport-native-epoll:4.1.68.Final - https://netty.io/netty-transport-native-epoll/)
* Netty/Transport/Native/Unix/Common (io.netty:netty-transport-native-unix-common:4.1.50.Final - https://netty.io/netty-transport-native-unix-common/) * Netty/Transport/Native/Unix/Common (io.netty:netty-transport-native-unix-common:4.1.68.Final - https://netty.io/netty-transport-native-unix-common/)
* OpenTracing API (io.opentracing:opentracing-api:0.33.0 - https://github.com/opentracing/opentracing-java/opentracing-api) * OpenTracing API (io.opentracing:opentracing-api:0.33.0 - https://github.com/opentracing/opentracing-java/opentracing-api)
* OpenTracing-noop (io.opentracing:opentracing-noop:0.33.0 - https://github.com/opentracing/opentracing-java/opentracing-noop) * OpenTracing-noop (io.opentracing:opentracing-noop:0.33.0 - https://github.com/opentracing/opentracing-java/opentracing-noop)
* OpenTracing-util (io.opentracing:opentracing-util:0.33.0 - https://github.com/opentracing/opentracing-java/opentracing-util) * OpenTracing-util (io.opentracing:opentracing-util:0.33.0 - https://github.com/opentracing/opentracing-java/opentracing-util)
@@ -147,28 +141,27 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Byte Buddy (without dependencies) (net.bytebuddy:byte-buddy:1.11.13 - https://bytebuddy.net/byte-buddy) * Byte Buddy (without dependencies) (net.bytebuddy:byte-buddy:1.11.13 - https://bytebuddy.net/byte-buddy)
* Byte Buddy agent (net.bytebuddy:byte-buddy-agent:1.11.13 - https://bytebuddy.net/byte-buddy-agent) * Byte Buddy agent (net.bytebuddy:byte-buddy-agent:1.11.13 - https://bytebuddy.net/byte-buddy-agent)
* eigenbase-properties (net.hydromatic:eigenbase-properties:1.1.5 - http://github.com/julianhyde/eigenbase-properties) * eigenbase-properties (net.hydromatic:eigenbase-properties:1.1.5 - http://github.com/julianhyde/eigenbase-properties)
* Java Native Access (net.java.dev.jna:jna:5.5.0 - https://github.com/java-native-access/jna)
* json-unit-core (net.javacrumbs.json-unit:json-unit-core:2.19.0 - https://github.com/lukas-krecan/JsonUnit/json-unit-core) * json-unit-core (net.javacrumbs.json-unit:json-unit-core:2.19.0 - https://github.com/lukas-krecan/JsonUnit/json-unit-core)
* "Java Concurrency in Practice" book annotations (net.jcip:jcip-annotations:1.0 - http://jcip.net/) * "Java Concurrency in Practice" book annotations (net.jcip:jcip-annotations:1.0 - http://jcip.net/)
* ASM based accessors helper used by json-smart (net.minidev:accessors-smart:1.2 - http://www.minidev.net/) * ASM based accessors helper used by json-smart (net.minidev:accessors-smart:1.2 - http://www.minidev.net/)
* ASM based accessors helper used by json-smart (net.minidev:accessors-smart:2.4.7 - https://urielch.github.io/)
* JSON Small and Fast Parser (net.minidev:json-smart:2.3 - http://www.minidev.net/) * JSON Small and Fast Parser (net.minidev:json-smart:2.3 - http://www.minidev.net/)
* ehcache (net.sf.ehcache:ehcache:2.10.6 - http://ehcache.org) * JSON Small and Fast Parser (net.minidev:json-smart:2.4.7 - https://urielch.github.io/)
* Ehcache Core (net.sf.ehcache:ehcache-core:2.6.11 - http://ehcache.org)
* Abdera Core (org.apache.abdera:abdera-core:1.1.3 - http://abdera.apache.org/abdera-core) * Abdera Core (org.apache.abdera:abdera-core:1.1.3 - http://abdera.apache.org/abdera-core)
* I18N Libraries (org.apache.abdera:abdera-i18n:1.1.3 - http://abdera.apache.org) * I18N Libraries (org.apache.abdera:abdera-i18n:1.1.3 - http://abdera.apache.org)
* Apache Ant Core (org.apache.ant:ant:1.10.11 - https://ant.apache.org/) * Apache Ant Core (org.apache.ant:ant:1.10.11 - https://ant.apache.org/)
* Apache Ant Launcher (org.apache.ant:ant-launcher:1.10.11 - https://ant.apache.org/) * Apache Ant Launcher (org.apache.ant:ant-launcher:1.10.11 - https://ant.apache.org/)
* Apache Commons BCEL (org.apache.bcel:bcel:6.4.0 - https://commons.apache.org/proper/commons-bcel) * Apache Commons BCEL (org.apache.bcel:bcel:6.4.0 - https://commons.apache.org/proper/commons-bcel)
* Calcite Core (org.apache.calcite:calcite-core:1.18.0 - https://calcite.apache.org/calcite-core) * Calcite Core (org.apache.calcite:calcite-core:1.27.0 - https://calcite.apache.org)
* Calcite Linq4j (org.apache.calcite:calcite-linq4j:1.18.0 - https://calcite.apache.org/calcite-linq4j) * Calcite Linq4j (org.apache.calcite:calcite-linq4j:1.27.0 - https://calcite.apache.org)
* Apache Calcite Avatica (org.apache.calcite.avatica:avatica-core:1.13.0 - https://calcite.apache.org/avatica/avatica-core) * Apache Calcite Avatica (org.apache.calcite.avatica:avatica-core:1.18.0 - https://calcite.apache.org/avatica)
* Apache Commons Collections (org.apache.commons:commons-collections4:4.1 - http://commons.apache.org/proper/commons-collections/) * Apache Commons Collections (org.apache.commons:commons-collections4:4.1 - http://commons.apache.org/proper/commons-collections/)
* Apache Commons Compress (org.apache.commons:commons-compress:1.20 - https://commons.apache.org/proper/commons-compress/) * Apache Commons Compress (org.apache.commons:commons-compress:1.21 - https://commons.apache.org/proper/commons-compress/)
* Apache Commons Configuration (org.apache.commons:commons-configuration2:2.7 - https://commons.apache.org/proper/commons-configuration/) * Apache Commons Configuration (org.apache.commons:commons-configuration2:2.7 - https://commons.apache.org/proper/commons-configuration/)
* Apache Commons CSV (org.apache.commons:commons-csv:1.8 - https://commons.apache.org/proper/commons-csv/) * Apache Commons CSV (org.apache.commons:commons-csv:1.9.0 - https://commons.apache.org/proper/commons-csv/)
* Apache Commons DBCP (org.apache.commons:commons-dbcp2:2.8.0 - https://commons.apache.org/dbcp/) * Apache Commons DBCP (org.apache.commons:commons-dbcp2:2.8.0 - https://commons.apache.org/dbcp/)
* Apache Commons Exec (org.apache.commons:commons-exec:1.3 - http://commons.apache.org/proper/commons-exec/) * Apache Commons Exec (org.apache.commons:commons-exec:1.3 - http://commons.apache.org/proper/commons-exec/)
* Apache Commons Lang (org.apache.commons:commons-lang3:3.7 - http://commons.apache.org/proper/commons-lang/) * Apache Commons Lang (org.apache.commons:commons-lang3:3.12.0 - https://commons.apache.org/proper/commons-lang/)
* Apache Commons Math (org.apache.commons:commons-math3:3.6.1 - http://commons.apache.org/proper/commons-math/) * Apache Commons Math (org.apache.commons:commons-math3:3.6.1 - http://commons.apache.org/proper/commons-math/)
* Apache Commons Pool (org.apache.commons:commons-pool2:2.9.0 - https://commons.apache.org/proper/commons-pool/) * Apache Commons Pool (org.apache.commons:commons-pool2:2.9.0 - https://commons.apache.org/proper/commons-pool/)
* Apache Commons Text (org.apache.commons:commons-text:1.8 - https://commons.apache.org/proper/commons-text) * Apache Commons Text (org.apache.commons:commons-text:1.8 - https://commons.apache.org/proper/commons-text)
@@ -176,24 +169,17 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Curator Client (org.apache.curator:curator-client:2.13.0 - http://curator.apache.org/curator-client) * Curator Client (org.apache.curator:curator-client:2.13.0 - http://curator.apache.org/curator-client)
* Curator Framework (org.apache.curator:curator-framework:2.13.0 - http://curator.apache.org/curator-framework) * Curator Framework (org.apache.curator:curator-framework:2.13.0 - http://curator.apache.org/curator-framework)
* Curator Recipes (org.apache.curator:curator-recipes:2.13.0 - http://curator.apache.org/curator-recipes) * Curator Recipes (org.apache.curator:curator-recipes:2.13.0 - http://curator.apache.org/curator-recipes)
* Apache CXF Core (org.apache.cxf:cxf-core:3.3.6 - https://cxf.apache.org) * Apache Hadoop Annotations (org.apache.hadoop:hadoop-annotations:3.2.2 - no url defined)
* Apache CXF Runtime JAX-RS Frontend (org.apache.cxf:cxf-rt-frontend-jaxrs:3.3.6 - https://cxf.apache.org) * Apache Hadoop Auth (org.apache.hadoop:hadoop-auth:3.2.2 - no url defined)
* Apache CXF JAX-RS Client (org.apache.cxf:cxf-rt-rs-client:3.3.6 - https://cxf.apache.org) * Apache Hadoop Common (org.apache.hadoop:hadoop-common:3.2.2 - no url defined)
* Apache CXF Runtime Security functionality (org.apache.cxf:cxf-rt-security:3.3.6 - https://cxf.apache.org) * Apache Hadoop HDFS Client (org.apache.hadoop:hadoop-hdfs-client:3.2.2 - no url defined)
* Apache CXF Runtime HTTP Transport (org.apache.cxf:cxf-rt-transports-http:3.3.6 - https://cxf.apache.org)
* JTA 1.1 (org.apache.geronimo.specs:geronimo-jta_1.1_spec:1.1.1 - http://geronimo.apache.org/specs/geronimo-jta_1.1_spec)
* Web Services Metadata 2.0 (org.apache.geronimo.specs:geronimo-ws-metadata_2.0_spec:1.1.3 - http://geronimo.apache.org/maven/specs/geronimo-ws-metadata_2.0_spec/1.1.3)
* Apache Hadoop Annotations (org.apache.hadoop:hadoop-annotations:3.2.0 - no url defined)
* Apache Hadoop Auth (org.apache.hadoop:hadoop-auth:3.2.0 - no url defined)
* Apache Hadoop Common (org.apache.hadoop:hadoop-common:3.2.0 - no url defined)
* Apache Hadoop HDFS Client (org.apache.hadoop:hadoop-hdfs-client:3.2.0 - no url defined)
* htrace-core4 (org.apache.htrace:htrace-core4:4.1.0-incubating - http://incubator.apache.org/projects/htrace.html) * htrace-core4 (org.apache.htrace:htrace-core4:4.1.0-incubating - http://incubator.apache.org/projects/htrace.html)
* Apache HttpClient (org.apache.httpcomponents:httpclient:4.5.13 - http://hc.apache.org/httpcomponents-client) * Apache HttpClient (org.apache.httpcomponents:httpclient:4.5.13 - http://hc.apache.org/httpcomponents-client)
* Apache HttpClient Cache (org.apache.httpcomponents:httpclient-cache:4.2.6 - http://hc.apache.org/httpcomponents-client) * Apache HttpClient Cache (org.apache.httpcomponents:httpclient-cache:4.2.6 - http://hc.apache.org/httpcomponents-client)
* Apache HttpCore (org.apache.httpcomponents:httpcore:4.4.4 - http://hc.apache.org/httpcomponents-core-ga) * Apache HttpCore (org.apache.httpcomponents:httpcore:4.4.15 - http://hc.apache.org/httpcomponents-core-ga)
* Apache HttpClient Mime (org.apache.httpcomponents:httpmime:4.5.12 - http://hc.apache.org/httpcomponents-client) * Apache HttpClient Mime (org.apache.httpcomponents:httpmime:4.5.13 - http://hc.apache.org/httpcomponents-client)
* Apache James :: Mime4j :: Core (org.apache.james:apache-mime4j-core:0.8.3 - http://james.apache.org/mime4j/apache-mime4j-core) * Apache James :: Mime4j :: Core (org.apache.james:apache-mime4j-core:0.8.4 - http://james.apache.org/mime4j/apache-mime4j-core)
* Apache James :: Mime4j :: DOM (org.apache.james:apache-mime4j-dom:0.8.3 - http://james.apache.org/mime4j/apache-mime4j-dom) * Apache James :: Mime4j :: DOM (org.apache.james:apache-mime4j-dom:0.8.4 - http://james.apache.org/mime4j/apache-mime4j-dom)
* Apache Jena - Libraries POM (org.apache.jena:apache-jena-libs:2.13.0 - http://jena.apache.org/apache-jena-libs/) * Apache Jena - Libraries POM (org.apache.jena:apache-jena-libs:2.13.0 - http://jena.apache.org/apache-jena-libs/)
* Apache Jena - ARQ (SPARQL 1.1 Query Engine) (org.apache.jena:jena-arq:2.13.0 - http://jena.apache.org/jena-arq/) * Apache Jena - ARQ (SPARQL 1.1 Query Engine) (org.apache.jena:jena-arq:2.13.0 - http://jena.apache.org/jena-arq/)
* Apache Jena - Core (org.apache.jena:jena-core:2.13.0 - http://jena.apache.org/jena-core/) * Apache Jena - Core (org.apache.jena:jena-core:2.13.0 - http://jena.apache.org/jena-core/)
@@ -207,121 +193,131 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Apache Log4j API (org.apache.logging.log4j:log4j-api:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-api/) * Apache Log4j API (org.apache.logging.log4j:log4j-api:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-api/)
* Apache Log4j Core (org.apache.logging.log4j:log4j-core:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-core/) * Apache Log4j Core (org.apache.logging.log4j:log4j-core:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-core/)
* Apache Log4j JUL Adapter (org.apache.logging.log4j:log4j-jul:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-jul/) * Apache Log4j JUL Adapter (org.apache.logging.log4j:log4j-jul:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-jul/)
* Apache Log4j Layout for JSON template (org.apache.logging.log4j:log4j-layout-template-json:2.16.0 - https://logging.apache.org/log4j/2.x/log4j-layout-template-json/)
* Apache Log4j SLF4J Binding (org.apache.logging.log4j:log4j-slf4j-impl:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-slf4j-impl/) * Apache Log4j SLF4J Binding (org.apache.logging.log4j:log4j-slf4j-impl:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-slf4j-impl/)
* Apache Log4j Web (org.apache.logging.log4j:log4j-web:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-web/) * Apache Log4j Web (org.apache.logging.log4j:log4j-web:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-web/)
* Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-common) * Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-common)
* Lucene ICU Analysis Components (org.apache.lucene:lucene-analyzers-icu:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-icu) * Lucene ICU Analysis Components (org.apache.lucene:lucene-analyzers-icu:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-icu)
* Lucene Kuromoji Japanese Morphological Analyzer (org.apache.lucene:lucene-analyzers-kuromoji:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-kuromoji) * Lucene Kuromoji Japanese Morphological Analyzer (org.apache.lucene:lucene-analyzers-kuromoji:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-kuromoji)
* Lucene Nori Korean Morphological Analyzer (org.apache.lucene:lucene-analyzers-nori:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-nori) * Lucene Nori Korean Morphological Analyzer (org.apache.lucene:lucene-analyzers-nori:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-nori)
* Lucene Phonetic Filters (org.apache.lucene:lucene-analyzers-phonetic:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-phonetic) * Lucene Phonetic Filters (org.apache.lucene:lucene-analyzers-phonetic:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-phonetic)
* Lucene Smart Chinese Analyzer (org.apache.lucene:lucene-analyzers-smartcn:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-smartcn) * Lucene Smart Chinese Analyzer (org.apache.lucene:lucene-analyzers-smartcn:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-smartcn)
* Lucene Stempel Analyzer (org.apache.lucene:lucene-analyzers-stempel:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-stempel) * Lucene Stempel Analyzer (org.apache.lucene:lucene-analyzers-stempel:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-stempel)
* Lucene Memory (org.apache.lucene:lucene-backward-codecs:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-backward-codecs) * Lucene Memory (org.apache.lucene:lucene-backward-codecs:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-backward-codecs)
* Lucene Classification (org.apache.lucene:lucene-classification:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-classification) * Lucene Classification (org.apache.lucene:lucene-classification:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-classification)
* Lucene codecs (org.apache.lucene:lucene-codecs:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-codecs) * Lucene codecs (org.apache.lucene:lucene-codecs:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-codecs)
* Lucene Core (org.apache.lucene:lucene-core:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-core) * Lucene Core (org.apache.lucene:lucene-core:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-core)
* Lucene Expressions (org.apache.lucene:lucene-expressions:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-expressions) * Lucene Expressions (org.apache.lucene:lucene-expressions:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-expressions)
* Lucene Grouping (org.apache.lucene:lucene-grouping:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-grouping) * Lucene Grouping (org.apache.lucene:lucene-grouping:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-grouping)
* Lucene Highlighter (org.apache.lucene:lucene-highlighter:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-highlighter) * Lucene Highlighter (org.apache.lucene:lucene-highlighter:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-highlighter)
* Lucene Join (org.apache.lucene:lucene-join:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-join) * Lucene Join (org.apache.lucene:lucene-join:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-join)
* Lucene Memory (org.apache.lucene:lucene-memory:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-memory) * Lucene Memory (org.apache.lucene:lucene-memory:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-memory)
* Lucene Miscellaneous (org.apache.lucene:lucene-misc:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-misc) * Lucene Miscellaneous (org.apache.lucene:lucene-misc:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-misc)
* Lucene Queries (org.apache.lucene:lucene-queries:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-queries) * Lucene Queries (org.apache.lucene:lucene-queries:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-queries)
* Lucene QueryParsers (org.apache.lucene:lucene-queryparser:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-queryparser) * Lucene QueryParsers (org.apache.lucene:lucene-queryparser:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-queryparser)
* Lucene Sandbox (org.apache.lucene:lucene-sandbox:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-sandbox) * Lucene Sandbox (org.apache.lucene:lucene-sandbox:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-sandbox)
* Lucene Spatial Extras (org.apache.lucene:lucene-spatial-extras:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-spatial-extras) * Lucene Spatial Extras (org.apache.lucene:lucene-spatial-extras:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-spatial-extras)
* Lucene Spatial 3D (org.apache.lucene:lucene-spatial3d:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-spatial3d) * Lucene Spatial 3D (org.apache.lucene:lucene-spatial3d:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-spatial3d)
* Lucene Suggest (org.apache.lucene:lucene-suggest:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-suggest) * Lucene Suggest (org.apache.lucene:lucene-suggest:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-suggest)
* Apache OpenNLP Tools (org.apache.opennlp:opennlp-tools:1.9.2 - https://www.apache.org/opennlp/opennlp-tools/)
* Apache FontBox (org.apache.pdfbox:fontbox:2.0.24 - http://pdfbox.apache.org/) * Apache FontBox (org.apache.pdfbox:fontbox:2.0.24 - http://pdfbox.apache.org/)
* PDFBox JBIG2 ImageIO plugin (org.apache.pdfbox:jbig2-imageio:3.0.3 - https://www.apache.org/jbig2-imageio/) * PDFBox JBIG2 ImageIO plugin (org.apache.pdfbox:jbig2-imageio:3.0.3 - https://www.apache.org/jbig2-imageio/)
* Apache JempBox (org.apache.pdfbox:jempbox:1.8.16 - http://www.apache.org/pdfbox-parent/jempbox/) * Apache JempBox (org.apache.pdfbox:jempbox:1.8.16 - http://www.apache.org/pdfbox-parent/jempbox/)
* Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.24 - https://www.apache.org/pdfbox-parent/pdfbox/) * Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.24 - https://www.apache.org/pdfbox-parent/pdfbox/)
* Apache PDFBox tools (org.apache.pdfbox:pdfbox-tools:2.0.19 - https://www.apache.org/pdfbox-parent/pdfbox-tools/) * Apache PDFBox Debugger (org.apache.pdfbox:pdfbox-debugger:2.0.25 - https://www.apache.org/pdfbox-parent/pdfbox-debugger/)
* Apache Preflight (org.apache.pdfbox:preflight:2.0.19 - https://www.apache.org/pdfbox-parent/preflight/) * Apache PDFBox tools (org.apache.pdfbox:pdfbox-tools:2.0.25 - https://www.apache.org/pdfbox-parent/pdfbox-tools/)
* Apache XmpBox (org.apache.pdfbox:xmpbox:2.0.19 - https://www.apache.org/pdfbox-parent/xmpbox/) * Apache XmpBox (org.apache.pdfbox:xmpbox:2.0.25 - https://www.apache.org/pdfbox-parent/xmpbox/)
* Apache POI (org.apache.poi:poi:3.17 - http://poi.apache.org/) * Apache POI - Common (org.apache.poi:poi:5.2.0 - https://poi.apache.org/)
* Apache POI (org.apache.poi:poi-ooxml:3.17 - http://poi.apache.org/) * Apache POI - API based on OPC and OOXML schemas (org.apache.poi:poi-ooxml:5.2.0 - https://poi.apache.org/)
* Apache POI (org.apache.poi:poi-ooxml-schemas:3.17 - http://poi.apache.org/) * Apache POI (org.apache.poi:poi-ooxml-lite:5.2.0 - https://poi.apache.org/)
* Apache POI (org.apache.poi:poi-scratchpad:3.17 - http://poi.apache.org/) * Apache POI (org.apache.poi:poi-scratchpad:5.2.0 - https://poi.apache.org/)
* Apache SIS features (org.apache.sis.core:sis-feature:1.0 - http://sis.apache.org/core/sis-feature) * Apache Solr Core (org.apache.solr:solr-core:8.11.1 - https://lucene.apache.org/solr-parent/solr-core)
* Apache SIS metadata (org.apache.sis.core:sis-metadata:1.0 - http://sis.apache.org/core/sis-metadata) * Apache Solr Solrj (org.apache.solr:solr-solrj:8.11.1 - https://lucene.apache.org/solr-parent/solr-solrj)
* Apache SIS referencing (org.apache.sis.core:sis-referencing:1.0 - http://sis.apache.org/core/sis-referencing)
* Apache SIS utilities (org.apache.sis.core:sis-utility:1.0 - http://sis.apache.org/core/sis-utility)
* Apache SIS netCDF storage (org.apache.sis.storage:sis-netcdf:1.0 - http://sis.apache.org/storage/sis-netcdf)
* Apache SIS common storage (org.apache.sis.storage:sis-storage:1.0 - http://sis.apache.org/storage/sis-storage)
* Apache Solr Content Extraction Library (org.apache.solr:solr-cell:8.8.1 - https://lucene.apache.org/solr-parent/solr-cell)
* Apache Solr Core (org.apache.solr:solr-core:8.8.1 - https://lucene.apache.org/solr-parent/solr-core)
* Apache Solr Solrj (org.apache.solr:solr-solrj:8.8.1 - https://lucene.apache.org/solr-parent/solr-solrj)
* Apache Standard Taglib Implementation (org.apache.taglibs:taglibs-standard-impl:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-impl) * Apache Standard Taglib Implementation (org.apache.taglibs:taglibs-standard-impl:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-impl)
* Apache Standard Taglib Specification API (org.apache.taglibs:taglibs-standard-spec:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-spec) * Apache Standard Taglib Specification API (org.apache.taglibs:taglibs-standard-spec:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-spec)
* Apache Thrift (org.apache.thrift:libthrift:0.9.2 - http://thrift.apache.org) * Apache Thrift (org.apache.thrift:libthrift:0.9.2 - http://thrift.apache.org)
* Apache Tika core (org.apache.tika:tika-core:1.24.1 - http://tika.apache.org/) * Apache Tika core (org.apache.tika:tika-core:2.3.0 - https://tika.apache.org/)
* Apache Tika Java-7 Components (org.apache.tika:tika-java7:1.24.1 - http://tika.apache.org/) * Apache Tika Apple parser module (org.apache.tika:tika-parser-apple-module:2.3.0 - https://tika.apache.org/tika-parser-apple-module/)
* Apache Tika parsers (org.apache.tika:tika-parsers:1.24.1 - http://tika.apache.org/) * Apache Tika audiovideo parser module (org.apache.tika:tika-parser-audiovideo-module:2.3.0 - https://tika.apache.org/tika-parser-audiovideo-module/)
* Apache Tika XMP (org.apache.tika:tika-xmp:1.24.1 - http://tika.apache.org/) * Apache Tika cad parser module (org.apache.tika:tika-parser-cad-module:2.3.0 - https://tika.apache.org/tika-parser-cad-module/)
* tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:9.0.33 - https://tomcat.apache.org/) * Apache Tika code parser module (org.apache.tika:tika-parser-code-module:2.3.0 - https://tika.apache.org/tika-parser-code-module/)
* tomcat-embed-el (org.apache.tomcat.embed:tomcat-embed-el:9.0.33 - https://tomcat.apache.org/) * Apache Tika crypto parser module (org.apache.tika:tika-parser-crypto-module:2.3.0 - https://tika.apache.org/tika-parser-crypto-module/)
* tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:9.0.33 - https://tomcat.apache.org/) * Apache Tika digest commons (org.apache.tika:tika-parser-digest-commons:2.3.0 - https://tika.apache.org/tika-parser-digest-commons/)
* Apache Velocity - Engine (org.apache.velocity:velocity-engine-core:2.2 - http://velocity.apache.org/engine/devel/velocity-engine-core/) * Apache Tika font parser module (org.apache.tika:tika-parser-font-module:2.3.0 - https://tika.apache.org/tika-parser-font-module/)
* Apache Tika html commons (org.apache.tika:tika-parser-html-commons:2.3.0 - https://tika.apache.org/tika-parser-html-commons/)
* Apache Tika html parser module (org.apache.tika:tika-parser-html-module:2.3.0 - https://tika.apache.org/tika-parser-html-module/)
* Apache Tika image parser module (org.apache.tika:tika-parser-image-module:2.3.0 - https://tika.apache.org/tika-parser-image-module/)
* Apache Tika mail commons (org.apache.tika:tika-parser-mail-commons:2.3.0 - https://tika.apache.org/tika-parser-mail-commons/)
* Apache Tika mail parser module (org.apache.tika:tika-parser-mail-module:2.3.0 - https://tika.apache.org/tika-parser-mail-module/)
* Apache Tika Microsoft parser module (org.apache.tika:tika-parser-microsoft-module:2.3.0 - https://tika.apache.org/tika-parser-microsoft-module/)
* Apache Tika miscellaneous office format parser module (org.apache.tika:tika-parser-miscoffice-module:2.3.0 - https://tika.apache.org/tika-parser-miscoffice-module/)
* Apache Tika news parser module (org.apache.tika:tika-parser-news-module:2.3.0 - https://tika.apache.org/tika-parser-news-module/)
* Apache Tika OCR parser module (org.apache.tika:tika-parser-ocr-module:2.3.0 - https://tika.apache.org/tika-parser-ocr-module/)
* Apache Tika PDF parser module (org.apache.tika:tika-parser-pdf-module:2.3.0 - https://tika.apache.org/tika-parser-pdf-module/)
* Apache Tika package parser module (org.apache.tika:tika-parser-pkg-module:2.3.0 - https://tika.apache.org/tika-parser-pkg-module/)
* Apache Tika text parser module (org.apache.tika:tika-parser-text-module:2.3.0 - https://tika.apache.org/tika-parser-text-module/)
* Apache Tika XML parser module (org.apache.tika:tika-parser-xml-module:2.3.0 - https://tika.apache.org/tika-parser-xml-module/)
* Apache Tika XMP commons (org.apache.tika:tika-parser-xmp-commons:2.3.0 - https://tika.apache.org/tika-parser-xmp-commons/)
* Apache Tika ZIP commons (org.apache.tika:tika-parser-zip-commons:2.3.0 - https://tika.apache.org/tika-parser-zip-commons/)
* Apache Tika standard parser package (org.apache.tika:tika-parsers-standard-package:2.3.0 - https://tika.apache.org/tika-parsers/tika-parsers-standard/tika-parsers-standard-package/)
* tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:9.0.63 - https://tomcat.apache.org/)
* tomcat-embed-el (org.apache.tomcat.embed:tomcat-embed-el:9.0.63 - https://tomcat.apache.org/)
* tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:9.0.63 - https://tomcat.apache.org/)
* Apache Velocity - Engine (org.apache.velocity:velocity-engine-core:2.3 - http://velocity.apache.org/engine/devel/velocity-engine-core/)
* Apache Velocity - JSR 223 Scripting (org.apache.velocity:velocity-engine-scripting:2.2 - http://velocity.apache.org/engine/devel/velocity-engine-scripting/) * Apache Velocity - JSR 223 Scripting (org.apache.velocity:velocity-engine-scripting:2.2 - http://velocity.apache.org/engine/devel/velocity-engine-scripting/)
* Axiom API (org.apache.ws.commons.axiom:axiom-api:1.2.22 - http://ws.apache.org/axiom/) * Axiom API (org.apache.ws.commons.axiom:axiom-api:1.2.22 - http://ws.apache.org/axiom/)
* LLOM (org.apache.ws.commons.axiom:axiom-impl:1.2.22 - http://ws.apache.org/axiom/implementations/axiom-impl/)
* Abdera Model (FOM) Implementation (org.apache.ws.commons.axiom:fom-impl:1.2.22 - http://ws.apache.org/axiom/implementations/fom-impl/) * Abdera Model (FOM) Implementation (org.apache.ws.commons.axiom:fom-impl:1.2.22 - http://ws.apache.org/axiom/implementations/fom-impl/)
* XmlSchema Core (org.apache.ws.xmlschema:xmlschema-core:2.2.5 - https://ws.apache.org/commons/xmlschema20/xmlschema-core/) * XmlBeans (org.apache.xmlbeans:xmlbeans:5.0.3 - https://xmlbeans.apache.org/)
* XmlBeans (org.apache.xmlbeans:xmlbeans:3.1.0 - https://xmlbeans.apache.org/) * Apache ZooKeeper - Server (org.apache.zookeeper:zookeeper:3.6.2 - http://zookeeper.apache.org/zookeeper)
* zookeeper (org.apache.zookeeper:zookeeper:3.4.14 - no url defined)
* Apache ZooKeeper - Jute (org.apache.zookeeper:zookeeper-jute:3.6.2 - http://zookeeper.apache.org/zookeeper-jute) * Apache ZooKeeper - Jute (org.apache.zookeeper:zookeeper-jute:3.6.2 - http://zookeeper.apache.org/zookeeper-jute)
* AssertJ fluent assertions (org.assertj:assertj-core:3.13.2 - http://assertj.org/assertj-core) * org.apiguardian:apiguardian-api (org.apiguardian:apiguardian-api:1.1.0 - https://github.com/apiguardian-team/apiguardian)
* Evo Inflector (org.atteo:evo-inflector:1.2.2 - http://atteo.org/static/evo-inflector) * AssertJ fluent assertions (org.assertj:assertj-core:3.21.0 - https://assertj.github.io/doc/assertj-core/)
* Evo Inflector (org.atteo:evo-inflector:1.3 - http://atteo.org/static/evo-inflector)
* jose4j (org.bitbucket.b_c:jose4j:0.6.5 - https://bitbucket.org/b_c/jose4j/) * jose4j (org.bitbucket.b_c:jose4j:0.6.5 - https://bitbucket.org/b_c/jose4j/)
* TagSoup (org.ccil.cowan.tagsoup:tagsoup:1.2.1 - http://home.ccil.org/~cowan/XML/tagsoup/) * TagSoup (org.ccil.cowan.tagsoup:tagsoup:1.2.1 - http://home.ccil.org/~cowan/XML/tagsoup/)
* Woodstox (org.codehaus.woodstox:woodstox-core-asl:4.4.1 - http://woodstox.codehaus.org)
* jems (org.dmfs:jems:1.18 - https://github.com/dmfs/jems) * jems (org.dmfs:jems:1.18 - https://github.com/dmfs/jems)
* rfc3986-uri (org.dmfs:rfc3986-uri:0.8.1 - https://github.com/dmfs/uri-toolkit) * rfc3986-uri (org.dmfs:rfc3986-uri:0.8.1 - https://github.com/dmfs/uri-toolkit)
* Jetty :: Apache JSP Implementation (org.eclipse.jetty:apache-jsp:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Apache JSP Implementation (org.eclipse.jetty:apache-jsp:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - http://tomcat.apache.org/taglibs/standard/) * Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - http://tomcat.apache.org/taglibs/standard/)
* Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client) * Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client)
* Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client) * Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client)
* Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server) * Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server)
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
* Jetty :: Servlet Annotations (org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Servlet Annotations (org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-client) * Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-client)
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-continuation) * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-continuation)
* Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-deploy) * Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-deploy)
* Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-http) * Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-http)
* Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-io) * Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-io)
* Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-jmx) * Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-jmx)
* Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-rewrite) * Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-rewrite)
* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-security) * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-security)
* Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-server) * Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-server)
* Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-servlet) * Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-servlet)
* Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-servlets) * Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-servlets)
* Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-util) * Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-util)
* Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-webapp) * Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-webapp)
* Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-xml) * Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-xml)
* Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-client) * Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-client)
* Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-common) * Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-common)
* Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-hpack) * Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-hpack)
* Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport) * Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport)
* Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-server) * Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-server)
* Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas) * Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas)
* Ehcache (org.ehcache:ehcache:3.4.0 - http://ehcache.org) * Ehcache (org.ehcache:ehcache:3.4.0 - http://ehcache.org)
* flyway-core (org.flywaydb:flyway-core:6.5.7 - https://flywaydb.org/flyway-core) * flyway-core (org.flywaydb:flyway-core:8.4.4 - https://flywaydb.org/flyway-core)
* Ogg and Vorbis for Java, Core (org.gagravarr:vorbis-java-core:0.8 - https://github.com/Gagravarr/VorbisJava) * Ogg and Vorbis for Java, Core (org.gagravarr:vorbis-java-core:0.8 - https://github.com/Gagravarr/VorbisJava)
* Apache Tika plugin for Ogg, Vorbis and FLAC (org.gagravarr:vorbis-java-tika:0.8 - https://github.com/Gagravarr/VorbisJava) * Apache Tika plugin for Ogg, Vorbis and FLAC (org.gagravarr:vorbis-java-tika:0.8 - https://github.com/Gagravarr/VorbisJava)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-core-common (org.glassfish.jersey.core:jersey-common:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common) * jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* Hibernate Validator Engine (org.hibernate.validator:hibernate-validator:6.0.18.Final - http://hibernate.org/validator/hibernate-validator) * Hibernate Validator Engine (org.hibernate.validator:hibernate-validator:6.0.23.Final - http://hibernate.org/validator/hibernate-validator)
* Hibernate Validator Portable Extension (org.hibernate.validator:hibernate-validator-cdi:6.0.18.Final - http://hibernate.org/validator/hibernate-validator-cdi) * Hibernate Validator Portable Extension (org.hibernate.validator:hibernate-validator-cdi:6.0.23.Final - http://hibernate.org/validator/hibernate-validator-cdi)
* Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/) * Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/)
* Java Annotation Indexer (org.jboss:jandex:2.1.1.Final - http://www.jboss.org/jandex) * Java Annotation Indexer (org.jboss:jandex:2.4.2.Final - http://www.jboss.org/jandex)
* JBoss Logging 3 (org.jboss.logging:jboss-logging:3.3.2.Final - http://www.jboss.org) * JBoss Logging 3 (org.jboss.logging:jboss-logging:3.4.3.Final - http://www.jboss.org)
* JDOM (org.jdom:jdom:1.1.3 - http://www.jdom.org) * JDOM (org.jdom:jdom2:2.0.6.1 - http://www.jdom.org)
* JDOM (org.jdom:jdom2:2.0.6 - http://www.jdom.org)
* jtwig-core (org.jtwig:jtwig-core:5.87.0.RELEASE - http://jtwig.org) * jtwig-core (org.jtwig:jtwig-core:5.87.0.RELEASE - http://jtwig.org)
* jtwig-reflection (org.jtwig:jtwig-reflection:5.87.0.RELEASE - http://jtwig.org) * jtwig-reflection (org.jtwig:jtwig-reflection:5.87.0.RELEASE - http://jtwig.org)
* jtwig-spring (org.jtwig:jtwig-spring:5.87.0.RELEASE - http://jtwig.org) * jtwig-spring (org.jtwig:jtwig-spring:5.87.0.RELEASE - http://jtwig.org)
@@ -341,69 +337,66 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Objenesis (org.objenesis:objenesis:3.2 - http://objenesis.org/objenesis) * Objenesis (org.objenesis:objenesis:3.2 - http://objenesis.org/objenesis)
* parboiled-core (org.parboiled:parboiled-core:1.3.1 - http://parboiled.org) * parboiled-core (org.parboiled:parboiled-core:1.3.1 - http://parboiled.org)
* parboiled-java (org.parboiled:parboiled-java:1.3.1 - http://parboiled.org) * parboiled-java (org.parboiled:parboiled-java:1.3.1 - http://parboiled.org)
* quartz (org.quartz-scheduler:quartz:2.3.2 - http://www.quartz-scheduler.org/quartz)
* rome-modules (org.rometools:rome-modules:1.0 - http://www.rometools.org)
* RRD4J (org.rrd4j:rrd4j:3.5 - https://github.com/rrd4j/rrd4j/) * RRD4J (org.rrd4j:rrd4j:3.5 - https://github.com/rrd4j/rrd4j/)
* JSONassert (org.skyscreamer:jsonassert:1.5.0 - https://github.com/skyscreamer/JSONassert) * JSONassert (org.skyscreamer:jsonassert:1.5.0 - https://github.com/skyscreamer/JSONassert)
* Spring AOP (org.springframework:spring-aop:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) * Spring AOP (org.springframework:spring-aop:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Beans (org.springframework:spring-beans:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) * Spring Beans (org.springframework:spring-beans:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Context (org.springframework:spring-context:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) * Spring Context (org.springframework:spring-context:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Context Support (org.springframework:spring-context-support:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) * Spring Context Support (org.springframework:spring-context-support:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Core (org.springframework:spring-core:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) * Spring Core (org.springframework:spring-core:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Expression Language (SpEL) (org.springframework:spring-expression:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) * Spring Expression Language (SpEL) (org.springframework:spring-expression:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Commons Logging Bridge (org.springframework:spring-jcl:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) * Spring Commons Logging Bridge (org.springframework:spring-jcl:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring JDBC (org.springframework:spring-jdbc:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) * Spring JDBC (org.springframework:spring-jdbc:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Object/Relational Mapping (org.springframework:spring-orm:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) * Spring Object/Relational Mapping (org.springframework:spring-orm:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring TestContext Framework (org.springframework:spring-test:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) * Spring TestContext Framework (org.springframework:spring-test:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Transaction (org.springframework:spring-tx:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) * Spring Transaction (org.springframework:spring-tx:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Web (org.springframework:spring-web:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) * Spring Web (org.springframework:spring-web:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Web MVC (org.springframework:spring-webmvc:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) * Spring Web MVC (org.springframework:spring-webmvc:5.3.20 - https://github.com/spring-projects/spring-framework)
* Spring Boot (org.springframework.boot:spring-boot:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot) * spring-boot (org.springframework.boot:spring-boot:2.6.8 - https://spring.io/projects/spring-boot)
* Spring Boot AutoConfigure (org.springframework.boot:spring-boot-autoconfigure:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-autoconfigure) * spring-boot-actuator (org.springframework.boot:spring-boot-actuator:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-actuator-autoconfigure (org.springframework.boot:spring-boot-actuator-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot)
* spring-boot-autoconfigure (org.springframework.boot:spring-boot-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot)
* Spring Boot Configuration Processor (org.springframework.boot:spring-boot-configuration-processor:2.0.0.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-tools/spring-boot-configuration-processor) * Spring Boot Configuration Processor (org.springframework.boot:spring-boot-configuration-processor:2.0.0.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-tools/spring-boot-configuration-processor)
* Spring Boot Starter (org.springframework.boot:spring-boot-starter:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter) * spring-boot-starter (org.springframework.boot:spring-boot-starter:2.6.8 - https://spring.io/projects/spring-boot)
* Spring Boot AOP Starter (org.springframework.boot:spring-boot-starter-aop:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-aop) * spring-boot-starter-actuator (org.springframework.boot:spring-boot-starter-actuator:2.6.8 - https://spring.io/projects/spring-boot)
* Spring Boot Cache Starter (org.springframework.boot:spring-boot-starter-cache:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-cache) * spring-boot-starter-aop (org.springframework.boot:spring-boot-starter-aop:2.6.8 - https://spring.io/projects/spring-boot)
* Spring Boot Data REST Starter (org.springframework.boot:spring-boot-starter-data-rest:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-data-rest) * spring-boot-starter-cache (org.springframework.boot:spring-boot-starter-cache:2.6.8 - https://spring.io/projects/spring-boot)
* Spring Boot Json Starter (org.springframework.boot:spring-boot-starter-json:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-json) * spring-boot-starter-data-rest (org.springframework.boot:spring-boot-starter-data-rest:2.6.8 - https://spring.io/projects/spring-boot)
* Spring Boot Log4j 2 Starter (org.springframework.boot:spring-boot-starter-log4j2:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-log4j2) * spring-boot-starter-json (org.springframework.boot:spring-boot-starter-json:2.6.8 - https://spring.io/projects/spring-boot)
* Spring Boot Security Starter (org.springframework.boot:spring-boot-starter-security:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-security) * spring-boot-starter-log4j2 (org.springframework.boot:spring-boot-starter-log4j2:2.6.8 - https://spring.io/projects/spring-boot)
* Spring Boot Test Starter (org.springframework.boot:spring-boot-starter-test:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-test) * spring-boot-starter-security (org.springframework.boot:spring-boot-starter-security:2.6.8 - https://spring.io/projects/spring-boot)
* Spring Boot Tomcat Starter (org.springframework.boot:spring-boot-starter-tomcat:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-tomcat) * spring-boot-starter-test (org.springframework.boot:spring-boot-starter-test:2.6.8 - https://spring.io/projects/spring-boot)
* Spring Boot Validation Starter (org.springframework.boot:spring-boot-starter-validation:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-validation) * spring-boot-starter-tomcat (org.springframework.boot:spring-boot-starter-tomcat:2.6.8 - https://spring.io/projects/spring-boot)
* Spring Boot Web Starter (org.springframework.boot:spring-boot-starter-web:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-web) * spring-boot-starter-web (org.springframework.boot:spring-boot-starter-web:2.6.8 - https://spring.io/projects/spring-boot)
* Spring Boot Test (org.springframework.boot:spring-boot-test:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-test) * spring-boot-test (org.springframework.boot:spring-boot-test:2.6.8 - https://spring.io/projects/spring-boot)
* Spring Boot Test Auto-Configure (org.springframework.boot:spring-boot-test-autoconfigure:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-test-autoconfigure) * spring-boot-test-autoconfigure (org.springframework.boot:spring-boot-test-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot)
* Spring Data Core (org.springframework.data:spring-data-commons:2.2.6.RELEASE - https://www.spring.io/spring-data/spring-data-commons) * Spring Data Core (org.springframework.data:spring-data-commons:2.6.4 - https://www.spring.io/spring-data/spring-data-commons)
* Spring Data REST - Core (org.springframework.data:spring-data-rest-core:3.2.6.RELEASE - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-core) * Spring Data REST - Core (org.springframework.data:spring-data-rest-core:3.6.4 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-core)
* Spring Data REST - HAL Browser (org.springframework.data:spring-data-rest-hal-browser:3.2.6.RELEASE - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-hal-browser) * Spring Data REST - WebMVC (org.springframework.data:spring-data-rest-webmvc:3.6.4 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-webmvc)
* Spring Data REST - WebMVC (org.springframework.data:spring-data-rest-webmvc:3.2.6.RELEASE - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-webmvc) * Spring HATEOAS (org.springframework.hateoas:spring-hateoas:1.4.2 - https://github.com/spring-projects/spring-hateoas)
* Spring HATEOAS (org.springframework.hateoas:spring-hateoas:1.0.4.RELEASE - https://github.com/spring-projects/spring-hateoas)
* Spring Plugin - Core (org.springframework.plugin:spring-plugin-core:2.0.0.RELEASE - https://github.com/spring-projects/spring-plugin/spring-plugin-core) * Spring Plugin - Core (org.springframework.plugin:spring-plugin-core:2.0.0.RELEASE - https://github.com/spring-projects/spring-plugin/spring-plugin-core)
* spring-security-config (org.springframework.security:spring-security-config:5.2.2.RELEASE - http://spring.io/spring-security) * spring-security-config (org.springframework.security:spring-security-config:5.6.5 - https://spring.io/projects/spring-security)
* spring-security-core (org.springframework.security:spring-security-core:5.2.2.RELEASE - http://spring.io/spring-security) * spring-security-core (org.springframework.security:spring-security-core:5.6.5 - https://spring.io/projects/spring-security)
* spring-security-test (org.springframework.security:spring-security-test:5.2.2.RELEASE - http://spring.io/spring-security) * spring-security-crypto (org.springframework.security:spring-security-crypto:5.6.5 - https://spring.io/projects/spring-security)
* spring-security-web (org.springframework.security:spring-security-web:5.2.2.RELEASE - http://spring.io/spring-security) * spring-security-test (org.springframework.security:spring-security-test:5.6.5 - https://spring.io/projects/spring-security)
* spring-security-web (org.springframework.security:spring-security-web:5.6.5 - https://spring.io/projects/spring-security)
* SWORD v2 :: Common Server Library (org.swordapp:sword2-server:1.0 - http://www.swordapp.org/) * SWORD v2 :: Common Server Library (org.swordapp:sword2-server:1.0 - http://www.swordapp.org/)
* ISO Parser (org.tallison:isoparser:1.9.41.2 - https://github.com/tballison/mp4parser)
* org.tallison:metadata-extractor (org.tallison:metadata-extractor:2.13.0 - https://drewnoakes.com/code/exif/)
* XMPCore Shaded (org.tallison.xmp:xmpcore-shaded:6.1.10 - https://github.com/tballison)
* snappy-java (org.xerial.snappy:snappy-java:1.1.7.6 - https://github.com/xerial/snappy-java) * snappy-java (org.xerial.snappy:snappy-java:1.1.7.6 - https://github.com/xerial/snappy-java)
* xml-matchers (org.xmlmatchers:xml-matchers:0.10 - http://code.google.com/p/xml-matchers/) * xml-matchers (org.xmlmatchers:xml-matchers:0.10 - http://code.google.com/p/xml-matchers/)
* org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.6.4 - https://www.xmlunit.org/)
* org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.8.0 - https://www.xmlunit.org/) * org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.8.0 - https://www.xmlunit.org/)
* org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.8.4 - https://www.xmlunit.org/)
* org.xmlunit:xmlunit-placeholders (org.xmlunit:xmlunit-placeholders:2.8.0 - https://www.xmlunit.org/xmlunit-placeholders/) * org.xmlunit:xmlunit-placeholders (org.xmlunit:xmlunit-placeholders:2.8.0 - https://www.xmlunit.org/xmlunit-placeholders/)
* SnakeYAML (org.yaml:snakeyaml:1.25 - http://www.snakeyaml.org) * SnakeYAML (org.yaml:snakeyaml:1.29 - http://www.snakeyaml.org)
* SnakeYAML (org.yaml:snakeyaml:1.26 - http://www.snakeyaml.org)
* ROME, RSS and atOM utilitiEs for Java (rome:rome:1.0 - https://rome.dev.java.net/)
* software.amazon.ion:ion-java (software.amazon.ion:ion-java:1.0.2 - https://github.com/amznlabs/ion-java/) * software.amazon.ion:ion-java (software.amazon.ion:ion-java:1.0.2 - https://github.com/amznlabs/ion-java/)
* xalan (xalan:xalan:2.7.0 - no url defined) * Xalan Java Serializer (xalan:serializer:2.7.2 - http://xml.apache.org/xalan-j/)
* Xerces2-j (xerces:xercesImpl:2.12.0 - https://xerces.apache.org/xerces2-j/) * Xalan Java (xalan:xalan:2.7.2 - http://xml.apache.org/xalan-j/)
* Xerces2-j (xerces:xercesImpl:2.12.2 - https://xerces.apache.org/xerces2-j/)
* XML Commons External Components XML APIs (xml-apis:xml-apis:1.4.01 - http://xml.apache.org/commons/components/external/) * XML Commons External Components XML APIs (xml-apis:xml-apis:1.4.01 - http://xml.apache.org/commons/components/external/)
BSD License: BSD License:
* AntLR Parser Generator (antlr:antlr:2.7.7 - http://www.antlr.org/) * AntLR Parser Generator (antlr:antlr:2.7.7 - http://www.antlr.org/)
* Adobe XMPCore (com.adobe.xmp:xmpcore:6.1.11 - https://www.adobe.com/devnet/xmp/library/eula-xmp-library-java.html)
* coverity-escapers (com.coverity.security:coverity-escapers:1.1.1 - http://coverity.com/security) * coverity-escapers (com.coverity.security:coverity-escapers:1.1.1 - http://coverity.com/security)
* Java Advanced Imaging Image I/O Tools API core (standalone) (com.github.jai-imageio:jai-imageio-core:1.4.0 - https://github.com/jai-imageio/jai-imageio-core) * Java Advanced Imaging Image I/O Tools API core (standalone) (com.github.jai-imageio:jai-imageio-core:1.4.0 - https://github.com/jai-imageio/jai-imageio-core)
* JSONLD Java :: Core (com.github.jsonld-java:jsonld-java:0.5.1 - http://github.com/jsonld-java/jsonld-java/jsonld-java/) * JSONLD Java :: Core (com.github.jsonld-java:jsonld-java:0.5.1 - http://github.com/jsonld-java/jsonld-java/jsonld-java/)
@@ -411,41 +404,36 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Protocol Buffers [Core] (com.google.protobuf:protobuf-java:3.11.0 - https://developers.google.com/protocol-buffers/protobuf-java/) * Protocol Buffers [Core] (com.google.protobuf:protobuf-java:3.11.0 - https://developers.google.com/protocol-buffers/protobuf-java/)
* JZlib (com.jcraft:jzlib:1.1.3 - http://www.jcraft.com/jzlib/) * JZlib (com.jcraft:jzlib:1.1.3 - http://www.jcraft.com/jzlib/)
* dnsjava (dnsjava:dnsjava:2.1.7 - http://www.dnsjava.org) * dnsjava (dnsjava:dnsjava:2.1.7 - http://www.dnsjava.org)
* Units of Measurement API (javax.measure:unit-api:1.0 - http://unitsofmeasurement.github.io/)
* jaxen (jaxen:jaxen:1.1.6 - http://jaxen.codehaus.org/) * jaxen (jaxen:jaxen:1.1.6 - http://jaxen.codehaus.org/)
* JLine (jline:jline:0.9.94 - http://jline.sourceforge.net)
* ANTLR 4 Runtime (org.antlr:antlr4-runtime:4.5.1-1 - http://www.antlr.org/antlr4-runtime) * ANTLR 4 Runtime (org.antlr:antlr4-runtime:4.5.1-1 - http://www.antlr.org/antlr4-runtime)
* commons-compiler (org.codehaus.janino:commons-compiler:3.0.9 - http://janino-compiler.github.io/commons-compiler/) * commons-compiler (org.codehaus.janino:commons-compiler:3.0.9 - http://janino-compiler.github.io/commons-compiler/)
* janino (org.codehaus.janino:janino:3.0.9 - http://janino-compiler.github.io/janino/) * janino (org.codehaus.janino:janino:3.0.9 - http://janino-compiler.github.io/janino/)
* Stax2 API (org.codehaus.woodstox:stax2-api:3.1.4 - http://wiki.fasterxml.com/WoodstoxStax2) * Stax2 API (org.codehaus.woodstox:stax2-api:4.2.1 - http://github.com/FasterXML/stax2-api)
* dom4j (org.dom4j:dom4j:2.1.1 - http://dom4j.github.io/)
* Hamcrest Date (org.exparity:hamcrest-date:2.0.7 - https://github.com/exparity/hamcrest-date) * Hamcrest Date (org.exparity:hamcrest-date:2.0.7 - https://github.com/exparity/hamcrest-date)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* Hamcrest (org.hamcrest:hamcrest:2.1 - http://hamcrest.org/JavaHamcrest/) * Hamcrest (org.hamcrest:hamcrest:2.2 - http://hamcrest.org/JavaHamcrest/)
* Hamcrest All (org.hamcrest:hamcrest-all:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-all) * Hamcrest All (org.hamcrest:hamcrest-all:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-all)
* Hamcrest Core (org.hamcrest:hamcrest-core:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-core) * Hamcrest Core (org.hamcrest:hamcrest-core:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-core)
* Hamcrest library (org.hamcrest:hamcrest-library:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-library) * HdrHistogram (org.hdrhistogram:HdrHistogram:2.1.12 - http://hdrhistogram.github.io/HdrHistogram/)
* JBibTeX (org.jbibtex:jbibtex:1.0.10 - http://www.jbibtex.org) * JBibTeX (org.jbibtex:jbibtex:1.0.10 - http://www.jbibtex.org)
* asm (org.ow2.asm:asm:8.0.1 - http://asm.ow2.io/) * asm (org.ow2.asm:asm:8.0.1 - http://asm.ow2.io/)
* asm-analysis (org.ow2.asm:asm-analysis:7.1 - http://asm.ow2.org/) * asm-analysis (org.ow2.asm:asm-analysis:7.1 - http://asm.ow2.org/)
* asm-commons (org.ow2.asm:asm-commons:8.0.1 - http://asm.ow2.io/) * asm-commons (org.ow2.asm:asm-commons:8.0.1 - http://asm.ow2.io/)
* asm-tree (org.ow2.asm:asm-tree:7.1 - http://asm.ow2.org/) * asm-tree (org.ow2.asm:asm-tree:7.1 - http://asm.ow2.org/)
* asm-util (org.ow2.asm:asm-util:7.1 - http://asm.ow2.org/) * asm-util (org.ow2.asm:asm-util:7.1 - http://asm.ow2.org/)
* PostgreSQL JDBC Driver (org.postgresql:postgresql:42.2.25 - https://jdbc.postgresql.org) * PostgreSQL JDBC Driver (org.postgresql:postgresql:42.3.3 - https://jdbc.postgresql.org)
* Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections) * Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections)
* JMatIO (org.tallison:jmatio:1.5 - https://github.com/tballison/jmatio) * JMatIO (org.tallison:jmatio:1.5 - https://github.com/tballison/jmatio)
* XMLUnit for Java (xmlunit:xmlunit:1.3 - http://xmlunit.sourceforge.net/) * XMLUnit for Java (xmlunit:xmlunit:1.3 - http://xmlunit.sourceforge.net/)
Common Development and Distribution License (CDDL): Common Development and Distribution License (CDDL):
* JavaBeans Activation Framework (com.sun.activation:javax.activation:1.2.0 - http://java.net/all/javax.activation/)
* istack common utility code runtime (com.sun.istack:istack-commons-runtime:3.0.7 - http://java.net/istack-commons/istack-commons-runtime/) * istack common utility code runtime (com.sun.istack:istack-commons-runtime:3.0.7 - http://java.net/istack-commons/istack-commons-runtime/)
* JavaMail API (com.sun.mail:javax.mail:1.6.2 - http://javaee.github.io/javamail/javax.mail) * JavaMail API (com.sun.mail:javax.mail:1.6.2 - http://javaee.github.io/javamail/javax.mail)
* JavaMail API (no providers) (com.sun.mail:mailapi:1.6.2 - http://javaee.github.io/javamail/mailapi) * JavaMail API (no providers) (com.sun.mail:mailapi:1.6.2 - http://javaee.github.io/javamail/mailapi)
* Old JAXB Core (com.sun.xml.bind:jaxb-core:2.3.0.1 - http://jaxb.java.net/jaxb-bundles/jaxb-core) * Old JAXB Core (com.sun.xml.bind:jaxb-core:2.3.0.1 - http://jaxb.java.net/jaxb-bundles/jaxb-core)
* Old JAXB Runtime (com.sun.xml.bind:jaxb-impl:2.3.1 - http://jaxb.java.net/jaxb-bundles/jaxb-impl) * Old JAXB Runtime (com.sun.xml.bind:jaxb-impl:2.3.1 - http://jaxb.java.net/jaxb-bundles/jaxb-impl)
* saaj-impl (com.sun.xml.messaging.saaj:saaj-impl:1.4.0-b03 - http://java.net/saaj-impl/)
* Jakarta Annotations API (jakarta.annotation:jakarta.annotation-api:1.3.5 - https://projects.eclipse.org/projects/ee4j.ca) * Jakarta Annotations API (jakarta.annotation:jakarta.annotation-api:1.3.5 - https://projects.eclipse.org/projects/ee4j.ca)
* jakarta.ws.rs-api (jakarta.ws.rs:jakarta.ws.rs-api:2.1.6 - https://github.com/eclipse-ee4j/jaxrs-api) * jakarta.ws.rs-api (jakarta.ws.rs:jakarta.ws.rs-api:2.1.6 - https://github.com/eclipse-ee4j/jaxrs-api)
* JavaBeans Activation Framework (JAF) (javax.activation:activation:1.1 - http://java.sun.com/products/javabeans/jaf/index.jsp) * JavaBeans Activation Framework (JAF) (javax.activation:activation:1.1 - http://java.sun.com/products/javabeans/jaf/index.jsp)
@@ -454,7 +442,6 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Java Servlet API (javax.servlet:javax.servlet-api:3.1.0 - http://servlet-spec.java.net) * Java Servlet API (javax.servlet:javax.servlet-api:3.1.0 - http://servlet-spec.java.net)
* javax.transaction API (javax.transaction:javax.transaction-api:1.3 - http://jta-spec.java.net) * javax.transaction API (javax.transaction:javax.transaction-api:1.3 - http://jta-spec.java.net)
* jaxb-api (javax.xml.bind:jaxb-api:2.3.1 - https://github.com/javaee/jaxb-spec/jaxb-api) * jaxb-api (javax.xml.bind:jaxb-api:2.3.1 - https://github.com/javaee/jaxb-spec/jaxb-api)
* JAX-WS API (javax.xml.ws:jaxws-api:2.3.1 - https://github.com/javaee/jax-ws-spec)
* JHighlight (org.codelibs:jhighlight:1.0.3 - https://github.com/codelibs/jhighlight) * JHighlight (org.codelibs:jhighlight:1.0.3 - https://github.com/codelibs/jhighlight)
* HK2 API module (org.glassfish.hk2:hk2-api:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api) * HK2 API module (org.glassfish.hk2:hk2-api:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api)
* ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator) * ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator)
@@ -464,10 +451,9 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* javax.inject:1 as OSGi bundle (org.glassfish.hk2.external:jakarta.inject:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/jakarta.inject) * javax.inject:1 as OSGi bundle (org.glassfish.hk2.external:jakarta.inject:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/jakarta.inject)
* JAXB Runtime (org.glassfish.jaxb:jaxb-runtime:2.3.1 - http://jaxb.java.net/jaxb-runtime-parent/jaxb-runtime) * JAXB Runtime (org.glassfish.jaxb:jaxb-runtime:2.3.1 - http://jaxb.java.net/jaxb-runtime-parent/jaxb-runtime)
* TXW2 Runtime (org.glassfish.jaxb:txw2:2.3.1 - http://jaxb.java.net/jaxb-txw-parent/txw2) * TXW2 Runtime (org.glassfish.jaxb:txw2:2.3.1 - http://jaxb.java.net/jaxb-txw-parent/txw2)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* Java Transaction API (org.jboss.spec.javax.transaction:jboss-transaction-api_1.2_spec:1.1.1.Final - http://www.jboss.org/jboss-transaction-api_1.2_spec) * Java Transaction API (org.jboss.spec.javax.transaction:jboss-transaction-api_1.2_spec:1.1.1.Final - http://www.jboss.org/jboss-transaction-api_1.2_spec)
* MIME streaming extension (org.jvnet.mimepull:mimepull:1.9.7 - http://mimepull.java.net)
* Extended StAX API (org.jvnet.staxex:stax-ex:1.8 - http://stax-ex.java.net/) * Extended StAX API (org.jvnet.staxex:stax-ex:1.8 - http://stax-ex.java.net/)
Cordra (Version 2) License Agreement: Cordra (Version 2) License Agreement:
@@ -478,56 +464,51 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
Eclipse Distribution License, Version 1.0: Eclipse Distribution License, Version 1.0:
* JavaBeans Activation Framework (com.sun.activation:jakarta.activation:1.2.1 - https://github.com/eclipse-ee4j/jaf/jakarta.activation)
* JavaBeans Activation Framework API jar (jakarta.activation:jakarta.activation-api:1.2.1 - https://github.com/eclipse-ee4j/jaf/jakarta.activation-api)
* Jakarta Activation API jar (jakarta.activation:jakarta.activation-api:1.2.2 - https://github.com/eclipse-ee4j/jaf/jakarta.activation-api) * Jakarta Activation API jar (jakarta.activation:jakarta.activation-api:1.2.2 - https://github.com/eclipse-ee4j/jaf/jakarta.activation-api)
* jakarta.xml.bind-api (jakarta.xml.bind:jakarta.xml.bind-api:2.3.2 - https://github.com/eclipse-ee4j/jaxb-api/jakarta.xml.bind-api)
* Jakarta XML Binding API (jakarta.xml.bind:jakarta.xml.bind-api:2.3.3 - https://github.com/eclipse-ee4j/jaxb-api/jakarta.xml.bind-api) * Jakarta XML Binding API (jakarta.xml.bind:jakarta.xml.bind-api:2.3.3 - https://github.com/eclipse-ee4j/jaxb-api/jakarta.xml.bind-api)
* javax.persistence-api (javax.persistence:javax.persistence-api:2.2 - https://github.com/javaee/jpa-spec) * javax.persistence-api (javax.persistence:javax.persistence-api:2.2 - https://github.com/javaee/jpa-spec)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* Java Persistence API, Version 2.1 (org.hibernate.javax.persistence:hibernate-jpa-2.1-api:1.0.0.Final - http://hibernate.org) * Java Persistence API, Version 2.1 (org.hibernate.javax.persistence:hibernate-jpa-2.1-api:1.0.2.Final - http://hibernate.org)
Eclipse Public License: Eclipse Public License:
* System Rules (com.github.stefanbirkner:system-rules:1.19.0 - http://stefanbirkner.github.io/system-rules/) * System Rules (com.github.stefanbirkner:system-rules:1.19.0 - http://stefanbirkner.github.io/system-rules/)
* c3p0 (com.mchange:c3p0:0.9.5.5 - https://github.com/swaldman/c3p0) * H2 Database Engine (com.h2database:h2:2.1.210 - https://h2database.com)
* mchange-commons-java (com.mchange:mchange-commons-java:0.2.19 - https://github.com/swaldman/mchange-commons-java)
* Jakarta Annotations API (jakarta.annotation:jakarta.annotation-api:1.3.5 - https://projects.eclipse.org/projects/ee4j.ca) * Jakarta Annotations API (jakarta.annotation:jakarta.annotation-api:1.3.5 - https://projects.eclipse.org/projects/ee4j.ca)
* jakarta.ws.rs-api (jakarta.ws.rs:jakarta.ws.rs-api:2.1.6 - https://github.com/eclipse-ee4j/jaxrs-api) * jakarta.ws.rs-api (jakarta.ws.rs:jakarta.ws.rs-api:2.1.6 - https://github.com/eclipse-ee4j/jaxrs-api)
* javax.persistence-api (javax.persistence:javax.persistence-api:2.2 - https://github.com/javaee/jpa-spec) * javax.persistence-api (javax.persistence:javax.persistence-api:2.2 - https://github.com/javaee/jpa-spec)
* JUnit (junit:junit:4.13.1 - http://junit.org) * JUnit (junit:junit:4.13.1 - http://junit.org)
* AspectJ runtime (org.aspectj:aspectjrt:1.8.0 - http://www.aspectj.org) * AspectJ Weaver (org.aspectj:aspectjweaver:1.9.7 - https://www.eclipse.org/aspectj/)
* AspectJ weaver (org.aspectj:aspectjweaver:1.9.5 - http://www.aspectj.org)
* Eclipse Compiler for Java(TM) (org.eclipse.jdt:ecj:3.14.0 - http://www.eclipse.org/jdt) * Eclipse Compiler for Java(TM) (org.eclipse.jdt:ecj:3.14.0 - http://www.eclipse.org/jdt)
* Jetty :: Apache JSP Implementation (org.eclipse.jetty:apache-jsp:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Apache JSP Implementation (org.eclipse.jetty:apache-jsp:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - http://tomcat.apache.org/taglibs/standard/) * Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - http://tomcat.apache.org/taglibs/standard/)
* Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client) * Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client)
* Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client) * Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client)
* Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server) * Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server)
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
* Jetty :: Servlet Annotations (org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Servlet Annotations (org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-client) * Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-client)
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-continuation) * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-continuation)
* Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-deploy) * Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-deploy)
* Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-http) * Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-http)
* Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-io) * Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-io)
* Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-jmx) * Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-jmx)
* Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-rewrite) * Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-rewrite)
* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-security) * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-security)
* Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-server) * Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-server)
* Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-servlet) * Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-servlet)
* Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-servlets) * Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-servlets)
* Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-util) * Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-util)
* Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-webapp) * Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-webapp)
* Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-xml) * Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-xml)
* Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-client) * Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-client)
* Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-common) * Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-common)
* Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-hpack) * Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-hpack)
* Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport) * Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport)
* Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-server) * Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-server)
* Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas) * Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas)
* HK2 API module (org.glassfish.hk2:hk2-api:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api) * HK2 API module (org.glassfish.hk2:hk2-api:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api)
* ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator) * ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator)
@@ -535,10 +516,10 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* OSGi resource locator (org.glassfish.hk2:osgi-resource-locator:1.0.3 - https://projects.eclipse.org/projects/ee4j/osgi-resource-locator) * OSGi resource locator (org.glassfish.hk2:osgi-resource-locator:1.0.3 - https://projects.eclipse.org/projects/ee4j/osgi-resource-locator)
* aopalliance version 1.0 repackaged as a module (org.glassfish.hk2.external:aopalliance-repackaged:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/aopalliance-repackaged) * aopalliance version 1.0 repackaged as a module (org.glassfish.hk2.external:aopalliance-repackaged:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/aopalliance-repackaged)
* javax.inject:1 as OSGi bundle (org.glassfish.hk2.external:jakarta.inject:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/jakarta.inject) * javax.inject:1 as OSGi bundle (org.glassfish.hk2.external:jakarta.inject:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/jakarta.inject)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-core-common (org.glassfish.jersey.core:jersey-common:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common) * jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* Java Persistence API, Version 2.1 (org.hibernate.javax.persistence:hibernate-jpa-2.1-api:1.0.0.Final - http://hibernate.org) * Java Persistence API, Version 2.1 (org.hibernate.javax.persistence:hibernate-jpa-2.1-api:1.0.2.Final - http://hibernate.org)
* Jetty Server (org.mortbay.jetty:jetty:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/modules/jetty) * Jetty Server (org.mortbay.jetty:jetty:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/modules/jetty)
* Jetty Servlet Tester (org.mortbay.jetty:jetty-servlet-tester:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-servlet-tester) * Jetty Servlet Tester (org.mortbay.jetty:jetty-servlet-tester:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-servlet-tester)
* Jetty Utilities (org.mortbay.jetty:jetty-util:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-util) * Jetty Utilities (org.mortbay.jetty:jetty-util:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-util)
@@ -552,21 +533,16 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* json-schema-validator (com.github.java-json-tools:json-schema-validator:2.2.14 - https://github.com/java-json-tools/json-schema-validator) * json-schema-validator (com.github.java-json-tools:json-schema-validator:2.2.14 - https://github.com/java-json-tools/json-schema-validator)
* msg-simple (com.github.java-json-tools:msg-simple:1.2 - https://github.com/java-json-tools/msg-simple) * msg-simple (com.github.java-json-tools:msg-simple:1.2 - https://github.com/java-json-tools/msg-simple)
* uri-template (com.github.java-json-tools:uri-template:0.10 - https://github.com/java-json-tools/uri-template) * uri-template (com.github.java-json-tools:uri-template:0.10 - https://github.com/java-json-tools/uri-template)
* SpotBugs Annotations (com.github.spotbugs:spotbugs-annotations:3.1.9 - https://spotbugs.github.io/)
* FindBugs-Annotations (com.google.code.findbugs:annotations:3.0.1u2 - http://findbugs.sourceforge.net/) * FindBugs-Annotations (com.google.code.findbugs:annotations:3.0.1u2 - http://findbugs.sourceforge.net/)
* c3p0 (com.mchange:c3p0:0.9.5.5 - https://github.com/swaldman/c3p0)
* mchange-commons-java (com.mchange:mchange-commons-java:0.2.19 - https://github.com/swaldman/mchange-commons-java)
* Java Native Access (net.java.dev.jna:jna:5.5.0 - https://github.com/java-native-access/jna)
* JHighlight (org.codelibs:jhighlight:1.0.3 - https://github.com/codelibs/jhighlight) * JHighlight (org.codelibs:jhighlight:1.0.3 - https://github.com/codelibs/jhighlight)
* Hibernate ORM - hibernate-core (org.hibernate:hibernate-core:5.4.10.Final - http://hibernate.org/orm) * Hibernate ORM - hibernate-core (org.hibernate:hibernate-core:5.6.5.Final - https://hibernate.org/orm)
* Hibernate ORM - hibernate-ehcache (org.hibernate:hibernate-ehcache:5.4.10.Final - http://hibernate.org/orm) * Hibernate ORM - hibernate-jcache (org.hibernate:hibernate-jcache:5.6.5.Final - https://hibernate.org/orm)
* Hibernate ORM - hibernate-jpamodelgen (org.hibernate:hibernate-jpamodelgen:5.4.10.Final - http://hibernate.org/orm) * Hibernate ORM - hibernate-jpamodelgen (org.hibernate:hibernate-jpamodelgen:5.6.5.Final - https://hibernate.org/orm)
* Hibernate Commons Annotations (org.hibernate.common:hibernate-commons-annotations:5.1.0.Final - http://hibernate.org) * Hibernate Commons Annotations (org.hibernate.common:hibernate-commons-annotations:5.1.2.Final - http://hibernate.org)
* im4java (org.im4java:im4java:1.4.0 - http://sourceforge.net/projects/im4java/) * im4java (org.im4java:im4java:1.4.0 - http://sourceforge.net/projects/im4java/)
* JacORB OMG-API (org.jacorb:jacorb-omgapi:3.9 - http://www.jacorb.org)
* Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/) * Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/)
* Java RMI API (org.jboss.spec.javax.rmi:jboss-rmi-api_1.0_spec:1.0.6.Final - http://www.jboss.org/jboss-rmi-api_1.0_spec)
* XOM (xom:xom:1.2.5 - http://xom.nu) * XOM (xom:xom:1.2.5 - http://xom.nu)
* XOM (xom:xom:1.3.7 - https://xom.nu)
Go License: Go License:
@@ -576,29 +552,21 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Handle Server (net.handle:handle:9.3.0 - https://www.handle.net) * Handle Server (net.handle:handle:9.3.0 - https://www.handle.net)
JDOM License (Apache-style license):
* jdom (jdom:jdom:1.0 - no url defined)
MIT License: MIT License:
* Java SemVer (com.github.zafarkhaja:java-semver:0.9.0 - https://github.com/zafarkhaja/jsemver) * Java SemVer (com.github.zafarkhaja:java-semver:0.9.0 - https://github.com/zafarkhaja/jsemver)
* dd-plist (com.googlecode.plist:dd-plist:1.23 - http://www.github.com/3breadt/dd-plist)
* DigitalCollections: IIIF API Library (de.digitalcollections.iiif:iiif-apis:0.3.9 - https://github.com/dbmdz/iiif-apis) * DigitalCollections: IIIF API Library (de.digitalcollections.iiif:iiif-apis:0.3.9 - https://github.com/dbmdz/iiif-apis)
* CDM core library (edu.ucar:cdm:4.5.5 - http://www.unidata.ucar.edu/software/netcdf-java/documentation.htm)
* GRIB IOSP and Feature Collection (edu.ucar:grib:4.5.5 - http://www.unidata.ucar.edu/software/netcdf-java/)
* HttpClient Wrappers (edu.ucar:httpservices:4.5.5 - http://www.unidata.ucar.edu/software/netcdf-java/documentation.htm)
* netCDF-4 IOSP JNI connection to C library (edu.ucar:netcdf4:4.5.5 - http://www.unidata.ucar.edu/software/netcdf-java/netcdf4/)
* udunits (edu.ucar:udunits:4.5.5 - http://www.unidata.ucar.edu/software/udunits//)
* JOpt Simple (net.sf.jopt-simple:jopt-simple:5.0.4 - http://jopt-simple.github.io/jopt-simple) * JOpt Simple (net.sf.jopt-simple:jopt-simple:5.0.4 - http://jopt-simple.github.io/jopt-simple)
* Bouncy Castle S/MIME API (org.bouncycastle:bcmail-jdk15on:1.65 - http://www.bouncycastle.org/java.html) * Bouncy Castle S/MIME API (org.bouncycastle:bcmail-jdk15on:1.70 - https://www.bouncycastle.org/java.html)
* Bouncy Castle PKIX, CMS, EAC, TSP, PKCS, OCSP, CMP, and CRMF APIs (org.bouncycastle:bcpkix-jdk15on:1.65 - http://www.bouncycastle.org/java.html) * Bouncy Castle PKIX, CMS, EAC, TSP, PKCS, OCSP, CMP, and CRMF APIs (org.bouncycastle:bcpkix-jdk15on:1.70 - https://www.bouncycastle.org/java.html)
* Bouncy Castle Provider (org.bouncycastle:bcprov-jdk15on:1.65 - http://www.bouncycastle.org/java.html) * Bouncy Castle Provider (org.bouncycastle:bcprov-jdk15on:1.70 - https://www.bouncycastle.org/java.html)
* Bouncy Castle ASN.1 Extension and Utility APIs (org.bouncycastle:bcutil-jdk15on:1.70 - https://www.bouncycastle.org/java.html)
* org.brotli:dec (org.brotli:dec:0.1.2 - http://brotli.org/dec) * org.brotli:dec (org.brotli:dec:0.1.2 - http://brotli.org/dec)
* Checker Qual (org.checkerframework:checker-qual:3.10.0 - https://checkerframework.org)
* Checker Qual (org.checkerframework:checker-qual:3.5.0 - https://checkerframework.org) * Checker Qual (org.checkerframework:checker-qual:3.5.0 - https://checkerframework.org)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* Itadaki jbzip2 (org.itadaki:bzip2:0.9.1 - https://code.google.com/p/jbzip2/)
* jsoup Java HTML Parser (org.jsoup:jsoup:1.13.1 - https://jsoup.org/)
* mockito-core (org.mockito:mockito-core:3.12.4 - https://github.com/mockito/mockito) * mockito-core (org.mockito:mockito-core:3.12.4 - https://github.com/mockito/mockito)
* mockito-inline (org.mockito:mockito-inline:3.12.4 - https://github.com/mockito/mockito) * mockito-inline (org.mockito:mockito-inline:3.12.4 - https://github.com/mockito/mockito)
* ORCID - Model (org.orcid:orcid-model:3.0.2 - http://github.com/ORCID/orcid-model) * ORCID - Model (org.orcid:orcid-model:3.0.2 - http://github.com/ORCID/orcid-model)
@@ -606,29 +574,33 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* JUL to SLF4J bridge (org.slf4j:jul-to-slf4j:1.7.25 - http://www.slf4j.org) * JUL to SLF4J bridge (org.slf4j:jul-to-slf4j:1.7.25 - http://www.slf4j.org)
* SLF4J API Module (org.slf4j:slf4j-api:1.7.25 - http://www.slf4j.org) * SLF4J API Module (org.slf4j:slf4j-api:1.7.25 - http://www.slf4j.org)
* SLF4J Extensions Module (org.slf4j:slf4j-ext:1.7.28 - http://www.slf4j.org) * SLF4J Extensions Module (org.slf4j:slf4j-ext:1.7.28 - http://www.slf4j.org)
* HAL Browser (org.webjars:hal-browser:ad9b865 - http://webjars.org)
* toastr (org.webjars.bowergithub.codeseven:toastr:2.1.4 - http://webjars.org) * toastr (org.webjars.bowergithub.codeseven:toastr:2.1.4 - http://webjars.org)
* jquery (org.webjars.bowergithub.jquery:jquery-dist:3.5.1 - https://www.webjars.org) * backbone (org.webjars.bowergithub.jashkenas:backbone:1.4.1 - https://www.webjars.org)
* bootstrap (org.webjars.bowergithub.twbs:bootstrap:4.5.2 - https://www.webjars.org) * underscore (org.webjars.bowergithub.jashkenas:underscore:1.13.2 - https://www.webjars.org)
* jquery (org.webjars.bowergithub.jquery:jquery-dist:3.6.0 - https://www.webjars.org)
* urijs (org.webjars.bowergithub.medialize:uri.js:1.19.10 - https://www.webjars.org)
* bootstrap (org.webjars.bowergithub.twbs:bootstrap:4.6.1 - https://www.webjars.org)
* core-js (org.webjars.npm:core-js:3.22.8 - https://www.webjars.org)
* @json-editor/json-editor (org.webjars.npm:json-editor__json-editor:2.6.1 - https://www.webjars.org)
Mozilla Public License: Mozilla Public License:
* juniversalchardet (com.googlecode.juniversalchardet:juniversalchardet:1.0.3 - http://juniversalchardet.googlecode.com/) * juniversalchardet (com.googlecode.juniversalchardet:juniversalchardet:1.0.3 - http://juniversalchardet.googlecode.com/)
* h2 (com.h2database:h2:1.4.187 - no url defined) * H2 Database Engine (com.h2database:h2:2.1.210 - https://h2database.com)
* Saxon-HE (net.sf.saxon:Saxon-HE:9.8.0-14 - http://www.saxonica.com/) * Saxon-HE (net.sf.saxon:Saxon-HE:9.8.0-14 - http://www.saxonica.com/)
* Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/) * Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/)
* Mozilla Rhino (org.mozilla:rhino:1.7.7.2 - https://developer.mozilla.org/en/Rhino) * Mozilla Rhino (org.mozilla:rhino:1.7.7.2 - https://developer.mozilla.org/en/Rhino)
OGC copyright:
* GeoAPI (org.opengis:geoapi:3.0.1 - http://www.geoapi.org/geoapi/)
Public Domain: Public Domain:
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-core-common (org.glassfish.jersey.core:jersey-common:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common) * jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* HdrHistogram (org.hdrhistogram:HdrHistogram:2.1.12 - http://hdrhistogram.github.io/HdrHistogram/)
* LatencyUtils (org.latencyutils:LatencyUtils:2.0.3 - http://latencyutils.github.io/LatencyUtils/)
* Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections) * Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections)
* XZ for Java (org.tukaani:xz:1.8 - https://tukaani.org/xz/java.html) * XZ for Java (org.tukaani:xz:1.9 - https://tukaani.org/xz/java.html)
The JSON License: The JSON License:
@@ -636,7 +608,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
UnRar License: UnRar License:
* Java UnRar (com.github.junrar:junrar:4.0.0 - https://github.com/junrar/junrar) * Java Unrar (com.github.junrar:junrar:7.4.1 - https://github.com/junrar/junrar)
Unicode/ICU License: Unicode/ICU License:
@@ -644,10 +616,10 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
W3C license: W3C license:
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
jQuery license: jQuery license:
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)

View File

@@ -1,6 +1,4 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-api</artifactId> <artifactId>dspace-api</artifactId>
@@ -14,7 +12,7 @@
<parent> <parent>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId> <artifactId>dspace-parent</artifactId>
<version>7.3-SNAPSHOT</version> <version>7.4-SNAPSHOT</version>
<relativePath>..</relativePath> <relativePath>..</relativePath>
</parent> </parent>
@@ -336,7 +334,6 @@
</profiles> </profiles>
<dependencies> <dependencies>
<dependency> <dependency>
<groupId>org.apache.logging.log4j</groupId> <groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId> <artifactId>log4j-api</artifactId>
@@ -361,6 +358,23 @@
<artifactId>ehcache</artifactId> <artifactId>ehcache</artifactId>
<version>${ehcache.version}</version> <version>${ehcache.version}</version>
</dependency> </dependency>
<!-- https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-cache
Caching dependencies for sherpa service. -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-cache</artifactId>
<version>${spring-boot.version}</version>
<exclusions>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>javax.cache</groupId>
<artifactId>cache-api</artifactId>
</dependency>
<dependency> <dependency>
<groupId>org.hibernate</groupId> <groupId>org.hibernate</groupId>
<artifactId>hibernate-jpamodelgen</artifactId> <artifactId>hibernate-jpamodelgen</artifactId>
@@ -534,10 +548,17 @@
<groupId>com.ibm.icu</groupId> <groupId>com.ibm.icu</groupId>
<artifactId>icu4j</artifactId> <artifactId>icu4j</artifactId>
</dependency> </dependency>
<!-- Codebase at https://github.com/OCLC-Research/oaiharvester2/ -->
<dependency> <dependency>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>oclc-harvester2</artifactId> <artifactId>oclc-harvester2</artifactId>
</dependency> </dependency>
<!-- Xalan is REQUIRED by 'oclc-harvester2' listed above (OAI harvesting fails without it).
Please do NOT use Xalan in DSpace codebase as it is not well maintained. -->
<dependency>
<groupId>xalan</groupId>
<artifactId>xalan</artifactId>
</dependency>
<dependency> <dependency>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-services</artifactId> <artifactId>dspace-services</artifactId>
@@ -862,6 +883,13 @@
<artifactId>mockserver-junit-rule</artifactId> <artifactId>mockserver-junit-rule</artifactId>
<version>5.11.2</version> <version>5.11.2</version>
<scope>test</scope> <scope>test</scope>
<exclusions>
<!-- Exclude snakeyaml to avoid conflicts with: spring-boot-starter-cache -->
<exclusion>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
</dependencies> </dependencies>

View File

@@ -0,0 +1,170 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.ParseException;
import org.dspace.content.Item;
import org.dspace.content.MetadataDSpaceCsvExportServiceImpl;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.MetadataDSpaceCsvExportService;
import org.dspace.core.Context;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationService;
import org.dspace.discovery.indexobject.IndexableCollection;
import org.dspace.discovery.indexobject.IndexableCommunity;
import org.dspace.discovery.utils.DiscoverQueryBuilder;
import org.dspace.discovery.utils.parameter.QueryBuilderSearchFilter;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.sort.SortOption;
import org.dspace.utils.DSpace;
/**
* Metadata exporter to allow the batch export of metadata from a discovery search into a file
*
*/
public class MetadataExportSearch extends DSpaceRunnable<MetadataExportSearchScriptConfiguration> {
    /** Type string passed to the handler when writing the resulting CSV file stream. */
    private static final String EXPORT_CSV = "exportCSV";
    private boolean help = false;
    // UUID (as a string) of the community/collection used to scope the search, from -s
    private String identifier;
    // Name of the Discovery configuration to search with, from -c
    private String discoveryConfigName;
    // Raw "-f" filter strings of the form <name>,<operator>=<value>
    private String[] filterQueryStrings;
    private boolean hasScope = false;
    // Discovery query string, from -q
    private String query;

    private SearchService searchService;
    private MetadataDSpaceCsvExportService metadataDSpaceCsvExportService;
    private EPersonService ePersonService;
    private DiscoveryConfigurationService discoveryConfigurationService;
    private CommunityService communityService;
    private CollectionService collectionService;
    private DiscoverQueryBuilder queryBuilder;

    @Override
    public MetadataExportSearchScriptConfiguration getScriptConfiguration() {
        return new DSpace().getServiceManager()
                           .getServiceByName("metadata-export-search", MetadataExportSearchScriptConfiguration.class);
    }

    /**
     * Look up the required services and parse the command line options.
     *
     * @throws ParseException passed through from the option parsing contract of
     *                        {@link DSpaceRunnable#setup()}
     */
    @Override
    public void setup() throws ParseException {
        searchService = SearchUtils.getSearchService();
        metadataDSpaceCsvExportService = new DSpace().getServiceManager()
                                                     .getServiceByName(
                                                         MetadataDSpaceCsvExportServiceImpl.class.getCanonicalName(),
                                                         MetadataDSpaceCsvExportService.class
                                                     );
        ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
        discoveryConfigurationService = SearchUtils.getConfigurationService();
        communityService = ContentServiceFactory.getInstance().getCommunityService();
        collectionService = ContentServiceFactory.getInstance().getCollectionService();
        queryBuilder = SearchUtils.getQueryBuilder();

        if (commandLine.hasOption('h')) {
            help = true;
            return;
        }
        if (commandLine.hasOption('q')) {
            query = commandLine.getOptionValue('q');
        }
        if (commandLine.hasOption('s')) {
            hasScope = true;
            identifier = commandLine.getOptionValue('s');
        }
        if (commandLine.hasOption('c')) {
            discoveryConfigName = commandLine.getOptionValue('c');
        }
        if (commandLine.hasOption('f')) {
            filterQueryStrings = commandLine.getOptionValues('f');
        }
    }

    /**
     * Run the discovery search described by the parsed options and export the
     * metadata of every matching Item to a CSV file via the handler.
     */
    @Override
    public void internalRun() throws Exception {
        if (help) {
            loghelpinfo();
            printHelp();
            return;
        }
        handler.logDebug("starting search export");

        IndexableObject dso = null;
        Context context = new Context();
        context.setCurrentUser(ePersonService.find(context, this.getEpersonIdentifier()));

        if (hasScope) {
            dso = resolveScope(context, identifier);
        }

        DiscoveryConfiguration discoveryConfiguration =
            discoveryConfigurationService.getDiscoveryConfiguration(discoveryConfigName);

        List<QueryBuilderSearchFilter> queryBuilderSearchFilters = new ArrayList<>();

        handler.logDebug("processing filter queries");
        if (filterQueryStrings != null) {
            for (String filterQueryString : filterQueryStrings) {
                // Each filter has the shape <field>,<operator>=<value>,
                // e.g. "author,authority=5df05073-..." or "title,contains=sample text"
                String field = filterQueryString.split(",", 2)[0];
                String operator = filterQueryString.split("(,|=)", 3)[1];
                String value = filterQueryString.split("=", 2)[1];
                QueryBuilderSearchFilter queryBuilderSearchFilter =
                    new QueryBuilderSearchFilter(field, operator, value);
                queryBuilderSearchFilters.add(queryBuilderSearchFilter);
            }
        }

        handler.logDebug("building query");
        // FIX: the offset must be the literal 0L. The previous Long.getLong("0")
        // looked up a *system property* named "0" and therefore passed null.
        DiscoverQuery discoverQuery =
            queryBuilder.buildQuery(context, dso, discoveryConfiguration, query, queryBuilderSearchFilters,
                                    "Item", 10, 0L, null, SortOption.DESCENDING);
        handler.logDebug("creating iterator");

        Iterator<Item> itemIterator = searchService.iteratorSearch(context, dso, discoverQuery);
        handler.logDebug("creating dspacecsv");
        DSpaceCSV dSpaceCSV = metadataDSpaceCsvExportService.export(context, itemIterator, true);
        handler.logDebug("writing to file " + getFileNameOrExportFile());
        handler.writeFilestream(context, getFileNameOrExportFile(), dSpaceCSV.getInputStream(), EXPORT_CSV);
        // NOTE(review): restoreAuthSystemState() is called without a matching
        // turnOffAuthorisationSystem() in this method — presumably the export
        // service disables authorisation internally; confirm.
        context.restoreAuthSystemState();
        context.complete();
    }

    protected void loghelpinfo() {
        // NOTE(review): this logs "metadata-export" although the script is
        // registered as "metadata-export-search" — confirm which is intended.
        handler.logInfo("metadata-export");
    }

    /**
     * @return the file name to export to; the CLI subclass overrides this to
     *         honour the "-n" option.
     */
    protected String getFileNameOrExportFile() {
        return "metadataExportSearch.csv";
    }

    /**
     * Resolve a container UUID to an indexable scope object.
     * Tries a Community first and falls back to a Collection.
     *
     * @param context the DSpace context
     * @param id      string form of the container UUID
     * @return an {@link IndexableCommunity} or {@link IndexableCollection};
     *         its wrapped object is null when the UUID matches neither
     * @throws SQLException if the database lookup fails
     */
    public IndexableObject resolveScope(Context context, String id) throws SQLException {
        UUID uuid = UUID.fromString(id);
        IndexableObject scopeObj = new IndexableCommunity(communityService.find(context, uuid));
        if (scopeObj.getIndexedObject() == null) {
            scopeObj = new IndexableCollection(collectionService.find(context, uuid));
        }
        return scopeObj;
    }
}

View File

@@ -0,0 +1,20 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
/**
* The cli version of the {@link MetadataExportSearch} script
*/
public class MetadataExportSearchCli extends MetadataExportSearch {

    /**
     * In the CLI flavour of the script the target file name is supplied by the
     * user through the "-n" / "--filename" option instead of being fixed.
     *
     * @return the value given for the "-n" option on the command line
     */
    @Override
    protected String getFileNameOrExportFile() {
        final char filenameOption = 'n';
        return commandLine.getOptionValue(filenameOption);
    }
}

View File

@@ -0,0 +1,26 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import org.apache.commons.cli.Options;
/**
* This is the CLI version of the {@link MetadataExportSearchScriptConfiguration} class that handles the
* configuration for the {@link MetadataExportSearchCli} script
*/
public class MetadataExportSearchCliScriptConfiguration
    extends MetadataExportSearchScriptConfiguration<MetadataExportSearchCli> {

    /**
     * Extend the shared search options with the CLI-only "-n"/"--filename"
     * option that names the file to export to.
     *
     * @return the parent's options with the "n" option added
     */
    @Override
    public Options getOptions() {
        Options options = super.getOptions();
        options.addOption("n", "filename", true, "the filename to export to");
        // FIX: return the augmented instance directly. Returning
        // super.getOptions() again only worked because the parent caches the
        // same object, which made the "n" option's presence accidental.
        return options;
    }
}

View File

@@ -0,0 +1,62 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import org.apache.commons.cli.Options;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
/**
* The {@link ScriptConfiguration} for the {@link MetadataExportSearch} script
*/
public class MetadataExportSearchScriptConfiguration<T extends MetadataExportSearch> extends ScriptConfiguration<T> {

    private Class<T> dspaceRunnableclass;

    @Override
    public Class<T> getDspaceRunnableClass() {
        return dspaceRunnableclass;
    }

    @Override
    public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
        this.dspaceRunnableclass = dspaceRunnableClass;
    }

    /**
     * This script may be launched by anyone; no special privileges are required.
     */
    @Override
    public boolean isAllowedToExecute(Context context) {
        return true;
    }

    /**
     * Lazily build and cache the option set for the metadata export search.
     * Uses a distinctly named local to avoid shadowing the inherited
     * {@code options} field (the original shadowed it, which obscured which
     * variable was being assigned and returned).
     */
    @Override
    public Options getOptions() {
        if (options == null) {
            Options exportOptions = new Options();
            exportOptions.addOption("q", "query", true,
                "The discovery search string that will be used to match records. Not URL encoded");
            exportOptions.getOption("q").setType(String.class);
            exportOptions.addOption("s", "scope", true,
                "UUID of a specific DSpace container (site, community or collection) to which the search has to be " +
                "limited");
            exportOptions.getOption("s").setType(String.class);
            exportOptions.addOption("c", "configuration", true,
                "The name of a Discovery configuration that should be used by this search");
            exportOptions.getOption("c").setType(String.class);
            exportOptions.addOption("f", "filter", true,
                "Advanced search filter that has to be used to filter the result set, with syntax `<:filter-name>," +
                "<:filter-operator>=<:filter-value>`. Not URL encoded. For example `author," +
                "authority=5df05073-3be7-410d-8166-e254369e4166` or `title,contains=sample text`");
            exportOptions.getOption("f").setType(String.class);
            exportOptions.addOption("h", "help", false, "help");
            super.options = exportOptions;
        }
        return options;
    }
}

View File

@@ -925,11 +925,10 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
rightItem = item; rightItem = item;
} }
// Create the relationship // Create the relationship, appending to the end
int leftPlace = relationshipService.findNextLeftPlaceByLeftItem(c, leftItem); Relationship persistedRelationship = relationshipService.create(
int rightPlace = relationshipService.findNextRightPlaceByRightItem(c, rightItem); c, leftItem, rightItem, foundRelationshipType, -1, -1
Relationship persistedRelationship = relationshipService.create(c, leftItem, rightItem, );
foundRelationshipType, leftPlace, rightPlace);
relationshipService.update(c, persistedRelationship); relationshipService.update(c, persistedRelationship);
} }

View File

@@ -0,0 +1,32 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.exception;
/**
* This class provides an exception to be used when trying to save a resource
* that already exists.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class ResourceAlreadyExistsException extends RuntimeException {

    private static final long serialVersionUID = 1L;

    /**
     * Create a ResourceAlreadyExistsException with an error message.
     * (The original Javadoc also mentioned "the already existing resource",
     * but no such parameter exists on this constructor.)
     *
     * @param message the error message
     */
    public ResourceAlreadyExistsException(String message) {
        super(message);
    }
}

View File

@@ -403,10 +403,8 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
} }
// Create the relationship // Create the relationship
int leftPlace = relationshipService.findNextLeftPlaceByLeftItem(c, leftItem); Relationship persistedRelationship =
int rightPlace = relationshipService.findNextRightPlaceByRightItem(c, rightItem); relationshipService.create(c, leftItem, rightItem, foundRelationshipType, -1, -1);
Relationship persistedRelationship = relationshipService.create(
c, leftItem, rightItem, foundRelationshipType, leftPlace, rightPlace);
// relationshipService.update(c, persistedRelationship); // relationshipService.update(c, persistedRelationship);
System.out.println("\tAdded relationship (type: " + relationshipType + ") from " + System.out.println("\tAdded relationship (type: " + relationshipType + ") from " +

View File

@@ -31,6 +31,7 @@ import org.dspace.app.sherpa.v2.SHERPAResponse;
import org.dspace.app.sherpa.v2.SHERPAUtils; import org.dspace.app.sherpa.v2.SHERPAUtils;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.Cacheable;
/** /**
* SHERPAService is responsible for making the HTTP call to the SHERPA v2 API * SHERPAService is responsible for making the HTTP call to the SHERPA v2 API
@@ -43,6 +44,7 @@ import org.springframework.beans.factory.annotation.Autowired;
* @author Kim Shepherd * @author Kim Shepherd
*/ */
public class SHERPAService { public class SHERPAService {
private CloseableHttpClient client = null; private CloseableHttpClient client = null;
private int maxNumberOfTries; private int maxNumberOfTries;
@@ -91,6 +93,7 @@ public class SHERPAService {
* @param query ISSN string to pass in an "issn equals" API query * @param query ISSN string to pass in an "issn equals" API query
* @return SHERPAResponse containing an error or journal policies * @return SHERPAResponse containing an error or journal policies
*/ */
@Cacheable(key = "#query", cacheNames = "sherpa.searchByJournalISSN")
public SHERPAResponse searchByJournalISSN(String query) { public SHERPAResponse searchByJournalISSN(String query) {
return performRequest("publication", "issn", "equals", query, 0, 1); return performRequest("publication", "issn", "equals", query, 0, 1);
} }
@@ -413,4 +416,5 @@ public class SHERPAService {
public void setTimeout(int timeout) { public void setTimeout(int timeout) {
this.timeout = timeout; this.timeout = timeout;
} }
}
}

View File

@@ -0,0 +1,71 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.sherpa.cache;
import java.util.Objects;
import java.util.Set;
import org.dspace.app.sherpa.submit.SHERPASubmitService;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.springframework.cache.CacheManager;
/**
* This service is responsible to deal with the SherpaService cache.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
public class SherpaCacheEvictService {

    // Name of the cache this service manages.
    static final String CACHE_NAME = "sherpa.searchByJournalISSN";

    private CacheManager cacheManager;
    private SHERPASubmitService sherpaSubmitService;

    /**
     * Immediately evict from the cache every SHERPA response related to the
     * given item, using each ISSN extracted from the item as a cache key.
     *
     * @param context the DSpace context
     * @param item    the item whose ISSNs drive the eviction
     */
    public void evictCacheValues(Context context, Item item) {
        for (String issn : sherpaSubmitService.getISSNs(context, item)) {
            Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).evictIfPresent(issn);
        }
    }

    /**
     * Immediately invalidate the whole SHERPA cache.
     */
    public void evictAllCacheValues() {
        Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).invalidate();
    }

    /**
     * @param cacheManager the Spring cache manager holding the SHERPA cache
     */
    public void setCacheManager(CacheManager cacheManager) {
        this.cacheManager = cacheManager;
    }

    /**
     * @param sherpaSubmitService the service used to extract ISSNs from items
     */
    public void setSherpaSubmitService(SHERPASubmitService sherpaSubmitService) {
        this.sherpaSubmitService = sherpaSubmitService;
    }
}

View File

@@ -0,0 +1,34 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.sherpa.cache;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.ehcache.event.CacheEvent;
import org.ehcache.event.CacheEventListener;
/**
* This is a EHCache listner responsible for logging sherpa cache events. It is
* bound to the sherpa cache via the dspace/config/ehcache.xml file. We need a
* dedicated Logger for each cache as the CacheEvent doesn't include details
* about where the event occur
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*
*/
public class SherpaCacheLogger implements CacheEventListener<Object, Object> {

    private static final Logger log = LogManager.getLogger(SherpaCacheLogger.class);

    /**
     * Log each cache event at debug level, including the event type and the
     * affected cache key.
     */
    @Override
    public void onEvent(CacheEvent<?, ?> cacheEvent) {
        log.debug("Sherpa Cache Event Type: {} | Key: {} ",
            cacheEvent.getType(), cacheEvent.getKey());
    }
}

View File

@@ -9,7 +9,6 @@ package org.dspace.app.sherpa.submit;
import java.util.Iterator; import java.util.Iterator;
import java.util.LinkedHashSet; import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
@@ -63,19 +62,19 @@ public class SHERPASubmitService {
* issnItemExtractor(s) in the SHERPA spring configuration. * issnItemExtractor(s) in the SHERPA spring configuration.
* The ISSNs are not validated with a regular expression or other rules - any values * The ISSNs are not validated with a regular expression or other rules - any values
* extracted will be included in API queries. * extracted will be included in API queries.
* Return the first not empty response from Sherpa
* @see "dspace-dspace-addon-sherpa-configuration-services.xml" * @see "dspace-dspace-addon-sherpa-configuration-services.xml"
* @param context DSpace context * @param context DSpace context
* @param item DSpace item containing ISSNs to be checked * @param item DSpace item containing ISSNs to be checked
* @return SHERPA v2 API response (policy data) * @return SHERPA v2 API response (policy data)
*/ */
public List<SHERPAResponse> searchRelatedJournals(Context context, Item item) { public SHERPAResponse searchRelatedJournals(Context context, Item item) {
Set<String> issns = getISSNs(context, item); Set<String> issns = getISSNs(context, item);
if (issns == null || issns.size() == 0) { if (issns == null || issns.size() == 0) {
return null; return null;
} else { } else {
// SHERPA v2 API no longer supports "OR'd" ISSN search, perform individual searches instead // SHERPA v2 API no longer supports "OR'd" ISSN search, perform individual searches instead
Iterator<String> issnIterator = issns.iterator(); Iterator<String> issnIterator = issns.iterator();
List<SHERPAResponse> responses = new LinkedList<>();
while (issnIterator.hasNext()) { while (issnIterator.hasNext()) {
String issn = issnIterator.next(); String issn = issnIterator.next();
SHERPAResponse response = sherpaService.searchByJournalISSN(issn); SHERPAResponse response = sherpaService.searchByJournalISSN(issn);
@@ -83,14 +82,13 @@ public class SHERPASubmitService {
// Continue with loop // Continue with loop
log.warn("Failed to look up SHERPA ROMeO result for ISSN: " + issn log.warn("Failed to look up SHERPA ROMeO result for ISSN: " + issn
+ ": " + response.getMessage()); + ": " + response.getMessage());
return response;
} else if (!response.getJournals().isEmpty()) {
// return this response, if it is not empty
return response;
} }
// Store this response, even if it has an error (useful for UI reporting)
responses.add(response);
} }
if (responses.isEmpty()) { return new SHERPAResponse();
responses.add(new SHERPAResponse("SHERPA ROMeO lookup failed"));
}
return responses;
} }
} }

View File

@@ -0,0 +1,45 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.sherpa.v2;
import java.io.Serializable;
/**
* Model class for the Embargo of SHERPAv2 API (JSON)
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
public class SHERPAEmbargo implements Serializable {

    private static final long serialVersionUID = 6140668058547523656L;

    // Length of the embargo, expressed in the unit held by {@code units}.
    private int amount;
    // Unit string the embargo length is expressed in, as returned by the API.
    private String units;

    /**
     * Construct an embargo of the given length.
     *
     * @param amount length of the embargo
     * @param units  unit the length is expressed in
     */
    public SHERPAEmbargo(int amount, String units) {
        this.amount = amount;
        this.units = units;
    }

    /** @return the embargo length */
    public int getAmount() {
        return amount;
    }

    /** @param amount the embargo length */
    public void setAmount(int amount) {
        this.amount = amount;
    }

    /** @return the unit the embargo length is expressed in */
    public String getUnits() {
        return units;
    }

    /** @param units the unit the embargo length is expressed in */
    public void setUnits(String units) {
        this.units = units;
    }
}

View File

@@ -7,6 +7,7 @@
*/ */
package org.dspace.app.sherpa.v2; package org.dspace.app.sherpa.v2;
import java.io.Serializable;
import java.util.List; import java.util.List;
/** /**
@@ -21,7 +22,7 @@ import java.util.List;
* *
* @author Kim Shepherd * @author Kim Shepherd
*/ */
public class SHERPAJournal { public class SHERPAJournal implements Serializable {
private List<String> titles; private List<String> titles;
private String url; private String url;

View File

@@ -7,6 +7,7 @@
*/ */
package org.dspace.app.sherpa.v2; package org.dspace.app.sherpa.v2;
import java.io.Serializable;
import java.util.List; import java.util.List;
/** /**
@@ -28,7 +29,9 @@ import java.util.List;
* *
* @see SHERPAPublisherPolicy * @see SHERPAPublisherPolicy
*/ */
public class SHERPAPermittedVersion { public class SHERPAPermittedVersion implements Serializable {
private static final long serialVersionUID = 4992181606327727442L;
// Version (submitted, accepted, published) // Version (submitted, accepted, published)
private String articleVersion; private String articleVersion;
@@ -47,11 +50,6 @@ public class SHERPAPermittedVersion {
// Embargo // Embargo
private SHERPAEmbargo embargo; private SHERPAEmbargo embargo;
protected static class SHERPAEmbargo {
String units;
int amount;
}
public String getArticleVersion() { public String getArticleVersion() {
return articleVersion; return articleVersion;
} }

View File

@@ -7,6 +7,8 @@
*/ */
package org.dspace.app.sherpa.v2; package org.dspace.app.sherpa.v2;
import java.io.Serializable;
/** /**
* Plain java representation of a SHERPA Publisher object, based on SHERPA API v2 responses. * Plain java representation of a SHERPA Publisher object, based on SHERPA API v2 responses.
* *
@@ -18,7 +20,7 @@ package org.dspace.app.sherpa.v2;
* @see SHERPAJournal * @see SHERPAJournal
* @see SHERPAPublisherResponse * @see SHERPAPublisherResponse
*/ */
public class SHERPAPublisher { public class SHERPAPublisher implements Serializable {
private String name = null; private String name = null;
private String relationshipType; private String relationshipType;
private String country; private String country;

View File

@@ -7,6 +7,7 @@
*/ */
package org.dspace.app.sherpa.v2; package org.dspace.app.sherpa.v2;
import java.io.Serializable;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@@ -22,7 +23,7 @@ import java.util.Map;
* @see SHERPAJournal * @see SHERPAJournal
* @see SHERPAPermittedVersion * @see SHERPAPermittedVersion
*/ */
public class SHERPAPublisherPolicy { public class SHERPAPublisherPolicy implements Serializable {
private int id; private int id;
private boolean openAccessPermitted; private boolean openAccessPermitted;

View File

@@ -10,12 +10,15 @@ package org.dspace.app.sherpa.v2;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.io.InputStreamReader; import java.io.InputStreamReader;
import java.io.Serializable;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Date;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.TreeMap; import java.util.TreeMap;
import com.fasterxml.jackson.annotation.JsonIgnore;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.json.JSONArray; import org.json.JSONArray;
@@ -33,7 +36,10 @@ import org.json.JSONTokener;
* @author Kim Shepherd * @author Kim Shepherd
* *
*/ */
public class SHERPAResponse { public class SHERPAResponse implements Serializable {
private static final long serialVersionUID = 2732963970169240597L;
// Is this response to be treated as an error? // Is this response to be treated as an error?
private boolean error; private boolean error;
@@ -52,6 +58,9 @@ public class SHERPAResponse {
// SHERPA URI (the human page version of this API response) // SHERPA URI (the human page version of this API response)
private String uri; private String uri;
@JsonIgnore
private Date retrievalTime = new Date();
// Format enum - currently only JSON is supported // Format enum - currently only JSON is supported
public enum SHERPAFormat { public enum SHERPAFormat {
JSON, XML JSON, XML
@@ -71,6 +80,11 @@ public class SHERPAResponse {
} }
} }
/**
* Create an empty SHERPAResponse representation
*/
public SHERPAResponse() {}
/** /**
* Parse the SHERPA v2 API JSON and construct Romeo policy data for display * Parse the SHERPA v2 API JSON and construct Romeo policy data for display
* This method does not return a value, but rather populates the metadata and journals objects * This method does not return a value, but rather populates the metadata and journals objects
@@ -479,6 +493,12 @@ public class SHERPAResponse {
} }
permittedVersion.setLicenses(sherpaLicenses); permittedVersion.setLicenses(sherpaLicenses);
if (permitted.has("embargo")) {
JSONObject embargo = permitted.getJSONObject("embargo");
SHERPAEmbargo SHERPAEmbargo = new SHERPAEmbargo(embargo.getInt("amount"), embargo.getString("units"));
permittedVersion.setEmbargo(SHERPAEmbargo);
}
return permittedVersion; return permittedVersion;
} }
@@ -542,4 +562,8 @@ public class SHERPAResponse {
public SHERPASystemMetadata getMetadata() { public SHERPASystemMetadata getMetadata() {
return metadata; return metadata;
} }
public Date getRetrievalTime() {
return retrievalTime;
}
} }

View File

@@ -7,6 +7,8 @@
*/ */
package org.dspace.app.sherpa.v2; package org.dspace.app.sherpa.v2;
import java.io.Serializable;
/** /**
* Plain java representation of a SHERPA System Metadata object, based on SHERPA API v2 responses. * Plain java representation of a SHERPA System Metadata object, based on SHERPA API v2 responses.
* *
@@ -18,7 +20,7 @@ package org.dspace.app.sherpa.v2;
* *
* @author Kim Shepherd * @author Kim Shepherd
*/ */
public class SHERPASystemMetadata { public class SHERPASystemMetadata implements Serializable {
private int id; private int id;
private String uri; private String uri;

View File

@@ -0,0 +1,175 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.solrdatabaseresync;
import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD;
import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD_PREDB;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import org.apache.commons.cli.ParseException;
import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.dspace.core.Context;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.IndexingService;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.SolrSearchCore;
import org.dspace.discovery.indexobject.IndexableItem;
import org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.util.SolrUtils;
import org.dspace.utils.DSpace;
/**
* {@link DSpaceRunnable} implementation to update solr items with "predb" status to either:
* - Delete them from solr if they're not present in the database
* - Remove their status if they're present in the database
*/
public class SolrDatabaseResyncCli extends DSpaceRunnable<SolrDatabaseResyncCliScriptConfiguration> {
    /* Log4j logger */
    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SolrDatabaseResyncCli.class);

    // Configuration property holding the grace period before a "predb" document is processed.
    // The value is applied in milliseconds (see getMaxTime()).
    public static final String TIME_UNTIL_REINDEX_PROPERTY = "solr-database-resync.time-until-reindex";

    private IndexingService indexingService;
    private SolrSearchCore solrSearchCore;
    private IndexObjectFactoryFactory indexObjectServiceFactory;
    private ConfigurationService configurationService;

    // Grace period in milliseconds, loaded from configuration in internalRun().
    private int timeUntilReindex = 0;
    // Formatted upper bound on the last-indexed date of documents to process.
    private String maxTime;

    @Override
    public SolrDatabaseResyncCliScriptConfiguration getScriptConfiguration() {
        return new DSpace().getServiceManager()
                .getServiceByName("solr-database-resync", SolrDatabaseResyncCliScriptConfiguration.class);
    }

    /**
     * Entry point for the scheduled task: create the script, set it up and run it once.
     */
    public static void runScheduled() throws Exception {
        SolrDatabaseResyncCli script = new SolrDatabaseResyncCli();
        script.setup();
        script.internalRun();
    }

    /**
     * Wire the required services from the DSpace service manager.
     */
    @Override
    public void setup() throws ParseException {
        indexingService = DSpaceServicesFactory.getInstance().getServiceManager()
                .getServiceByName(IndexingService.class.getName(), IndexingService.class);
        solrSearchCore = DSpaceServicesFactory.getInstance().getServiceManager()
                .getServicesByType(SolrSearchCore.class).get(0);
        indexObjectServiceFactory = IndexObjectFactoryFactory.getInstance();
        configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
    }

    /**
     * Run the resync: compute the cutoff time, then reconcile all "predb"
     * items against the database with authorisation checks disabled.
     */
    @Override
    public void internalRun() throws Exception {
        logInfoAndOut("Starting Item resync of Solr and Database...");

        timeUntilReindex = getTimeUntilReindex();
        maxTime = getMaxTime();

        Context context = new Context();

        try {
            context.turnOffAuthorisationSystem();
            performStatusUpdate(context);
        } finally {
            // Always restore auth state and complete the context, even on failure.
            context.restoreAuthSystemState();
            context.complete();
        }
    }

    /**
     * Query Solr for item documents still flagged "predb" and older than the
     * cutoff; for each, clear the flag if the item exists in the database, or
     * remove the document if it does not. Commits the index at the end.
     *
     * NOTE(review): the query does not call setRows(), so Solr's default row
     * count may cap how many documents are processed per run — confirm whether
     * repeated runs are expected to drain the backlog.
     */
    private void performStatusUpdate(Context context) throws SearchServiceException, SolrServerException, IOException {
        SolrQuery solrQuery = new SolrQuery();
        solrQuery.setQuery(STATUS_FIELD + ":" + STATUS_FIELD_PREDB);
        solrQuery.addFilterQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":" + IndexableItem.TYPE);
        String dateRangeFilter = SearchUtils.LAST_INDEXED_FIELD + ":[* TO " + maxTime + "]";
        logDebugAndOut("Date range filter used; " + dateRangeFilter);
        solrQuery.addFilterQuery(dateRangeFilter);
        solrQuery.addField(SearchUtils.RESOURCE_ID_FIELD);
        solrQuery.addField(SearchUtils.RESOURCE_UNIQUE_ID);
        QueryResponse response = solrSearchCore.getSolr().query(solrQuery, solrSearchCore.REQUEST_METHOD);

        if (response != null) {
            logInfoAndOut(response.getResults().size() + " items found to process");

            for (SolrDocument doc : response.getResults()) {
                String uuid = (String) doc.getFirstValue(SearchUtils.RESOURCE_ID_FIELD);
                String uniqueId = (String) doc.getFirstValue(SearchUtils.RESOURCE_UNIQUE_ID);
                logDebugAndOut("Processing item with UUID: " + uuid);

                Optional<IndexableObject> indexableObject = Optional.empty();
                try {
                    indexableObject = indexObjectServiceFactory
                            .getIndexableObjectFactory(uniqueId).findIndexableObject(context, uuid);
                } catch (SQLException e) {
                    // DB lookup failed: treat as absent so the stale solr document is removed below.
                    log.warn("An exception occurred when attempting to retrieve item with UUID \"" + uuid +
                            "\" from the database, removing related solr document", e);
                }

                try {
                    if (indexableObject.isPresent()) {
                        logDebugAndOut("Item exists in DB, updating solr document");
                        updateItem(context, indexableObject.get());
                    } else {
                        logDebugAndOut("Item doesn't exist in DB, removing solr document");
                        removeItem(context, uniqueId);
                    }
                } catch (SQLException | IOException e) {
                    // Log and continue with the next document rather than aborting the batch.
                    log.error(e.getMessage(), e);
                }
            }
        }

        indexingService.commit();
    }

    /**
     * Atomically remove the "predb" status value from the item's solr document.
     */
    private void updateItem(Context context, IndexableObject indexableObject) throws SolrServerException, IOException {
        Map<String,Object> fieldModifier = new HashMap<>(1);
        fieldModifier.put("remove", STATUS_FIELD_PREDB);
        indexingService.atomicUpdate(context, indexableObject.getUniqueIndexID(), STATUS_FIELD, fieldModifier);
    }

    /**
     * Remove the solr document with the given unique id from the index.
     */
    private void removeItem(Context context, String uniqueId) throws IOException, SQLException {
        indexingService.unIndexContent(context, uniqueId);
    }

    /**
     * @return "now" minus the configured grace period, formatted for use in a solr date range query
     */
    private String getMaxTime() {
        Calendar cal = Calendar.getInstance();
        if (timeUntilReindex > 0) {
            cal.add(Calendar.MILLISECOND, -timeUntilReindex);
        }
        return SolrUtils.getDateFormatter().format(cal.getTime());
    }

    /**
     * @return the configured grace period in milliseconds (0 when unset)
     */
    private int getTimeUntilReindex() {
        return configurationService.getIntProperty(TIME_UNTIL_REINDEX_PROPERTY, 0);
    }

    // Log at info level and echo to stdout for interactive/scheduled runs.
    private void logInfoAndOut(String message) {
        log.info(message);
        System.out.println(message);
    }

    // Log at debug level and echo to stdout for interactive/scheduled runs.
    private void logDebugAndOut(String message) {
        log.debug(message);
        System.out.println(message);
    }
}

View File

@@ -0,0 +1,42 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.solrdatabaseresync;
import org.apache.commons.cli.Options;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
/**
* The {@link ScriptConfiguration} for the {@link SolrDatabaseResyncCli} script.
*/
public class SolrDatabaseResyncCliScriptConfiguration extends ScriptConfiguration<SolrDatabaseResyncCli> {

    private Class<SolrDatabaseResyncCli> dspaceRunnableClass;

    @Override
    public Class<SolrDatabaseResyncCli> getDspaceRunnableClass() {
        return dspaceRunnableClass;
    }

    @Override
    public void setDspaceRunnableClass(Class<SolrDatabaseResyncCli> dspaceRunnableClass) {
        this.dspaceRunnableClass = dspaceRunnableClass;
    }

    /**
     * The resync script may be launched by anyone; no special privileges required.
     */
    @Override
    public boolean isAllowedToExecute(Context context) {
        return true;
    }

    /**
     * The script takes no command line options; an empty option set is created
     * lazily and cached on the inherited field.
     */
    @Override
    public Options getOptions() {
        Options configured = options;
        if (configured == null) {
            configured = new Options();
            options = configured;
        }
        return configured;
    }
}

View File

@@ -0,0 +1,104 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authenticate;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.dspace.authenticate.factory.AuthenticateServiceFactory;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.kernel.ServiceManager;
import org.dspace.utils.DSpace;
/**
* Implementation of {@link AuthenticationMethod} that delegate all the method
* invocations to the bean of class {@link OrcidAuthenticationBean}.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class OrcidAuthentication implements AuthenticationMethod {

    private final ServiceManager serviceManager = new DSpace().getServiceManager();

    /**
     * Check whether the ORCID authentication plugin is among the configured
     * authentication methods.
     *
     * @return true if enabled, false otherwise
     */
    public static boolean isEnabled() {
        String pluginName = new OrcidAuthentication().getName();
        for (Iterator<AuthenticationMethod> methods = AuthenticateServiceFactory.getInstance()
            .getAuthenticationService().authenticationMethodIterator(); methods.hasNext();) {
            if (pluginName.equals(methods.next().getName())) {
                return true;
            }
        }
        return false;
    }

    /** Delegates to the {@code orcidAuthentication} bean. */
    @Override
    public boolean canSelfRegister(Context context, HttpServletRequest request, String username) throws SQLException {
        return getOrcidAuthentication().canSelfRegister(context, request, username);
    }

    /** Delegates to the {@code orcidAuthentication} bean. */
    @Override
    public void initEPerson(Context context, HttpServletRequest request, EPerson eperson) throws SQLException {
        getOrcidAuthentication().initEPerson(context, request, eperson);
    }

    /** Delegates to the {@code orcidAuthentication} bean. */
    @Override
    public boolean allowSetPassword(Context context, HttpServletRequest request, String username) throws SQLException {
        return getOrcidAuthentication().allowSetPassword(context, request, username);
    }

    /** Delegates to the {@code orcidAuthentication} bean. */
    @Override
    public boolean isImplicit() {
        return getOrcidAuthentication().isImplicit();
    }

    /** Delegates to the {@code orcidAuthentication} bean. */
    @Override
    public List<Group> getSpecialGroups(Context context, HttpServletRequest request) throws SQLException {
        return getOrcidAuthentication().getSpecialGroups(context, request);
    }

    /** Delegates to the {@code orcidAuthentication} bean. */
    @Override
    public int authenticate(Context context, String username, String password, String realm, HttpServletRequest request)
        throws SQLException {
        return getOrcidAuthentication().authenticate(context, username, password, realm, request);
    }

    /** Delegates to the {@code orcidAuthentication} bean. */
    @Override
    public String loginPageURL(Context context, HttpServletRequest request, HttpServletResponse response) {
        return getOrcidAuthentication().loginPageURL(context, request, response);
    }

    /** Delegates to the {@code orcidAuthentication} bean. */
    @Override
    public String getName() {
        return getOrcidAuthentication().getName();
    }

    /** Delegates to the {@code orcidAuthentication} bean. */
    @Override
    public boolean isUsed(Context context, HttpServletRequest request) {
        return getOrcidAuthentication().isUsed(context, request);
    }

    // Look up the Spring-managed bean that holds the real implementation.
    private OrcidAuthenticationBean getOrcidAuthentication() {
        return serviceManager.getServiceByName("orcidAuthentication", OrcidAuthenticationBean.class);
    }
}

View File

@@ -0,0 +1,330 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authenticate;
import static java.lang.String.format;
import static java.net.URLEncoder.encode;
import static org.apache.commons.lang.BooleanUtils.toBoolean;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.dspace.content.Item.ANY;
import java.io.UnsupportedEncodingException;
import java.sql.SQLException;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.service.EPersonService;
import org.dspace.orcid.OrcidToken;
import org.dspace.orcid.client.OrcidClient;
import org.dspace.orcid.client.OrcidConfiguration;
import org.dspace.orcid.model.OrcidTokenResponseDTO;
import org.dspace.orcid.service.OrcidSynchronizationService;
import org.dspace.orcid.service.OrcidTokenService;
import org.dspace.profile.ResearcherProfile;
import org.dspace.profile.service.ResearcherProfileService;
import org.dspace.services.ConfigurationService;
import org.orcid.jaxb.model.v3.release.record.Email;
import org.orcid.jaxb.model.v3.release.record.Person;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * ORCID-based authentication method for DSpace.
 * <p>
 * Exchanges the OAuth authorization code returned by ORCID for an access
 * token, then logs in the matching {@link EPerson} — looked up first by netid
 * (the ORCID iD), then by the public email on the ORCID record. If no account
 * exists and self-registration is enabled, a new EPerson is created from the
 * ORCID person record.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public class OrcidAuthenticationBean implements AuthenticationMethod {

    /** Request attribute flagging that ORCID authentication was attempted. */
    public static final String ORCID_AUTH_ATTRIBUTE = "orcid-authentication";

    private static final Logger LOGGER = LoggerFactory.getLogger(OrcidAuthenticationBean.class);

    private static final String LOGIN_PAGE_URL_FORMAT = "%s?client_id=%s&response_type=code&scope=%s&redirect_uri=%s";

    @Autowired
    private OrcidClient orcidClient;

    @Autowired
    private OrcidConfiguration orcidConfiguration;

    @Autowired
    private ConfigurationService configurationService;

    @Autowired
    private EPersonService ePersonService;

    @Autowired
    private ResearcherProfileService researcherProfileService;

    @Autowired
    private OrcidSynchronizationService orcidSynchronizationService;

    @Autowired
    private OrcidTokenService orcidTokenService;

    /**
     * Authenticate using the ORCID OAuth authorization code carried by the
     * incoming request ("code" parameter). Username, password and realm are
     * ignored by this method.
     *
     * @return {@code SUCCESS}, {@code NO_SUCH_USER} or {@code BAD_ARGS}
     * @throws SQLException if a database error occurs
     */
    @Override
    public int authenticate(Context context, String username, String password, String realm, HttpServletRequest request)
        throws SQLException {

        if (request == null) {
            LOGGER.warn("Unable to authenticate using ORCID because the request object is null.");
            return BAD_ARGS;
        }

        // The OAuth authorization code returned by ORCID after user consent
        String code = request.getParameter("code");
        if (StringUtils.isEmpty(code)) {
            LOGGER.warn("The incoming request has no code parameter");
            return NO_SUCH_USER;
        }

        // Mark the request so isUsed() can report that ORCID auth was attempted
        request.setAttribute(ORCID_AUTH_ATTRIBUTE, true);

        return authenticateWithOrcid(context, code, request);
    }

    /**
     * Build the ORCID authorization URL the user should be redirected to in
     * order to start the OAuth flow. Returns an empty string if any mandatory
     * configuration property is missing or the redirect URI cannot be encoded.
     */
    @Override
    public String loginPageURL(Context context, HttpServletRequest request, HttpServletResponse response) {

        String authorizeUrl = orcidConfiguration.getAuthorizeEndpointUrl();
        String clientId = orcidConfiguration.getClientId();
        String redirectUri = orcidConfiguration.getRedirectUrl();
        // ORCID expects multiple scopes to be separated by '+'
        String scopes = String.join("+", orcidConfiguration.getScopes());

        if (StringUtils.isAnyBlank(authorizeUrl, clientId, redirectUri, scopes)) {
            LOGGER.error("Missing mandatory configuration properties for OrcidAuthentication");
            return "";
        }

        try {
            return format(LOGIN_PAGE_URL_FORMAT, authorizeUrl, clientId, scopes, encode(redirectUri, "UTF-8"));
        } catch (UnsupportedEncodingException e) {
            LOGGER.error(e.getMessage(), e);
            return "";
        }
    }

    @Override
    public boolean isUsed(Context context, HttpServletRequest request) {
        // Set by authenticate() when an ORCID login was attempted on this request
        return request.getAttribute(ORCID_AUTH_ATTRIBUTE) != null;
    }

    @Override
    public boolean canSelfRegister(Context context, HttpServletRequest request, String username) throws SQLException {
        return canSelfRegister();
    }

    @Override
    public void initEPerson(Context context, HttpServletRequest request, EPerson eperson) throws SQLException {
        // Nothing to initialize: registration is handled in registerNewEPerson()
    }

    @Override
    public boolean allowSetPassword(Context context, HttpServletRequest request, String username) throws SQLException {
        // ORCID accounts authenticate via OAuth only; no local password
        return false;
    }

    @Override
    public boolean isImplicit() {
        return false;
    }

    @Override
    public List<Group> getSpecialGroups(Context context, HttpServletRequest request) throws SQLException {
        return Collections.emptyList();
    }

    @Override
    public String getName() {
        return "orcid";
    }

    /**
     * Core of the authentication flow: exchange the code for a token, then try
     * to match an existing EPerson by netid (ORCID iD) or email, falling back
     * to self-registration when enabled.
     */
    private int authenticateWithOrcid(Context context, String code, HttpServletRequest request) throws SQLException {
        OrcidTokenResponseDTO token = getOrcidAccessToken(code);
        if (token == null) {
            return NO_SUCH_USER;
        }

        String orcid = token.getOrcid();

        // First match: an EPerson whose netid is the ORCID iD
        EPerson ePerson = ePersonService.findByNetid(context, orcid);
        if (ePerson != null) {
            return ePerson.canLogIn() ? logInEPerson(context, token, ePerson) : BAD_ARGS;
        }

        Person person = getPersonFromOrcid(token);
        if (person == null) {
            return NO_SUCH_USER;
        }

        // Second match: an EPerson with the (public) email from the ORCID record.
        // The email may be private on ORCID, in which case it is absent here.
        String email = getEmail(person).orElse(null);
        if (email != null) {
            ePerson = ePersonService.findByEmail(context, email);
            if (ePerson != null) {
                return ePerson.canLogIn() ? logInEPerson(context, token, ePerson) : BAD_ARGS;
            }
        }

        return canSelfRegister() ? registerNewEPerson(context, person, token) : NO_SUCH_USER;
    }

    /**
     * Log in the given EPerson, store the ORCID metadata/token on it, and link
     * its researcher profile (if any) to the ORCID record.
     */
    private int logInEPerson(Context context, OrcidTokenResponseDTO token, EPerson ePerson)
        throws SQLException {

        context.setCurrentUser(ePerson);

        setOrcidMetadataOnEPerson(context, ePerson, token);

        ResearcherProfile profile = findProfile(context, ePerson);
        if (profile != null) {
            orcidSynchronizationService.linkProfile(context, profile.getItem(), token);
        }

        return SUCCESS;
    }

    private ResearcherProfile findProfile(Context context, EPerson ePerson) throws SQLException {
        try {
            return researcherProfileService.findById(context, ePerson.getID());
        } catch (AuthorizeException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Create and log in a new self-registered EPerson from the ORCID person
     * record. Requires a public email on the ORCID record; rolls back and
     * returns {@code NO_SUCH_USER} on any failure.
     */
    private int registerNewEPerson(Context context, Person person, OrcidTokenResponseDTO token) throws SQLException {
        try {
            context.turnOffAuthorisationSystem();

            String email = getEmail(person)
                .orElseThrow(() -> new IllegalStateException("The email is configured private on orcid"));

            String orcid = token.getOrcid();

            EPerson eperson = ePersonService.create(context);
            eperson.setNetid(orcid);
            eperson.setEmail(email);

            Optional<String> firstName = getFirstName(person);
            if (firstName.isPresent()) {
                eperson.setFirstName(context, firstName.get());
            }

            Optional<String> lastName = getLastName(person);
            if (lastName.isPresent()) {
                eperson.setLastName(context, lastName.get());
            }

            eperson.setCanLogIn(true);
            eperson.setSelfRegistered(true);

            setOrcidMetadataOnEPerson(context, eperson, token);

            ePersonService.update(context, eperson);
            context.setCurrentUser(eperson);
            context.dispatchEvents();

            return SUCCESS;

        } catch (Exception ex) {
            LOGGER.error("An error occurred while registering a new EPerson from ORCID", ex);
            context.rollback();
            return NO_SUCH_USER;
        } finally {
            context.restoreAuthSystemState();
        }
    }

    /**
     * Store the ORCID iD, granted scopes and access token on the EPerson.
     */
    private void setOrcidMetadataOnEPerson(Context context, EPerson person, OrcidTokenResponseDTO token)
        throws SQLException {

        String orcid = token.getOrcid();
        String accessToken = token.getAccessToken();
        String[] scopes = token.getScopeAsArray();

        ePersonService.setMetadataSingleValue(context, person, "eperson", "orcid", null, null, orcid);
        // Replace any previously stored scopes with the freshly granted ones
        ePersonService.clearMetadata(context, person, "eperson", "orcid", "scope", ANY);
        for (String scope : scopes) {
            ePersonService.addMetadata(context, person, "eperson", "orcid", "scope", null, scope);
        }

        OrcidToken orcidToken = orcidTokenService.findByEPerson(context, person);
        if (orcidToken == null) {
            orcidTokenService.create(context, person, accessToken);
        } else {
            orcidToken.setAccessToken(accessToken);
        }
    }

    private Person getPersonFromOrcid(OrcidTokenResponseDTO token) {
        try {
            return orcidClient.getPerson(token.getAccessToken(), token.getOrcid());
        } catch (Exception ex) {
            LOGGER.error("An error occurred while retrieving the ORCID record with id " + token.getOrcid(), ex);
            return null;
        }
    }

    /**
     * Extract the first (public) email from the ORCID person record, if any.
     */
    private Optional<String> getEmail(Person person) {
        List<Email> emails = person.getEmails() != null ? person.getEmails().getEmails() : Collections.emptyList();
        if (CollectionUtils.isEmpty(emails)) {
            return Optional.empty();
        }
        return Optional.ofNullable(emails.get(0).getEmail());
    }

    private Optional<String> getFirstName(Person person) {
        return Optional.ofNullable(person.getName())
            .map(name -> name.getGivenNames())
            .map(givenNames -> givenNames.getContent());
    }

    private Optional<String> getLastName(Person person) {
        return Optional.ofNullable(person.getName())
            .map(name -> name.getFamilyName())
            .map(familyName -> familyName.getContent());
    }

    /**
     * Whether new users may be self-registered on first ORCID login.
     * Controlled by {@code authentication-orcid.can-self-register}
     * (defaults to true).
     */
    private boolean canSelfRegister() {
        String canSelfRegister = configurationService.getProperty("authentication-orcid.can-self-register", "true");
        if (isBlank(canSelfRegister)) {
            return true;
        }
        return toBoolean(canSelfRegister);
    }

    private OrcidTokenResponseDTO getOrcidAccessToken(String code) {
        try {
            return orcidClient.getAccessToken(code);
        } catch (Exception ex) {
            LOGGER.error("An error occurred while retrieving the ORCID access_token", ex);
            return null;
        }
    }

    public OrcidClient getOrcidClient() {
        return orcidClient;
    }

    public void setOrcidClient(OrcidClient orcidClient) {
        this.orcidClient = orcidClient;
    }

}

View File

@@ -50,7 +50,7 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho
*/ */
protected SolrClient solr = null; protected SolrClient solr = null;
protected SolrClient getSolr() public SolrClient getSolr()
throws MalformedURLException, SolrServerException, IOException { throws MalformedURLException, SolrServerException, IOException {
if (solr == null) { if (solr == null) {
@@ -67,7 +67,11 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho
SolrQuery solrQuery = new SolrQuery().setQuery("*:*"); SolrQuery solrQuery = new SolrQuery().setQuery("*:*");
solrServer.query(solrQuery); try {
solrServer.query(solrQuery);
} catch (Exception ex) {
log.error("An error occurs querying authority solr core", ex);
}
solr = solrServer; solr = solrServer;
} }

View File

@@ -7,6 +7,9 @@
*/ */
package org.dspace.authorize; package org.dspace.authorize;
import static org.dspace.app.util.AuthorizeUtil.canCollectionAdminManageAccounts;
import static org.dspace.app.util.AuthorizeUtil.canCommunityAdminManageAccounts;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
@@ -900,6 +903,16 @@ public class AuthorizeServiceImpl implements AuthorizeService {
return discoverResult.getTotalSearchResults(); return discoverResult.getTotalSearchResults();
} }
@Override
public boolean isAccountManager(Context context) {
try {
return (canCommunityAdminManageAccounts() && isCommunityAdmin(context)
|| canCollectionAdminManageAccounts() && isCollectionAdmin(context));
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
private boolean performCheck(Context context, String query) throws SQLException { private boolean performCheck(Context context, String query) throws SQLException {
if (context.getCurrentUser() == null) { if (context.getCurrentUser() == null) {
return false; return false;

View File

@@ -592,4 +592,12 @@ public interface AuthorizeService {
*/ */
long countAdminAuthorizedCollection(Context context, String query) long countAdminAuthorizedCollection(Context context, String query)
throws SearchServiceException, SQLException; throws SearchServiceException, SQLException;
/**
* Returns true if the current user can manage accounts.
*
* @param context context with the current user
* @return true if the current user can manage accounts
*/
boolean isAccountManager(Context context);
} }

View File

@@ -158,6 +158,11 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
} }
bundle.addBitstream(bitstream); bundle.addBitstream(bitstream);
// If a bitstream is moved from one bundle to another it may be temporarily flagged as deleted
// (when removed from the original bundle)
if (bitstream.isDeleted()) {
bitstream.setDeleted(false);
}
bitstream.getBundles().add(bundle); bitstream.getBundles().add(bundle);

View File

@@ -621,8 +621,14 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
}); });
for (MetadataValue metadataValue : metadataValues) { for (MetadataValue metadataValue : metadataValues) {
//Retrieve & store the place for each metadata value //Retrieve & store the place for each metadata value
if (StringUtils.startsWith(metadataValue.getAuthority(), Constants.VIRTUAL_AUTHORITY_PREFIX) && if (
((RelationshipMetadataValue) metadataValue).isUseForPlace()) { // For virtual MDVs with useForPlace=true,
// update both the place of the metadatum and the place of the Relationship.
// E.g. for an Author relationship,
// the place should be updated using the same principle as dc.contributor.author.
StringUtils.startsWith(metadataValue.getAuthority(), Constants.VIRTUAL_AUTHORITY_PREFIX)
&& ((RelationshipMetadataValue) metadataValue).isUseForPlace()
) {
int mvPlace = getMetadataValuePlace(fieldToLastPlace, metadataValue); int mvPlace = getMetadataValuePlace(fieldToLastPlace, metadataValue);
metadataValue.setPlace(mvPlace); metadataValue.setPlace(mvPlace);
String authority = metadataValue.getAuthority(); String authority = metadataValue.getAuthority();
@@ -635,8 +641,16 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
} }
relationshipService.update(context, relationship); relationshipService.update(context, relationship);
} else if (!StringUtils.startsWith(metadataValue.getAuthority(), } else if (
Constants.VIRTUAL_AUTHORITY_PREFIX)) { // Otherwise, just set the place of the metadatum
// ...unless the metadatum in question is a relation.* metadatum.
// This case is a leftover from when a Relationship is removed and copied to metadata.
// If we let its place change the order of any remaining Relationships will be affected.
// todo: this makes it so these leftover MDVs can't be reordered later on
!StringUtils.equals(
metadataValue.getMetadataField().getMetadataSchema().getName(), "relation"
)
) {
int mvPlace = getMetadataValuePlace(fieldToLastPlace, metadataValue); int mvPlace = getMetadataValuePlace(fieldToLastPlace, metadataValue);
metadataValue.setPlace(mvPlace); metadataValue.setPlace(mvPlace);
} }

View File

@@ -17,6 +17,7 @@ import java.util.Date;
import java.util.Iterator; import java.util.Iterator;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.UUID; import java.util.UUID;
import java.util.function.Supplier; import java.util.function.Supplier;
@@ -40,6 +41,7 @@ import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.BundleService; import org.dspace.content.service.BundleService;
import org.dspace.content.service.CollectionService; import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService; import org.dspace.content.service.CommunityService;
import org.dspace.content.service.EntityTypeService;
import org.dspace.content.service.InstallItemService; import org.dspace.content.service.InstallItemService;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
import org.dspace.content.service.MetadataSchemaService; import org.dspace.content.service.MetadataSchemaService;
@@ -56,6 +58,15 @@ import org.dspace.harvest.HarvestedItem;
import org.dspace.harvest.service.HarvestedItemService; import org.dspace.harvest.service.HarvestedItemService;
import org.dspace.identifier.IdentifierException; import org.dspace.identifier.IdentifierException;
import org.dspace.identifier.service.IdentifierService; import org.dspace.identifier.service.IdentifierService;
import org.dspace.orcid.OrcidHistory;
import org.dspace.orcid.OrcidQueue;
import org.dspace.orcid.OrcidToken;
import org.dspace.orcid.model.OrcidEntityType;
import org.dspace.orcid.service.OrcidHistoryService;
import org.dspace.orcid.service.OrcidQueueService;
import org.dspace.orcid.service.OrcidSynchronizationService;
import org.dspace.orcid.service.OrcidTokenService;
import org.dspace.profile.service.ResearcherProfileService;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.versioning.service.VersioningService; import org.dspace.versioning.service.VersioningService;
import org.dspace.workflow.WorkflowItemService; import org.dspace.workflow.WorkflowItemService;
@@ -120,6 +131,24 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
@Autowired(required = true) @Autowired(required = true)
private RelationshipMetadataService relationshipMetadataService; private RelationshipMetadataService relationshipMetadataService;
@Autowired(required = true)
private EntityTypeService entityTypeService;
@Autowired
private OrcidTokenService orcidTokenService;
@Autowired(required = true)
private OrcidHistoryService orcidHistoryService;
@Autowired(required = true)
private OrcidQueueService orcidQueueService;
@Autowired(required = true)
private OrcidSynchronizationService orcidSynchronizationService;
@Autowired(required = true)
private ResearcherProfileService researcherProfileService;
protected ItemServiceImpl() { protected ItemServiceImpl() {
super(); super();
} }
@@ -241,6 +270,10 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
return itemDAO.findAll(context, true, true); return itemDAO.findAll(context, true, true);
} }
public Iterator<Item> findAllRegularItems(Context context) throws SQLException {
return itemDAO.findAllRegularItems(context);
};
@Override @Override
public Iterator<Item> findBySubmitter(Context context, EPerson eperson) throws SQLException { public Iterator<Item> findBySubmitter(Context context, EPerson eperson) throws SQLException {
return itemDAO.findBySubmitter(context, eperson); return itemDAO.findBySubmitter(context, eperson);
@@ -724,7 +757,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
+ item.getID())); + item.getID()));
// Remove relationships // Remove relationships
for (Relationship relationship : relationshipService.findByItem(context, item)) { for (Relationship relationship : relationshipService.findByItem(context, item, -1, -1, false, false)) {
relationshipService.forceDelete(context, relationship, false, false); relationshipService.forceDelete(context, relationship, false, false);
} }
@@ -737,6 +770,8 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
// remove version attached to the item // remove version attached to the item
removeVersion(context, item); removeVersion(context, item);
removeOrcidSynchronizationStuff(context, item);
// Also delete the item if it appears in a harvested collection. // Also delete the item if it appears in a harvested collection.
HarvestedItem hi = harvestedItemService.find(context, item); HarvestedItem hi = harvestedItemService.find(context, item);
@@ -744,6 +779,11 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
harvestedItemService.delete(context, hi); harvestedItemService.delete(context, hi);
} }
OrcidToken orcidToken = orcidTokenService.findByProfileItem(context, item);
if (orcidToken != null) {
orcidToken.setProfileItem(null);
}
//Only clear collections after we have removed everything else from the item //Only clear collections after we have removed everything else from the item
item.clearCollections(); item.clearCollections();
item.setOwningCollection(null); item.setOwningCollection(null);
@@ -1131,6 +1171,50 @@ prevent the generation of resource policy entry values with null dspace_object a
return !(hasCustomPolicy && isAnonimousGroup && datesAreNull); return !(hasCustomPolicy && isAnonimousGroup && datesAreNull);
} }
/**
* Returns an iterator of Items possessing the passed metadata field, or only
* those matching the passed value, if value is not Item.ANY
*
* @param context DSpace context object
* @param schema metadata field schema
* @param element metadata field element
* @param qualifier metadata field qualifier
* @param value field value or Item.ANY to match any value
* @return an iterator over the items matching that authority value
* @throws SQLException if database error
* An exception that provides information on a database access error or other errors.
* @throws AuthorizeException if authorization error
* Exception indicating the current user of the context does not have permission
* to perform a particular action.
*/
@Override
public Iterator<Item> findArchivedByMetadataField(Context context,
String schema, String element, String qualifier, String value)
throws SQLException, AuthorizeException {
MetadataSchema mds = metadataSchemaService.find(context, schema);
if (mds == null) {
throw new IllegalArgumentException("No such metadata schema: " + schema);
}
MetadataField mdf = metadataFieldService.findByElement(context, mds, element, qualifier);
if (mdf == null) {
throw new IllegalArgumentException(
"No such metadata field: schema=" + schema + ", element=" + element + ", qualifier=" + qualifier);
}
if (Item.ANY.equals(value)) {
return itemDAO.findByMetadataField(context, mdf, null, true);
} else {
return itemDAO.findByMetadataField(context, mdf, value, true);
}
}
@Override
public Iterator<Item> findArchivedByMetadataField(Context context, String metadataField, String value)
throws SQLException, AuthorizeException {
String[] mdValueByField = getMDValueByField(metadataField);
return findArchivedByMetadataField(context, mdValueByField[0], mdValueByField[1], mdValueByField[2], value);
}
/** /**
* Returns an iterator of Items possessing the passed metadata field, or only * Returns an iterator of Items possessing the passed metadata field, or only
* those matching the passed value, if value is not Item.ANY * those matching the passed value, if value is not Item.ANY
@@ -1535,5 +1619,100 @@ prevent the generation of resource policy entry values with null dspace_object a
.stream().findFirst().orElse(null); .stream().findFirst().orElse(null);
} }
@Override
public String getEntityTypeLabel(Item item) {
List<MetadataValue> mdvs = getMetadata(item, "dspace", "entity", "type", Item.ANY, false);
if (mdvs.isEmpty()) {
return null;
}
if (mdvs.size() > 1) {
log.warn(
"Item with uuid {}, handle {} has {} entity types ({}), expected 1 entity type",
item.getID(), item.getHandle(), mdvs.size(),
mdvs.stream().map(MetadataValue::getValue).collect(Collectors.toList())
);
}
String entityType = mdvs.get(0).getValue();
if (StringUtils.isBlank(entityType)) {
return null;
}
return entityType;
}
@Override
public EntityType getEntityType(Context context, Item item) throws SQLException {
String entityTypeString = getEntityTypeLabel(item);
if (StringUtils.isBlank(entityTypeString)) {
return null;
}
return entityTypeService.findByEntityType(context, entityTypeString);
}
private void removeOrcidSynchronizationStuff(Context context, Item item) throws SQLException, AuthorizeException {
if (isNotProfileOrOrcidEntity(item)) {
return;
}
context.turnOffAuthorisationSystem();
try {
createOrcidQueueRecordsToDeleteOnOrcid(context, item);
deleteOrcidHistoryRecords(context, item);
deleteOrcidQueueRecords(context, item);
} finally {
context.restoreAuthSystemState();
}
}
private boolean isNotProfileOrOrcidEntity(Item item) {
String entityType = getEntityTypeLabel(item);
return !OrcidEntityType.isValidEntityType(entityType)
&& !researcherProfileService.getProfileType().equals(entityType);
}
private void createOrcidQueueRecordsToDeleteOnOrcid(Context context, Item entity) throws SQLException {
String entityType = getEntityTypeLabel(entity);
if (entityType == null || researcherProfileService.getProfileType().equals(entityType)) {
return;
}
Map<Item, String> profileAndPutCodeMap = orcidHistoryService.findLastPutCodes(context, entity);
for (Item profile : profileAndPutCodeMap.keySet()) {
if (orcidSynchronizationService.isSynchronizationAllowed(profile, entity)) {
String putCode = profileAndPutCodeMap.get(profile);
String title = getMetadataFirstValue(entity, "dc", "title", null, Item.ANY);
orcidQueueService.createEntityDeletionRecord(context, profile, title, entityType, putCode);
}
}
}
private void deleteOrcidHistoryRecords(Context context, Item item) throws SQLException {
List<OrcidHistory> historyRecords = orcidHistoryService.findByProfileItemOrEntity(context, item);
for (OrcidHistory historyRecord : historyRecords) {
if (historyRecord.getProfileItem().equals(item)) {
orcidHistoryService.delete(context, historyRecord);
} else {
historyRecord.setEntity(null);
orcidHistoryService.update(context, historyRecord);
}
}
}
private void deleteOrcidQueueRecords(Context context, Item item) throws SQLException {
List<OrcidQueue> orcidQueueRecords = orcidQueueService.findByProfileItemOrEntity(context, item);
for (OrcidQueue orcidQueueRecord : orcidQueueRecords) {
orcidQueueService.delete(context, orcidQueueRecord);
}
}
} }

View File

@@ -89,6 +89,15 @@ public class Relationship implements ReloadableEntity<Integer> {
@Column(name = "rightward_value") @Column(name = "rightward_value")
private String rightwardValue; private String rightwardValue;
/**
* Whether the left and/or right side of a given relationship are the "latest".
* A side of a relationship is "latest" if the item on that side has either no other versions,
* or the item on that side is the most recent version that is relevant to the given relationship.
* This column affects what version of an item appears on search pages or the relationship listings of other items.
*/
@Column(name = "latest_version_status")
private LatestVersionStatus latestVersionStatus = LatestVersionStatus.BOTH;
/** /**
* Protected constructor, create object using: * Protected constructor, create object using:
* {@link org.dspace.content.service.RelationshipService#create(Context)} } * {@link org.dspace.content.service.RelationshipService#create(Context)} }
@@ -216,6 +225,39 @@ public class Relationship implements ReloadableEntity<Integer> {
this.rightwardValue = rightwardValue; this.rightwardValue = rightwardValue;
} }
/**
* Getter for {@link #latestVersionStatus}.
* @return the latest version status of this relationship.
*/
public LatestVersionStatus getLatestVersionStatus() {
return latestVersionStatus;
}
/**
* Setter for {@link #latestVersionStatus}.
* @param latestVersionStatus the new latest version status for this relationship.
*/
public void setLatestVersionStatus(LatestVersionStatus latestVersionStatus) {
if (this.latestVersionStatus == latestVersionStatus) {
return; // no change or cache reset needed
}
this.latestVersionStatus = latestVersionStatus;
// on one item, relation.* fields will change
// on the other item, relation.*.latestForDiscovery will change
leftItem.setMetadataModified();
rightItem.setMetadataModified();
}
public enum LatestVersionStatus {
// NOTE: SQL migration expects BOTH to be the first constant in this enum!
BOTH, // both items in this relationship are the "latest"
LEFT_ONLY, // the left-hand item of this relationship is the "latest", but the right-hand item is not
RIGHT_ONLY // the right-hand item of this relationship is the "latest", but the left-hand item is not
// NOTE: one side of any given relationship should ALWAYS be the "latest"
}
/** /**
* Standard getter for the ID for this Relationship * Standard getter for the ID for this Relationship
* @return The ID of this relationship * @return The ID of this relationship

View File

@@ -56,7 +56,9 @@ public interface RelationshipMetadataService {
* This method will retrieve the EntityType String from an item * This method will retrieve the EntityType String from an item
* @param item The Item for which the entityType String will be returned * @param item The Item for which the entityType String will be returned
* @return A String value indicating the entityType * @return A String value indicating the entityType
* @deprecated use {@link org.dspace.content.service.ItemService#getEntityTypeLabel(Item)} instead.
*/ */
@Deprecated
public String getEntityTypeStringFromMetadata(Item item); public String getEntityTypeStringFromMetadata(Item item);
} }

View File

@@ -7,16 +7,24 @@
*/ */
package org.dspace.content; package org.dspace.content;
import static org.dspace.content.RelationshipType.Tilted.LEFT;
import static org.dspace.content.RelationshipType.Tilted.RIGHT;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.HashMap; import java.util.HashMap;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.content.dao.pojo.ItemUuidAndRelationshipId;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.MetadataFieldService; import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.RelationshipService; import org.dspace.content.service.RelationshipService;
import org.dspace.content.service.RelationshipTypeService;
import org.dspace.content.virtual.VirtualMetadataConfiguration; import org.dspace.content.virtual.VirtualMetadataConfiguration;
import org.dspace.content.virtual.VirtualMetadataPopulator; import org.dspace.content.virtual.VirtualMetadataPopulator;
import org.dspace.core.Constants; import org.dspace.core.Constants;
@@ -33,6 +41,12 @@ public class RelationshipMetadataServiceImpl implements RelationshipMetadataServ
@Autowired(required = true) @Autowired(required = true)
protected RelationshipService relationshipService; protected RelationshipService relationshipService;
@Autowired(required = true)
protected RelationshipTypeService relationshipTypeService;
@Autowired(required = true)
protected ItemService itemService;
@Autowired(required = true) @Autowired(required = true)
protected VirtualMetadataPopulator virtualMetadataPopulator; protected VirtualMetadataPopulator virtualMetadataPopulator;
@@ -44,12 +58,25 @@ public class RelationshipMetadataServiceImpl implements RelationshipMetadataServ
Context context = new Context(); Context context = new Context();
List<RelationshipMetadataValue> fullMetadataValueList = new LinkedList<>(); List<RelationshipMetadataValue> fullMetadataValueList = new LinkedList<>();
try { try {
String entityType = getEntityTypeStringFromMetadata(item); EntityType entityType = itemService.getEntityType(context, item);
if (StringUtils.isNotBlank(entityType)) { if (entityType != null) {
// NOTE: The following code will add metadata fields of type relation.*.latestForDiscovery
// (e.g. relation.isAuthorOfPublication.latestForDiscovery).
// These fields contain the UUIDs of the items that have a relationship with current item,
// from the perspective of the other item. In other words, given a relationship with this item,
// the current item should have "latest status" in order for the other item to appear in
// relation.*.latestForDiscovery fields.
fullMetadataValueList.addAll(findLatestForDiscoveryMetadataValues(context, item, entityType));
// NOTE: The following code will, among other things,
// add metadata fields of type relation.* (e.g. relation.isAuthorOfPublication).
// These fields contain the UUIDs of the items that have a relationship with current item,
// from the perspective of this item. In other words, given a relationship with this item,
// the other item should have "latest status" in order to appear in relation.* fields.
List<Relationship> relationships = relationshipService.findByItem(context, item, -1, -1, true); List<Relationship> relationships = relationshipService.findByItem(context, item, -1, -1, true);
for (Relationship relationship : relationships) { for (Relationship relationship : relationships) {
fullMetadataValueList fullMetadataValueList
.addAll(findRelationshipMetadataValueForItemRelationship(context, item, entityType, .addAll(findRelationshipMetadataValueForItemRelationship(context, item, entityType.getLabel(),
relationship, enableVirtualMetadata)); relationship, enableVirtualMetadata));
} }
@@ -60,16 +87,90 @@ public class RelationshipMetadataServiceImpl implements RelationshipMetadataServ
return fullMetadataValueList; return fullMetadataValueList;
} }
public String getEntityTypeStringFromMetadata(Item item) { /**
List<MetadataValue> list = item.getMetadata(); * Create the list of relation.*.latestForDiscovery virtual metadata values for the given item.
for (MetadataValue mdv : list) { * @param context the DSpace context.
if (StringUtils.equals(mdv.getMetadataField().getMetadataSchema().getName(), "dspace") * @param item the item.
&& StringUtils.equals(mdv.getMetadataField().getElement(), "entity") * @param itemEntityType the entity type of the item.
&& StringUtils.equals(mdv.getMetadataField().getQualifier(), "type")) { * @return a list (may be empty) of metadata values of type relation.*.latestForDiscovery.
return mdv.getValue(); */
protected List<RelationshipMetadataValue> findLatestForDiscoveryMetadataValues(
Context context, Item item, EntityType itemEntityType
) throws SQLException {
final String schema = MetadataSchemaEnum.RELATION.getName();
final String qualifier = "latestForDiscovery";
List<RelationshipMetadataValue> mdvs = new LinkedList<>();
List<RelationshipType> relationshipTypes = relationshipTypeService.findByEntityType(context, itemEntityType);
for (RelationshipType relationshipType : relationshipTypes) {
// item is on left side of this relationship type
// NOTE: On the left item, we should index the uuids of the right items. If the relationship type is
// "tilted right", it means that we expect a huge amount of right items, so we don't index their uuids
// on the left item as a storage/performance improvement.
// As a consequence, when searching for related items (using discovery)
// on the pages of the right items you won't be able to find the left item.
if (relationshipType.getTilted() != RIGHT && relationshipType.getLeftType().equals(itemEntityType)) {
String element = relationshipType.getLeftwardType();
List<ItemUuidAndRelationshipId> data = relationshipService
.findByLatestItemAndRelationshipType(context, item, relationshipType, true);
mdvs.addAll(constructLatestForDiscoveryMetadataValues(context, schema, element, qualifier, data));
}
// item is on right side of this relationship type
// NOTE: On the right item, we should index the uuids of the left items. If the relationship type is
// "tilted left", it means that we expect a huge amount of left items, so we don't index their uuids
// on the right item as a storage/performance improvement.
// As a consequence, when searching for related items (using discovery)
// on the pages of the left items you won't be able to find the right item.
if (relationshipType.getTilted() != LEFT && relationshipType.getRightType().equals(itemEntityType)) {
String element = relationshipType.getRightwardType();
List<ItemUuidAndRelationshipId> data = relationshipService
.findByLatestItemAndRelationshipType(context, item, relationshipType, false);
mdvs.addAll(constructLatestForDiscoveryMetadataValues(context, schema, element, qualifier, data));
} }
} }
return null;
return mdvs;
}
/**
* Turn the given data into a list of relation.*.latestForDiscovery virtual metadata values.
* @param context the DSpace context.
* @param schema the schema for all metadata values.
* @param element the element for all metadata values.
* @param qualifier the qualifier for all metadata values.
* @param data a POJO containing the item uuid and relationship id.
* @return a list (may be empty) of metadata values of type relation.*.latestForDiscovery.
*/
protected List<RelationshipMetadataValue> constructLatestForDiscoveryMetadataValues(
Context context, String schema, String element, String qualifier, List<ItemUuidAndRelationshipId> data
) {
String mdf = new MetadataFieldName(schema, element, qualifier).toString();
return data.stream()
.map(datum -> {
RelationshipMetadataValue mdv = constructMetadataValue(context, mdf);
if (mdv == null) {
return null;
}
mdv.setAuthority(Constants.VIRTUAL_AUTHORITY_PREFIX + datum.getRelationshipId());
mdv.setValue(datum.getItemUuid().toString());
// NOTE: place has no meaning for relation.*.latestForDiscovery metadata fields
mdv.setPlace(-1);
mdv.setUseForPlace(false);
return mdv;
})
.filter(Objects::nonNull)
.collect(Collectors.toUnmodifiableList());
}
@Override
@Deprecated
public String getEntityTypeStringFromMetadata(Item item) {
return itemService.getEntityTypeLabel(item);
} }
@Override @Override

View File

@@ -10,9 +10,11 @@ package org.dspace.content;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
import java.util.Comparator; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.UUID; import java.util.UUID;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
@@ -20,15 +22,19 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.service.AuthorizeService; import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.Relationship.LatestVersionStatus;
import org.dspace.content.dao.RelationshipDAO; import org.dspace.content.dao.RelationshipDAO;
import org.dspace.content.dao.pojo.ItemUuidAndRelationshipId;
import org.dspace.content.service.EntityTypeService; import org.dspace.content.service.EntityTypeService;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
import org.dspace.content.service.RelationshipService; import org.dspace.content.service.RelationshipService;
import org.dspace.content.service.RelationshipTypeService; import org.dspace.content.service.RelationshipTypeService;
import org.dspace.content.virtual.VirtualMetadataConfiguration;
import org.dspace.content.virtual.VirtualMetadataPopulator; import org.dspace.content.virtual.VirtualMetadataPopulator;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.versioning.utils.RelationshipVersioningUtils;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
public class RelationshipServiceImpl implements RelationshipService { public class RelationshipServiceImpl implements RelationshipService {
@@ -55,6 +61,10 @@ public class RelationshipServiceImpl implements RelationshipService {
@Autowired @Autowired
private RelationshipMetadataService relationshipMetadataService; private RelationshipMetadataService relationshipMetadataService;
@Autowired
private RelationshipVersioningUtils relationshipVersioningUtils;
@Autowired @Autowired
private VirtualMetadataPopulator virtualMetadataPopulator; private VirtualMetadataPopulator virtualMetadataPopulator;
@@ -76,9 +86,10 @@ public class RelationshipServiceImpl implements RelationshipService {
@Override @Override
public Relationship create(Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, public Relationship create(
int leftPlace, int rightPlace, String leftwardValue, String rightwardValue) Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, int leftPlace, int rightPlace,
throws AuthorizeException, SQLException { String leftwardValue, String rightwardValue, LatestVersionStatus latestVersionStatus
) throws AuthorizeException, SQLException {
Relationship relationship = new Relationship(); Relationship relationship = new Relationship();
relationship.setLeftItem(leftItem); relationship.setLeftItem(leftItem);
relationship.setRightItem(rightItem); relationship.setRightItem(rightItem);
@@ -87,9 +98,21 @@ public class RelationshipServiceImpl implements RelationshipService {
relationship.setRightPlace(rightPlace); relationship.setRightPlace(rightPlace);
relationship.setLeftwardValue(leftwardValue); relationship.setLeftwardValue(leftwardValue);
relationship.setRightwardValue(rightwardValue); relationship.setRightwardValue(rightwardValue);
relationship.setLatestVersionStatus(latestVersionStatus);
return create(c, relationship); return create(c, relationship);
} }
@Override
public Relationship create(
Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, int leftPlace, int rightPlace,
String leftwardValue, String rightwardValue
) throws AuthorizeException, SQLException {
return create(
c, leftItem, rightItem, relationshipType, leftPlace, rightPlace, leftwardValue, rightwardValue,
LatestVersionStatus.BOTH
);
}
@Override @Override
public Relationship create(Context context, Relationship relationship) throws SQLException, AuthorizeException { public Relationship create(Context context, Relationship relationship) throws SQLException, AuthorizeException {
if (isRelationshipValidToCreate(context, relationship)) { if (isRelationshipValidToCreate(context, relationship)) {
@@ -98,7 +121,7 @@ public class RelationshipServiceImpl implements RelationshipService {
// This order of execution should be handled in the creation (create, updateplace, update relationship) // This order of execution should be handled in the creation (create, updateplace, update relationship)
// for a proper place allocation // for a proper place allocation
Relationship relationshipToReturn = relationshipDAO.create(context, relationship); Relationship relationshipToReturn = relationshipDAO.create(context, relationship);
updatePlaceInRelationship(context, relationshipToReturn); updatePlaceInRelationship(context, relationshipToReturn, null, null, true, true);
update(context, relationshipToReturn); update(context, relationshipToReturn);
updateItemsInRelationship(context, relationship); updateItemsInRelationship(context, relationship);
return relationshipToReturn; return relationshipToReturn;
@@ -113,71 +136,388 @@ public class RelationshipServiceImpl implements RelationshipService {
} }
@Override @Override
public void updatePlaceInRelationship(Context context, Relationship relationship) public Relationship move(
throws SQLException, AuthorizeException { Context context, Relationship relationship, Integer newLeftPlace, Integer newRightPlace
) throws SQLException, AuthorizeException {
if (authorizeService.authorizeActionBoolean(context, relationship.getLeftItem(), Constants.WRITE) ||
authorizeService.authorizeActionBoolean(context, relationship.getRightItem(), Constants.WRITE)) {
// Don't do anything if neither the leftPlace nor rightPlace was updated
if (newLeftPlace != null || newRightPlace != null) {
// This order of execution should be handled in the creation (create, updateplace, update relationship)
// for a proper place allocation
updatePlaceInRelationship(context, relationship, newLeftPlace, newRightPlace, false, false);
update(context, relationship);
updateItemsInRelationship(context, relationship);
}
return relationship;
} else {
throw new AuthorizeException(
"You do not have write rights on this relationship's items");
}
}
@Override
public Relationship move(
Context context, Relationship relationship, Item newLeftItem, Item newRightItem
) throws SQLException, AuthorizeException {
// If the new Item is the same as the current Item, don't move
newLeftItem = newLeftItem != relationship.getLeftItem() ? newLeftItem : null;
newRightItem = newRightItem != relationship.getRightItem() ? newRightItem : null;
// Don't do anything if neither the leftItem nor rightItem was updated
if (newLeftItem != null || newRightItem != null) {
// First move the Relationship to the back within the current Item's lists
// This ensures that we won't have any gaps once we move the Relationship to a different Item
move(
context, relationship,
newLeftItem != null ? -1 : null,
newRightItem != null ? -1 : null
);
boolean insertLeft = false;
boolean insertRight = false;
// If Item has been changed, mark the previous Item as modified to make sure we discard the old relation.*
// metadata on the next update.
// Set the Relationship's Items to the new ones, appending to the end
if (newLeftItem != null) {
relationship.getLeftItem().setMetadataModified();
relationship.setLeftItem(newLeftItem);
relationship.setLeftPlace(-1);
insertLeft = true;
}
if (newRightItem != null) {
relationship.getRightItem().setMetadataModified();
relationship.setRightItem(newRightItem);
relationship.setRightPlace(-1);
insertRight = true;
}
// This order of execution should be handled in the creation (create, updateplace, update relationship)
// for a proper place allocation
updatePlaceInRelationship(context, relationship, null, null, insertLeft, insertRight);
update(context, relationship);
updateItemsInRelationship(context, relationship);
}
return relationship;
}
/**
* This method will update the place for the Relationship and all other relationships found by the items and
* relationship type of the given Relationship.
*
* @param context The relevant DSpace context
* @param relationship The Relationship object that will have its place updated and that will be used
* to retrieve the other relationships whose place might need to be updated.
* @param newLeftPlace If the Relationship in question is to be moved, the leftPlace it is to be moved to.
* Set this to null if the Relationship has not been moved, i.e. it has just been created,
* deleted or when its Items have been modified.
* @param newRightPlace If the Relationship in question is to be moved, the rightPlace it is to be moved to.
* Set this to null if the Relationship has not been moved, i.e. it has just been created,
* deleted or when its Items have been modified.
* @param insertLeft Whether the Relationship in question should be inserted into the left Item.
* Should be set to true when creating or moving to a different Item.
* @param insertRight Whether the Relationship in question should be inserted into the right Item.
* Should be set to true when creating or moving to a different Item.
* @throws SQLException If something goes wrong
* @throws AuthorizeException
* If the user is not authorized to update the Relationship or its Items
*/
private void updatePlaceInRelationship(
Context context, Relationship relationship,
Integer newLeftPlace, Integer newRightPlace, boolean insertLeft, boolean insertRight
) throws SQLException, AuthorizeException {
Item leftItem = relationship.getLeftItem(); Item leftItem = relationship.getLeftItem();
// Max value is used to ensure that these will get added to the back of the list and thus receive the highest
// (last) place as it's set to a -1 for creation
if (relationship.getLeftPlace() == -1) {
relationship.setLeftPlace(Integer.MAX_VALUE);
}
Item rightItem = relationship.getRightItem(); Item rightItem = relationship.getRightItem();
if (relationship.getRightPlace() == -1) {
relationship.setRightPlace(Integer.MAX_VALUE);
}
List<Relationship> leftRelationships = findByItemAndRelationshipType(context,
leftItem,
relationship.getRelationshipType(), true);
List<Relationship> rightRelationships = findByItemAndRelationshipType(context,
rightItem,
relationship.getRelationshipType(),
false);
// These relationships are only deleted from the temporary lists incase they're present in them so that we can // These list also include the non-latest. This is relevant to determine whether it's deleted.
// This can also imply there may be overlapping places, and/or the given relationship will overlap
// But the shift will allow this, and only happen when needed based on the latest status
List<Relationship> leftRelationships = findByItemAndRelationshipType(
context, leftItem, relationship.getRelationshipType(), true, -1, -1, false
);
List<Relationship> rightRelationships = findByItemAndRelationshipType(
context, rightItem, relationship.getRelationshipType(), false, -1, -1, false
);
// These relationships are only deleted from the temporary lists in case they're present in them so that we can
// properly perform our place calculation later down the line in this method. // properly perform our place calculation later down the line in this method.
if (leftRelationships.contains(relationship)) { boolean deletedFromLeft = !leftRelationships.contains(relationship);
leftRelationships.remove(relationship); boolean deletedFromRight = !rightRelationships.contains(relationship);
} leftRelationships.remove(relationship);
if (rightRelationships.contains(relationship)) { rightRelationships.remove(relationship);
rightRelationships.remove(relationship);
} List<MetadataValue> leftMetadata = getSiblingMetadata(leftItem, relationship, true);
List<MetadataValue> rightMetadata = getSiblingMetadata(rightItem, relationship, false);
// For new relationships added to the end, this will be -1.
// For new relationships added at a specific position, this will contain that position.
// For existing relationships, this will contain the place before it was moved.
// For deleted relationships, this will contain the place before it was deleted.
int oldLeftPlace = relationship.getLeftPlace();
int oldRightPlace = relationship.getRightPlace();
boolean movedUpLeft = resolveRelationshipPlace(
relationship, true, leftRelationships, leftMetadata, oldLeftPlace, newLeftPlace
);
boolean movedUpRight = resolveRelationshipPlace(
relationship, false, rightRelationships, rightMetadata, oldRightPlace, newRightPlace
);
context.turnOffAuthorisationSystem(); context.turnOffAuthorisationSystem();
//If useForPlace for the leftwardType is false for the relationshipType,
// we need to sort the relationships here based on leftplace.
if (!virtualMetadataPopulator.isUseForPlaceTrueForRelationshipType(relationship.getRelationshipType(), true)) {
if (!leftRelationships.isEmpty()) {
leftRelationships.sort(Comparator.comparingInt(Relationship::getLeftPlace));
for (int i = 0; i < leftRelationships.size(); i++) {
leftRelationships.get(i).setLeftPlace(i);
}
relationship.setLeftPlace(leftRelationships.size());
} else {
relationship.setLeftPlace(0);
}
} else {
updateItem(context, leftItem);
//only shift if the place is relevant for the latest relationships
if (relationshipVersioningUtils.otherSideIsLatest(true, relationship.getLatestVersionStatus())) {
shiftSiblings(
relationship, true, oldLeftPlace, movedUpLeft, insertLeft, deletedFromLeft,
leftRelationships, leftMetadata
);
}
if (relationshipVersioningUtils.otherSideIsLatest(false, relationship.getLatestVersionStatus())) {
shiftSiblings(
relationship, false, oldRightPlace, movedUpRight, insertRight, deletedFromRight,
rightRelationships, rightMetadata
);
} }
//If useForPlace for the rightwardType is false for the relationshipType, updateItem(context, leftItem);
// we need to sort the relationships here based on the rightplace. updateItem(context, rightItem);
if (!virtualMetadataPopulator.isUseForPlaceTrueForRelationshipType(relationship.getRelationshipType(), false)) {
if (!rightRelationships.isEmpty()) {
rightRelationships.sort(Comparator.comparingInt(Relationship::getRightPlace));
for (int i = 0; i < rightRelationships.size(); i++) {
rightRelationships.get(i).setRightPlace(i);
}
relationship.setRightPlace(rightRelationships.size());
} else {
relationship.setRightPlace(0);
}
} else {
updateItem(context, rightItem);
}
context.restoreAuthSystemState(); context.restoreAuthSystemState();
}
/**
* Return the MDVs in the Item's MDF corresponding to the given Relationship.
* Return an empty list if the Relationship isn't mapped to any MDF
* or if the mapping is configured with useForPlace=false.
*
* This returns actual metadata (not virtual) which in the same metadata field as the useForPlace.
* For a publication with 2 author relationships and 3 plain text dc.contributor.author values,
* it would return the 3 plain text dc.contributor.author values.
* For a person related to publications, it would return an empty list.
*/
private List<MetadataValue> getSiblingMetadata(
Item item, Relationship relationship, boolean isLeft
) {
List<MetadataValue> metadata = new ArrayList<>();
if (virtualMetadataPopulator.isUseForPlaceTrueForRelationshipType(relationship.getRelationshipType(), isLeft)) {
HashMap<String, VirtualMetadataConfiguration> mapping;
if (isLeft) {
mapping = virtualMetadataPopulator.getMap().get(relationship.getRelationshipType().getLeftwardType());
} else {
mapping = virtualMetadataPopulator.getMap().get(relationship.getRelationshipType().getRightwardType());
}
if (mapping != null) {
for (String mdf : mapping.keySet()) {
metadata.addAll(
// Make sure we're only looking at database MDVs; if the relationship currently overlaps
// one of these, its virtual MDV will overwrite the database MDV in itemService.getMetadata()
// The relationship pass should be sufficient to move any sibling virtual MDVs.
item.getMetadata()
.stream()
.filter(mdv -> mdv.getMetadataField().toString().equals(mdf.replace(".", "_")))
.collect(Collectors.toList())
);
}
}
}
return metadata;
}
/**
* Set the left/right place of a Relationship
* - To a new place in case it's being moved
* - Resolve -1 to the actual last place based on the places of its sibling Relationships and/or MDVs
* and determine if it has been moved up in the list.
*
* Examples:
* - Insert a Relationship at place 3
* newPlace starts out as null and is not updated. Return movedUp=false
* - Insert a Relationship at place -1
* newPlace starts out as null and is resolved to e.g. 6. Update the Relationship and return movedUp=false
* - Move a Relationship from place 4 to 2
* Update the Relationship and return movedUp=false.
* - Move a Relationship from place 2 to -1
* newPlace starts out as -1 and is resolved to e.g. 5. Update the relationship and return movedUp=true.
* - Remove a relationship from place 1
* Return movedUp=false
*
* @param relationship the Relationship that's being updated
* @param isLeft whether to consider the left side of the Relationship.
* This method should be called twice, once with isLeft=true and once with isLeft=false.
* Make sure this matches the provided relationships/metadata/oldPlace/newPlace.
* @param relationships the list of sibling Relationships
* @param metadata the list of sibling MDVs
* @param oldPlace the previous place for this Relationship, in case it has been moved.
* Otherwise, the current place of a deleted Relationship
* or the place a Relationship has been inserted.
* @param newPlace The new place for this Relationship. Will be null on insert/delete.
* @return true if the Relationship was moved and newPlace > oldPlace
*/
private boolean resolveRelationshipPlace(
Relationship relationship, boolean isLeft,
List<Relationship> relationships, List<MetadataValue> metadata,
int oldPlace, Integer newPlace
) {
boolean movedUp = false;
if (newPlace != null) {
// We're moving an existing Relationship...
if (newPlace == -1) {
// ...to the end of the list
int nextPlace = getNextPlace(relationships, metadata, isLeft);
if (nextPlace == oldPlace) {
// If this Relationship is already at the end, do nothing.
newPlace = oldPlace;
} else {
// Subtract 1 from the next place since we're moving, not inserting and
// the total number of Relationships stays the same.
newPlace = nextPlace - 1;
}
}
if (newPlace > oldPlace) {
// ...up the list. We have to keep track of this in order to shift correctly later on
movedUp = true;
}
} else if (oldPlace == -1) {
// We're _not_ moving an existing Relationship. The newPlace is already set in the Relationship object.
// We only need to resolve it to the end of the list if it's set to -1, otherwise we can just keep it as is.
newPlace = getNextPlace(relationships, metadata, isLeft);
}
if (newPlace != null) {
setPlace(relationship, isLeft, newPlace);
}
return movedUp;
}
/**
* Return the index of the next place in a list of Relationships and Metadata.
* By not relying on the size of both lists we can support one-to-many virtual MDV mappings.
* @param isLeft whether to take the left or right place of each Relationship
*/
private int getNextPlace(List<Relationship> relationships, List<MetadataValue> metadata, boolean isLeft) {
return Stream.concat(
metadata.stream().map(MetadataValue::getPlace),
relationships.stream().map(r -> getPlace(r, isLeft))
).max(Integer::compare)
.map(integer -> integer + 1)
.orElse(0);
}
/**
* Adjust the left/right place of sibling Relationships and MDVs
*
* Examples: with sibling Relationships R,S,T and metadata a,b,c
* - Insert T at place 1 aRbSc -> a T RbSc
* Shift all siblings with place >= 1 one place to the right
* - Delete R from place 2 aT R bSc -> aTbSc
* Shift all siblings with place > 2 one place to the left
* - Move S from place 3 to place 2 (movedUp=false) aTb S c -> aT S bc
* Shift all siblings with 2 < place <= 3 one place to the right
* - Move T from place 1 to place 3 (movedUp=true) a T Sbc -> aSb T c
* Shift all siblings with 1 < place <= 3 one place to the left
*
* @param relationship the Relationship that's being updated
* @param isLeft whether to consider the left side of the Relationship.
* This method should be called twice, once with isLeft=true and once with isLeft=false.
* Make sure this matches the provided relationships/metadata/oldPlace/newPlace.
* @param oldPlace the previous place for this Relationship, in case it has been moved.
* Otherwise, the current place of a deleted Relationship
* or the place a Relationship has been inserted.
* @param movedUp if this Relationship has been moved up the list, e.g. from place 2 to place 4
* @param deleted whether this Relationship has been deleted
* @param relationships the list of sibling Relationships
* @param metadata the list of sibling MDVs
*/
private void shiftSiblings(
Relationship relationship, boolean isLeft, int oldPlace, boolean movedUp, boolean inserted, boolean deleted,
List<Relationship> relationships, List<MetadataValue> metadata
) {
int newPlace = getPlace(relationship, isLeft);
for (Relationship sibling : relationships) {
// NOTE: If and only if the other side of the relationship has "latest" status, the relationship will appear
// as a metadata value on the item at the current side (indicated by isLeft) of the relationship.
//
// Example: volume <----> issue (LEFT_ONLY)
// => LEFT_ONLY means that the volume has "latest" status, but the issue does NOT have "latest" status
// => the volume will appear in the metadata of the issue,
// but the issue will NOT appear in the metadata of the volume
//
// This means that the other side of the relationship has to have "latest" status, otherwise this
// relationship is NOT relevant for place calculation.
if (relationshipVersioningUtils.otherSideIsLatest(isLeft, sibling.getLatestVersionStatus())) {
int siblingPlace = getPlace(sibling, isLeft);
if (
(deleted && siblingPlace > newPlace)
// If the relationship was deleted, all relationships after it should shift left
// We must make the distinction between deletes and moves because for inserts oldPlace == newPlace
|| (movedUp && siblingPlace <= newPlace && siblingPlace > oldPlace)
// If the relationship was moved up e.g. from place 2 to 5, all relationships
// with place > 2 (the old place) and <= to 5 should shift left
) {
setPlace(sibling, isLeft, siblingPlace - 1);
} else if (
(inserted && siblingPlace >= newPlace)
// If the relationship was inserted, all relationships starting from that place should shift right
// We must make the distinction between inserts and moves because for inserts oldPlace == newPlace
|| (!movedUp && siblingPlace >= newPlace && siblingPlace < oldPlace)
// If the relationship was moved down e.g. from place 5 to 2, all relationships
// with place >= 2 and < 5 (the old place) should shift right
) {
setPlace(sibling, isLeft, siblingPlace + 1);
}
}
}
for (MetadataValue mdv : metadata) {
// NOTE: Plain text metadata values should ALWAYS be included in the place calculation,
// because they are by definition only visible/relevant to the side of the relationship
// that we are currently processing.
int mdvPlace = mdv.getPlace();
if (
(deleted && mdvPlace > newPlace)
// If the relationship was deleted, all metadata after it should shift left
// We must make the distinction between deletes and moves because for inserts oldPlace == newPlace
// If the reltionship was copied to metadata on deletion:
// - the plain text can be after the relationship (in which case it's moved forward again)
// - or before the relationship (in which case it remains in place)
|| (movedUp && mdvPlace <= newPlace && mdvPlace > oldPlace)
// If the relationship was moved up e.g. from place 2 to 5, all metadata
// with place > 2 (the old place) and <= to 5 should shift left
) {
mdv.setPlace(mdvPlace - 1);
} else if (
(inserted && mdvPlace >= newPlace)
// If the relationship was inserted, all relationships starting from that place should shift right
// We must make the distinction between inserts and moves because for inserts oldPlace == newPlace
|| (!movedUp && mdvPlace >= newPlace && mdvPlace < oldPlace)
// If the relationship was moved down e.g. from place 5 to 2, all relationships
// with place >= 2 and < 5 (the old place) should shift right
) {
mdv.setPlace(mdvPlace + 1);
}
}
}
private int getPlace(Relationship relationship, boolean isLeft) {
if (isLeft) {
return relationship.getLeftPlace();
} else {
return relationship.getRightPlace();
}
}
private void setPlace(Relationship relationship, boolean isLeft, int place) {
if (isLeft) {
relationship.setLeftPlace(place);
} else {
relationship.setRightPlace(place);
}
} }
@Override @Override
@@ -187,16 +527,6 @@ public class RelationshipServiceImpl implements RelationshipService {
itemService.update(context, relatedItem); itemService.update(context, relatedItem);
} }
@Override
public int findNextLeftPlaceByLeftItem(Context context, Item item) throws SQLException {
return relationshipDAO.findNextLeftPlaceByLeftItem(context, item);
}
@Override
public int findNextRightPlaceByRightItem(Context context, Item item) throws SQLException {
return relationshipDAO.findNextRightPlaceByRightItem(context, item);
}
private boolean isRelationshipValidToCreate(Context context, Relationship relationship) throws SQLException { private boolean isRelationshipValidToCreate(Context context, Relationship relationship) throws SQLException {
RelationshipType relationshipType = relationship.getRelationshipType(); RelationshipType relationshipType = relationship.getRelationshipType();
@@ -212,15 +542,19 @@ public class RelationshipServiceImpl implements RelationshipService {
logRelationshipTypeDetailsForError(relationshipType); logRelationshipTypeDetailsForError(relationshipType);
return false; return false;
} }
if (!verifyMaxCardinality(context, relationship.getLeftItem(), if (!relationship.getLatestVersionStatus().equals(LatestVersionStatus.LEFT_ONLY)
&& !verifyMaxCardinality(context, relationship.getLeftItem(),
relationshipType.getLeftMaxCardinality(), relationshipType, true)) { relationshipType.getLeftMaxCardinality(), relationshipType, true)) {
//If RIGHT_ONLY => it's a copied relationship, and the count can be ignored
log.warn("The relationship has been deemed invalid since the left item has more" + log.warn("The relationship has been deemed invalid since the left item has more" +
" relationships than the left max cardinality allows after we'd store this relationship"); " relationships than the left max cardinality allows after we'd store this relationship");
logRelationshipTypeDetailsForError(relationshipType); logRelationshipTypeDetailsForError(relationshipType);
return false; return false;
} }
if (!verifyMaxCardinality(context, relationship.getRightItem(), if (!relationship.getLatestVersionStatus().equals(LatestVersionStatus.RIGHT_ONLY)
&& !verifyMaxCardinality(context, relationship.getRightItem(),
relationshipType.getRightMaxCardinality(), relationshipType, false)) { relationshipType.getRightMaxCardinality(), relationshipType, false)) {
//If LEFT_ONLY => it's a copied relationship, and the count can be ignored
log.warn("The relationship has been deemed invalid since the right item has more" + log.warn("The relationship has been deemed invalid since the right item has more" +
" relationships than the right max cardinality allows after we'd store this relationship"); " relationships than the right max cardinality allows after we'd store this relationship");
logRelationshipTypeDetailsForError(relationshipType); logRelationshipTypeDetailsForError(relationshipType);
@@ -279,14 +613,22 @@ public class RelationshipServiceImpl implements RelationshipService {
} }
@Override @Override
public List<Relationship> findByItem(Context context, Item item, Integer limit, Integer offset, public List<Relationship> findByItem(
boolean excludeTilted) throws SQLException { Context context, Item item, Integer limit, Integer offset, boolean excludeTilted
) throws SQLException {
return findByItem(context, item, limit, offset, excludeTilted, true);
}
List<Relationship> list = relationshipDAO.findByItem(context, item, limit, offset, excludeTilted); @Override
public List<Relationship> findByItem(
Context context, Item item, Integer limit, Integer offset, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException {
List<Relationship> list =
relationshipDAO.findByItem(context, item, limit, offset, excludeTilted, excludeNonLatest);
list.sort((o1, o2) -> { list.sort((o1, o2) -> {
int relationshipType = o1.getRelationshipType().getLeftwardType() int relationshipType = o1.getRelationshipType().getLeftwardType()
.compareTo(o2.getRelationshipType().getLeftwardType()); .compareTo(o2.getRelationshipType().getLeftwardType());
if (relationshipType != 0) { if (relationshipType != 0) {
return relationshipType; return relationshipType;
} else { } else {
@@ -377,7 +719,7 @@ public class RelationshipServiceImpl implements RelationshipService {
if (authorizeService.authorizeActionBoolean(context, relationship.getLeftItem(), Constants.WRITE) || if (authorizeService.authorizeActionBoolean(context, relationship.getLeftItem(), Constants.WRITE) ||
authorizeService.authorizeActionBoolean(context, relationship.getRightItem(), Constants.WRITE)) { authorizeService.authorizeActionBoolean(context, relationship.getRightItem(), Constants.WRITE)) {
relationshipDAO.delete(context, relationship); relationshipDAO.delete(context, relationship);
updatePlaceInRelationship(context, relationship); updatePlaceInRelationship(context, relationship, null, null, false, false);
updateItemsInRelationship(context, relationship); updateItemsInRelationship(context, relationship);
} else { } else {
throw new AuthorizeException( throw new AuthorizeException(
@@ -450,7 +792,7 @@ public class RelationshipServiceImpl implements RelationshipService {
+ item.getID() + " due to " + currentDepth + " depth"); + item.getID() + " due to " + currentDepth + " depth");
return; return;
} }
String entityTypeStringFromMetadata = relationshipMetadataService.getEntityTypeStringFromMetadata(item); String entityTypeStringFromMetadata = itemService.getEntityTypeLabel(item);
EntityType actualEntityType = entityTypeService.findByEntityType(context, entityTypeStringFromMetadata); EntityType actualEntityType = entityTypeService.findByEntityType(context, entityTypeStringFromMetadata);
// Get all types of relations for the current item // Get all types of relations for the current item
List<RelationshipType> relationshipTypes = relationshipTypeService.findByEntityType(context, actualEntityType); List<RelationshipType> relationshipTypes = relationshipTypeService.findByEntityType(context, actualEntityType);
@@ -510,6 +852,9 @@ public class RelationshipServiceImpl implements RelationshipService {
/** /**
* Converts virtual metadata from RelationshipMetadataValue objects to actual item metadata. * Converts virtual metadata from RelationshipMetadataValue objects to actual item metadata.
* The resulting MDVs are added in front or behind the Relationship's virtual MDVs.
* The Relationship's virtual MDVs may be shifted right, and all subsequent metadata will be shifted right.
* So this method ensures the places are still valid.
* *
* @param context The relevant DSpace context * @param context The relevant DSpace context
* @param relationship The relationship containing the left and right items * @param relationship The relationship containing the left and right items
@@ -520,13 +865,20 @@ public class RelationshipServiceImpl implements RelationshipService {
boolean copyToRightItem) boolean copyToRightItem)
throws SQLException, AuthorizeException { throws SQLException, AuthorizeException {
if (copyToLeftItem) { if (copyToLeftItem) {
String entityTypeString = relationshipMetadataService String entityTypeString = itemService.getEntityTypeLabel(relationship.getLeftItem());
.getEntityTypeStringFromMetadata(relationship.getLeftItem());
List<RelationshipMetadataValue> relationshipMetadataValues = List<RelationshipMetadataValue> relationshipMetadataValues =
relationshipMetadataService.findRelationshipMetadataValueForItemRelationship(context, relationshipMetadataService.findRelationshipMetadataValueForItemRelationship(context,
relationship.getLeftItem(), entityTypeString, relationship, true); relationship.getLeftItem(), entityTypeString, relationship, true);
for (RelationshipMetadataValue relationshipMetadataValue : relationshipMetadataValues) { for (RelationshipMetadataValue relationshipMetadataValue : relationshipMetadataValues) {
itemService.addAndShiftRightMetadata(context, relationship.getLeftItem(), // This adds the plain text metadata values on the same spot as the virtual values.
// This will be overruled in org.dspace.content.DSpaceObjectServiceImpl.update
// in the line below but it's not important whether the plain text or virtual values end up on top.
// The virtual values will eventually be deleted, and the others shifted
// This is required because addAndShiftRightMetadata has issues on metadata fields containing
// relationship values which are not useForPlace, while the relationship type has useForPlace
// E.g. when using addAndShiftRightMetadata on relation.isAuthorOfPublication, it will break the order
// from dc.contributor.author
itemService.addMetadata(context, relationship.getLeftItem(),
relationshipMetadataValue.getMetadataField(). relationshipMetadataValue.getMetadataField().
getMetadataSchema().getName(), getMetadataSchema().getName(),
relationshipMetadataValue.getMetadataField().getElement(), relationshipMetadataValue.getMetadataField().getElement(),
@@ -535,16 +887,16 @@ public class RelationshipServiceImpl implements RelationshipService {
relationshipMetadataValue.getValue(), null, -1, relationshipMetadataValue.getValue(), null, -1,
relationshipMetadataValue.getPlace()); relationshipMetadataValue.getPlace());
} }
//This will ensure the new values no longer overlap, but won't break the order
itemService.update(context, relationship.getLeftItem()); itemService.update(context, relationship.getLeftItem());
} }
if (copyToRightItem) { if (copyToRightItem) {
String entityTypeString = relationshipMetadataService String entityTypeString = itemService.getEntityTypeLabel(relationship.getRightItem());
.getEntityTypeStringFromMetadata(relationship.getRightItem());
List<RelationshipMetadataValue> relationshipMetadataValues = List<RelationshipMetadataValue> relationshipMetadataValues =
relationshipMetadataService.findRelationshipMetadataValueForItemRelationship(context, relationshipMetadataService.findRelationshipMetadataValueForItemRelationship(context,
relationship.getRightItem(), entityTypeString, relationship, true); relationship.getRightItem(), entityTypeString, relationship, true);
for (RelationshipMetadataValue relationshipMetadataValue : relationshipMetadataValues) { for (RelationshipMetadataValue relationshipMetadataValue : relationshipMetadataValues) {
itemService.addAndShiftRightMetadata(context, relationship.getRightItem(), itemService.addMetadata(context, relationship.getRightItem(),
relationshipMetadataValue.getMetadataField(). relationshipMetadataValue.getMetadataField().
getMetadataSchema().getName(), getMetadataSchema().getName(),
relationshipMetadataValue.getMetadataField().getElement(), relationshipMetadataValue.getMetadataField().getElement(),
@@ -638,22 +990,46 @@ public class RelationshipServiceImpl implements RelationshipService {
public List<Relationship> findByItemAndRelationshipType(Context context, Item item, public List<Relationship> findByItemAndRelationshipType(Context context, Item item,
RelationshipType relationshipType) RelationshipType relationshipType)
throws SQLException { throws SQLException {
return relationshipDAO.findByItemAndRelationshipType(context, item, relationshipType, -1, -1); return findByItemAndRelationshipType(context, item, relationshipType, -1, -1, true);
} }
@Override @Override
public List<Relationship> findByItemAndRelationshipType(Context context, Item item, public List<Relationship> findByItemAndRelationshipType(Context context, Item item,
RelationshipType relationshipType, int limit, int offset) RelationshipType relationshipType, int limit, int offset)
throws SQLException { throws SQLException {
return relationshipDAO.findByItemAndRelationshipType(context, item, relationshipType, limit, offset); return findByItemAndRelationshipType(context, item, relationshipType, limit, offset, true);
} }
@Override @Override
public List<Relationship> findByItemAndRelationshipType(Context context, Item item, public List<Relationship> findByItemAndRelationshipType(
RelationshipType relationshipType, boolean isLeft, Context context, Item item, RelationshipType relationshipType, int limit, int offset, boolean excludeNonLatest
int limit, int offset) ) throws SQLException {
throws SQLException { return relationshipDAO
return relationshipDAO.findByItemAndRelationshipType(context, item, relationshipType, isLeft, limit, offset); .findByItemAndRelationshipType(context, item, relationshipType, limit, offset, excludeNonLatest);
}
@Override
public List<Relationship> findByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, boolean isLeft, int limit, int offset
) throws SQLException {
return findByItemAndRelationshipType(context, item, relationshipType, isLeft, limit, offset, true);
}
@Override
public List<Relationship> findByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, boolean isLeft, int limit, int offset,
boolean excludeNonLatest
) throws SQLException {
return relationshipDAO
.findByItemAndRelationshipType(context, item, relationshipType, isLeft, limit, offset, excludeNonLatest);
}
@Override
public List<ItemUuidAndRelationshipId> findByLatestItemAndRelationshipType(
Context context, Item latestItem, RelationshipType relationshipType, boolean isLeft
) throws SQLException {
return relationshipDAO
.findByLatestItemAndRelationshipType(context, latestItem, relationshipType, isLeft);
} }
@Override @Override
@@ -690,7 +1066,14 @@ public class RelationshipServiceImpl implements RelationshipService {
@Override @Override
public int countByItem(Context context, Item item) throws SQLException { public int countByItem(Context context, Item item) throws SQLException {
return relationshipDAO.countByItem(context, item); return countByItem(context, item, false, true);
}
@Override
public int countByItem(
Context context, Item item, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException {
return relationshipDAO.countByItem(context, item, excludeTilted, excludeNonLatest);
} }
@Override @Override
@@ -699,9 +1082,18 @@ public class RelationshipServiceImpl implements RelationshipService {
} }
@Override @Override
public int countByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, public int countByItemAndRelationshipType(
boolean isLeft) throws SQLException { Context context, Item item, RelationshipType relationshipType, boolean isLeft
return relationshipDAO.countByItemAndRelationshipType(context, item, relationshipType, isLeft); ) throws SQLException {
return countByItemAndRelationshipType(context, item, relationshipType, isLeft, true);
}
@Override
public int countByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, boolean isLeft, boolean excludeNonLatest
) throws SQLException {
return relationshipDAO
.countByItemAndRelationshipType(context, item, relationshipType, isLeft, excludeNonLatest);
} }
@Override @Override

View File

@@ -128,19 +128,23 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
Optional<MetadataValue> colEntityType = getDSpaceEntityType(collection); Optional<MetadataValue> colEntityType = getDSpaceEntityType(collection);
Optional<MetadataValue> templateItemEntityType = getDSpaceEntityType(templateItem); Optional<MetadataValue> templateItemEntityType = getDSpaceEntityType(templateItem);
if (colEntityType.isPresent() && templateItemEntityType.isPresent() && if (template && colEntityType.isPresent() && templateItemEntityType.isPresent() &&
!StringUtils.equals(colEntityType.get().getValue(), templateItemEntityType.get().getValue())) { !StringUtils.equals(colEntityType.get().getValue(), templateItemEntityType.get().getValue())) {
throw new IllegalStateException("The template item has entity type : (" + throw new IllegalStateException("The template item has entity type : (" +
templateItemEntityType.get().getValue() + ") different than collection entity type : " + templateItemEntityType.get().getValue() + ") different than collection entity type : " +
colEntityType.get().getValue()); colEntityType.get().getValue());
} }
if (colEntityType.isPresent() && templateItemEntityType.isEmpty()) { if (template && colEntityType.isPresent() && templateItemEntityType.isEmpty()) {
MetadataValue original = colEntityType.get(); MetadataValue original = colEntityType.get();
MetadataField metadataField = original.getMetadataField(); MetadataField metadataField = original.getMetadataField();
MetadataSchema metadataSchema = metadataField.getMetadataSchema(); MetadataSchema metadataSchema = metadataField.getMetadataSchema();
itemService.addMetadata(context, item, metadataSchema.getName(), metadataField.getElement(), // NOTE: dspace.entity.type = <blank> does not make sense
metadataField.getQualifier(), original.getLanguage(), original.getValue()); // the collection entity type is by default blank when a collection is first created
if (StringUtils.isNotBlank(original.getValue())) {
itemService.addMetadata(context, item, metadataSchema.getName(), metadataField.getElement(),
metadataField.getQualifier(), original.getLanguage(), original.getValue());
}
} }
if (template && (templateItem != null)) { if (template && (templateItem != null)) {

View File

@@ -0,0 +1,127 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.authority;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.util.UUIDUtils;
import org.dspace.web.ContextUtil;
/**
 * Implementation of {@link ChoiceAuthority} based on EPerson. Allows you to set
 * the id of an eperson as authority.
 *
 * @author Mykhaylo Boychuk (4science.it)
 */
public class EPersonAuthority implements ChoiceAuthority {
    private static final Logger log = LogManager.getLogger(EPersonAuthority.class);
    /**
     * the name assigned to the specific instance by the PluginService, @see
     * {@link NameAwarePlugin}
     **/
    private String authorityName;
    // Service handles are resolved once and never reassigned; mark them final.
    private final EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
    private final AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
    /**
     * Return the single best match for the given text.
     * Delegates to {@link #getMatches(String, int, int, String)} with a limit of 2:
     * finding more than one hit means the match is ambiguous.
     */
    @Override
    public Choices getBestMatch(String text, String locale) {
        return getMatches(text, 0, 2, locale);
    }
    /**
     * Search EPersons matching the given text and wrap them as {@link Choices}.
     *
     * @param text   query text matched against EPerson names
     * @param start  paging offset
     * @param limit  paging limit; non-positive values fall back to 20
     * @param locale ignored by this implementation
     * @return the matching choices; empty when the current user may not search EPersons
     */
    @Override
    public Choices getMatches(String text, int start, int limit, String locale) {
        if (limit <= 0) {
            // Guard against a non-positive page size; use a sane default.
            limit = 20;
        }
        Context context = getContext();
        List<EPerson> ePersons = searchEPersons(context, text, start, limit);
        List<Choice> choiceList = new ArrayList<>();
        for (EPerson eperson : ePersons) {
            // Authority key is the EPerson UUID; label and value are both the full name.
            choiceList.add(new Choice(eperson.getID().toString(), eperson.getFullName(), eperson.getFullName()));
        }
        Choice[] results = choiceList.toArray(new Choice[0]);
        return new Choices(results, start, ePersons.size(), Choices.CF_AMBIGUOUS, ePersons.size() > (start + limit), 0);
    }
    /**
     * Resolve an authority key (an EPerson UUID) to the EPerson's full name.
     *
     * @return the full name, or {@code null} if the key is not a valid UUID or no EPerson is found
     */
    @Override
    public String getLabel(String key, String locale) {
        UUID uuid = UUIDUtils.fromString(key);
        if (uuid == null) {
            return null;
        }
        Context context = getContext();
        try {
            EPerson ePerson = ePersonService.find(context, uuid);
            return ePerson != null ? ePerson.getFullName() : null;
        } catch (SQLException e) {
            log.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        }
    }
    /**
     * Search EPersons, restricted to admins and access-group managers.
     * Non-privileged users always receive an empty list to avoid leaking EPerson data.
     */
    private List<EPerson> searchEPersons(Context context, String text, int start, int limit) {
        if (!isCurrentUserAdminOrAccessGroupManager(context)) {
            return Collections.emptyList();
        }
        try {
            return ePersonService.search(context, text, start, limit);
        } catch (SQLException e) {
            log.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        }
    }
    /**
     * Obtain the context of the current request, or a fresh context when
     * called outside a request (e.g. from a CLI process).
     */
    private Context getContext() {
        Context context = ContextUtil.obtainCurrentRequestContext();
        return context != null ? context : new Context();
    }
    /**
     * Whether the current user is a site administrator or an account manager.
     */
    private boolean isCurrentUserAdminOrAccessGroupManager(Context context) {
        try {
            return authorizeService.isAdmin(context) || authorizeService.isAccountManager(context);
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    }
    @Override
    public String getPluginInstanceName() {
        return authorityName;
    }
    @Override
    public void setPluginInstanceName(String name) {
        this.authorityName = name;
    }
}

View File

@@ -32,8 +32,22 @@ public interface ItemDAO extends DSpaceObjectLegacySupportDAO<Item> {
public Iterator<Item> findAll(Context context, boolean archived, int limit, int offset) throws SQLException; public Iterator<Item> findAll(Context context, boolean archived, int limit, int offset) throws SQLException;
@Deprecated
public Iterator<Item> findAll(Context context, boolean archived, boolean withdrawn) throws SQLException; public Iterator<Item> findAll(Context context, boolean archived, boolean withdrawn) throws SQLException;
/**
 * Find all "regular" items, i.e. items that are:
 *  - NOT in the workspace
 *  - NOT in the workflow
 *  - NOT a template item for e.g. a collection
 *
 * This implies that the result also contains older versions of items, as well as withdrawn items.
 * @param context the DSpace context.
 * @return an iterator over all regular items.
 * @throws SQLException if a database error occurs.
 */
public Iterator<Item> findAllRegularItems(Context context) throws SQLException;
/** /**
* Find all Items modified since a Date. * Find all Items modified since a Date.
* *

View File

@@ -14,6 +14,7 @@ import java.util.UUID;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.Relationship; import org.dspace.content.Relationship;
import org.dspace.content.RelationshipType; import org.dspace.content.RelationshipType;
import org.dspace.content.dao.pojo.ItemUuidAndRelationshipId;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.GenericDAO; import org.dspace.core.GenericDAO;
@@ -28,53 +29,38 @@ public interface RelationshipDAO extends GenericDAO<Relationship> {
/** /**
* This method returns a list of Relationship objects that have the given Item object * This method returns a list of Relationship objects that have the given Item object
* as a leftItem or a rightItem * as a leftItem or a rightItem
* @param context The relevant DSpace context * @param context The relevant DSpace context
* @param item The item that should be either a leftItem or a rightItem of all * @param item The item that should be either a leftItem or a rightItem of all
* the Relationship objects in the returned list * the Relationship objects in the returned list
* @param excludeTilted If true, excludes tilted relationships * @param excludeTilted If true, excludes tilted relationships
* @return The list of Relationship objects that contain either a left or a * @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* right item that is equal to the given item * that is relevant for this relationship
* @throws SQLException If something goes wrong * @return The list of Relationship objects that contain either a left or a
* right item that is equal to the given item
* @throws SQLException If something goes wrong
*/ */
List<Relationship> findByItem(Context context, Item item, boolean excludeTilted) throws SQLException; List<Relationship> findByItem(
Context context, Item item, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException;
/** /**
* This method returns a list of Relationship objects that have the given Item object * This method returns a list of Relationship objects that have the given Item object
* as a leftItem or a rightItem * as a leftItem or a rightItem
* @param context The relevant DSpace context * @param context The relevant DSpace context
* @param item The item that should be either a leftItem or a rightItem of all * @param item The item that should be either a leftItem or a rightItem of all
* the Relationship objects in the returned list * the Relationship objects in the returned list
* @param limit paging limit * @param limit paging limit
* @param offset paging offset * @param offset paging offset
* @param excludeTilted If true, excludes tilted relationships * @param excludeTilted If true, excludes tilted relationships
* @return The list of Relationship objects that contain either a left or a * @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* right item that is equal to the given item * that is relevant for this relationship
* @throws SQLException If something goes wrong * @return The list of Relationship objects that contain either a left or a
* right item that is equal to the given item
* @throws SQLException If something goes wrong
*/ */
List<Relationship> findByItem(Context context, Item item, Integer limit, Integer offset, boolean excludeTilted) List<Relationship> findByItem(
throws SQLException; Context context, Item item, Integer limit, Integer offset, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException;
/**
* This method returns the next leftplace integer to use for a relationship with this item as the leftItem
*
* @param context The relevant DSpace context
* @param item The item to be matched on leftItem
* @return The next integer to be used for the leftplace of a relationship with the given item
* as a left item
* @throws SQLException If something goes wrong
*/
int findNextLeftPlaceByLeftItem(Context context, Item item) throws SQLException;
/**
* This method returns the next rightplace integer to use for a relationship with this item as the rightItem
*
* @param context The relevant DSpace context
* @param item The item to be matched on rightItem
* @return The next integer to be used for the rightplace of a relationship with the given item
* as a right item
* @throws SQLException If something goes wrong
*/
int findNextRightPlaceByRightItem(Context context, Item item) throws SQLException;
/** /**
* This method returns a list of Relationship objects for the given RelationshipType object. * This method returns a list of Relationship objects for the given RelationshipType object.
@@ -108,34 +94,69 @@ public interface RelationshipDAO extends GenericDAO<Relationship> {
* It will construct a list of all Relationship objects that have the given RelationshipType object * It will construct a list of all Relationship objects that have the given RelationshipType object
* as the relationshipType property * as the relationshipType property
* @param context The relevant DSpace context * @param context The relevant DSpace context
* @param item item to filter by
* @param relationshipType The RelationshipType object to be checked on * @param relationshipType The RelationshipType object to be checked on
* @param limit paging limit * @param limit paging limit
* @param offset paging offset * @param offset paging offset
* @param item item to filter by * @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* that is relevant for this relationship
* @return A list of Relationship objects that have the given RelationshipType object as the * @return A list of Relationship objects that have the given RelationshipType object as the
* relationshipType property * relationshipType property
* @throws SQLException If something goes wrong * @throws SQLException If something goes wrong
*/ */
List<Relationship> findByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, List<Relationship> findByItemAndRelationshipType(
Integer limit, Integer offset) throws SQLException; Context context, Item item, RelationshipType relationshipType, Integer limit, Integer offset,
boolean excludeNonLatest
) throws SQLException;
/** /**
* This method returns a list of Relationship objects for the given RelationshipType object. * This method returns a list of Relationship objects for the given RelationshipType object.
* It will construct a list of all Relationship objects that have the given RelationshipType object * It will construct a list of all Relationship objects that have the given RelationshipType object
* as the relationshipType property * as the relationshipType property
* @param context The relevant DSpace context * @param context The relevant DSpace context
* @param item item to filter by
* @param relationshipType The RelationshipType object to be checked on * @param relationshipType The RelationshipType object to be checked on
* @param isLeft Is item left or right
* @param limit paging limit * @param limit paging limit
* @param offset paging offset * @param offset paging offset
* @param item item to filter by * @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* @param isLeft Is item left or right * that is relevant for this relationship
* @return A list of Relationship objects that have the given RelationshipType object as the * @return A list of Relationship objects that have the given RelationshipType object as the
* relationshipType property * relationshipType property
* @throws SQLException If something goes wrong * @throws SQLException If something goes wrong
*/ */
List<Relationship> findByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, List<Relationship> findByItemAndRelationshipType(
boolean isLeft, Integer limit, Integer offset) Context context, Item item, RelationshipType relationshipType, boolean isLeft, Integer limit, Integer offset,
throws SQLException; boolean excludeNonLatest
) throws SQLException;
/**
 * This method returns the UUIDs of all items that have a relationship with the given item, from the perspective
 * of the other item. In other words, given a relationship with the given item, the given item should have
 * "latest status" in order for the other item uuid to be returned.
 *
 * This method differs from the "excludeNonLatest" property in other methods,
 * because in this method the current item should have "latest status" to return the other item,
 * whereas with "excludeNonLatest" the other item should have "latest status" to be returned.
 *
 * This method is used to index items in solr; when searching for related items of one of the returned uuids,
 * the given item should appear as a search result.
 *
 * NOTE: This method does not return {@link Relationship}s for performance, because doing so would eagerly fetch
 *       the items on both sides, which is unnecessary.
 * NOTE: tilted relationships are NEVER excluded when fetching one relationship type.
 * @param context the DSpace context.
 * @param latestItem the target item; only relationships where this item has "latest status" should be considered.
 * @param relationshipType the relationship type for which relationships should be selected.
 * @param isLeft whether the entity type of the item occurs on the left or right side of the relationship type.
 *               This is redundant in most cases, but necessary because relationship types may have
 *               the same entity type on both sides.
 * @return a list containing pairs of relationship ids and item uuids.
 * @throws SQLException if something goes wrong.
 */
public List<ItemUuidAndRelationshipId> findByLatestItemAndRelationshipType(
Context context, Item latestItem, RelationshipType relationshipType, boolean isLeft
) throws SQLException;
/** /**
* This method returns a list of Relationship objects for the given typeName * This method returns a list of Relationship objects for the given typeName
@@ -183,28 +204,34 @@ public interface RelationshipDAO extends GenericDAO<Relationship> {
/** /**
* This method returns a count of Relationship objects that have the given Item object * This method returns a count of Relationship objects that have the given Item object
* as a leftItem or a rightItem * as a leftItem or a rightItem
* @param context The relevant DSpace context * @param context The relevant DSpace context
* @param item The item that should be either a leftItem or a rightItem of all * @param item The item that should be either a leftItem or a rightItem of all
* the Relationship objects in the returned list * the Relationship objects in the returned list
* @param excludeTilted if true, excludes tilted relationships
* @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version
* that is relevant
* @return The list of Relationship objects that contain either a left or a * @return The list of Relationship objects that contain either a left or a
* right item that is equal to the given item * right item that is equal to the given item
* @throws SQLException If something goes wrong * @throws SQLException If something goes wrong
*/ */
int countByItem(Context context, Item item) throws SQLException; int countByItem(Context context, Item item, boolean excludeTilted, boolean excludeNonLatest) throws SQLException;
/** /**
* Count total number of relationships (rows in relationship table) by an item and a relationship type and a boolean * Count total number of relationships (rows in relationship table) by an item and a relationship type and a boolean
* indicating whether the item should be the leftItem or the rightItem * indicating whether the item should be the leftItem or the rightItem
* *
* @param context context * @param context context
* @param relationshipType relationship type to filter by * @param relationshipType relationship type to filter by
* @param item item to filter by * @param item item to filter by
* @param isLeft Indicating whether the counted Relationships should have the given Item on the left side or not * @param isLeft indicating whether the counted Relationships should have the given Item on the left side
* @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version
* that is relevant
* @return total count * @return total count
* @throws SQLException if database error * @throws SQLException if database error
*/ */
int countByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, boolean isLeft) int countByItemAndRelationshipType(
throws SQLException; Context context, Item item, RelationshipType relationshipType, boolean isLeft, boolean excludeNonLatest
) throws SQLException;
/** /**
* Count total number of relationships (rows in relationship table) given a typeName * Count total number of relationships (rows in relationship table) given a typeName

View File

@@ -79,6 +79,20 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
return iterate(query); return iterate(query);
} }
@Override
public Iterator<Item> findAllRegularItems(Context context) throws SQLException {
// Select every archived item, every withdrawn item, and every non-archived
// item that has at least one Version row (i.e. an older version of an item).
// Workspace, workflow and template items are therefore excluded.
String hql = "SELECT i FROM Item as i "
+ "LEFT JOIN Version as v ON i = v.item "
+ "WHERE i.inArchive=true or i.withdrawn=true or (i.inArchive=false and v.id IS NOT NULL) "
+ "ORDER BY i.id";
Query regularItemsQuery = createQuery(context, hql);
return iterate(regularItemsQuery);
}
@Override @Override
public Iterator<Item> findAll(Context context, boolean archived, public Iterator<Item> findAll(Context context, boolean archived,
boolean withdrawn, boolean discoverable, Date lastModified) boolean withdrawn, boolean discoverable, Date lastModified)

View File

@@ -11,17 +11,22 @@ import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.UUID; import java.util.UUID;
import java.util.stream.Collectors;
import javax.persistence.Query; import javax.persistence.Query;
import javax.persistence.Tuple;
import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root; import javax.persistence.criteria.Root;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.Item_;
import org.dspace.content.Relationship; import org.dspace.content.Relationship;
import org.dspace.content.RelationshipType; import org.dspace.content.RelationshipType;
import org.dspace.content.RelationshipType_; import org.dspace.content.RelationshipType_;
import org.dspace.content.Relationship_; import org.dspace.content.Relationship_;
import org.dspace.content.dao.RelationshipDAO; import org.dspace.content.dao.RelationshipDAO;
import org.dspace.content.dao.pojo.ItemUuidAndRelationshipId;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.RelationshipTypeService; import org.dspace.content.service.RelationshipTypeService;
import org.dspace.core.AbstractHibernateDAO; import org.dspace.core.AbstractHibernateDAO;
@@ -30,95 +35,152 @@ import org.dspace.core.Context;
public class RelationshipDAOImpl extends AbstractHibernateDAO<Relationship> implements RelationshipDAO { public class RelationshipDAOImpl extends AbstractHibernateDAO<Relationship> implements RelationshipDAO {
@Override @Override
public List<Relationship> findByItem(Context context, Item item, boolean excludeTilted) throws SQLException { public List<Relationship> findByItem(
return findByItem(context, item, -1, -1, excludeTilted); Context context, Item item, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException {
return findByItem(context, item, -1, -1, excludeTilted, excludeNonLatest);
} }
@Override @Override
public List<Relationship> findByItem(Context context, Item item, Integer limit, Integer offset, public List<Relationship> findByItem(
boolean excludeTilted) throws SQLException { Context context, Item item, Integer limit, Integer offset, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); CriteriaQuery<Relationship> criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class);
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class); Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
criteriaQuery.select(relationshipRoot); criteriaQuery.select(relationshipRoot);
if (excludeTilted) {
// If this item is the left item, criteriaQuery.where(
// return relationships for types which are not tilted right (tilted is either left nor null) criteriaBuilder.or(
// If this item is the right item, getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, excludeTilted, excludeNonLatest),
// return relationships for types which are not tilted left (tilted is either right nor null) getRightItemPredicate(criteriaBuilder, relationshipRoot, item, excludeTilted, excludeNonLatest)
criteriaQuery )
.where(criteriaBuilder.or( );
criteriaBuilder.and(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item),
criteriaBuilder.or(
criteriaBuilder.isNull(relationshipRoot.get(Relationship_.relationshipType)
.get(RelationshipType_.tilted)),
criteriaBuilder.notEqual(relationshipRoot
.get(Relationship_.relationshipType)
.get(RelationshipType_.tilted), RelationshipType.Tilted.RIGHT))),
criteriaBuilder.and(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item),
criteriaBuilder.or(
criteriaBuilder.isNull(relationshipRoot.get(Relationship_.relationshipType)
.get(RelationshipType_.tilted)),
criteriaBuilder.notEqual(relationshipRoot
.get(Relationship_.relationshipType)
.get(RelationshipType_.tilted), RelationshipType.Tilted.LEFT)))));
} else {
criteriaQuery
.where(criteriaBuilder.or(criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item),
criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item)));
}
return list(context, criteriaQuery, false, Relationship.class, limit, offset); return list(context, criteriaQuery, false, Relationship.class, limit, offset);
} }
@Override /**
public int countByItem(Context context, Item item) * Get the predicate for a criteria query that selects relationships by their left item.
throws SQLException { * @param criteriaBuilder the criteria builder.
* @param relationshipRoot the relationship root.
* @param item the item that is being searched for.
* @param excludeTilted if true, exclude tilted relationships.
* @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version
* that is relevant.
* @return a predicate that satisfies the given restrictions.
*/
protected Predicate getLeftItemPredicate(
CriteriaBuilder criteriaBuilder, Root<Relationship> relationshipRoot, Item item,
boolean excludeTilted, boolean excludeNonLatest
) {
List<Predicate> predicates = new ArrayList<>();
// match relationships based on the left item
predicates.add(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item)
);
if (excludeTilted) {
// if this item is the left item,
// return relationships for types which are NOT tilted right (tilted is either left nor null)
predicates.add(
criteriaBuilder.or(
criteriaBuilder.isNull(
relationshipRoot.get(Relationship_.relationshipType).get(RelationshipType_.tilted)
),
criteriaBuilder.notEqual(
relationshipRoot.get(Relationship_.relationshipType).get(RelationshipType_.tilted),
RelationshipType.Tilted.RIGHT
)
)
);
}
if (excludeNonLatest) {
// if this item is the left item,
// return relationships for which the right item is the "latest" version that is relevant.
predicates.add(
criteriaBuilder.notEqual(
relationshipRoot.get(Relationship_.LATEST_VERSION_STATUS),
Relationship.LatestVersionStatus.LEFT_ONLY
)
);
}
return criteriaBuilder.and(predicates.toArray(new Predicate[]{}));
}
/**
* Get the predicate for a criteria query that selects relationships by their right item.
* @param criteriaBuilder the criteria builder.
* @param relationshipRoot the relationship root.
* @param item the item that is being searched for.
* @param excludeTilted if true, exclude tilted relationships.
* @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version
* that is relevant.
* @return a predicate that satisfies the given restrictions.
*/
protected Predicate getRightItemPredicate(
CriteriaBuilder criteriaBuilder, Root<Relationship> relationshipRoot, Item item,
boolean excludeTilted, boolean excludeNonLatest
) {
List<Predicate> predicates = new ArrayList<>();
// match relationships based on the right item
predicates.add(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item)
);
if (excludeTilted) {
// if this item is the right item,
// return relationships for types which are NOT tilted left (tilted is either right nor null)
predicates.add(
criteriaBuilder.or(
criteriaBuilder.isNull(
relationshipRoot.get(Relationship_.relationshipType).get(RelationshipType_.tilted)
),
criteriaBuilder.notEqual(
relationshipRoot.get(Relationship_.relationshipType).get(RelationshipType_.tilted),
RelationshipType.Tilted.LEFT
)
)
);
}
if (excludeNonLatest) {
// if this item is the right item,
// return relationships for which the left item is the "latest" version that is relevant.
predicates.add(
criteriaBuilder.notEqual(
relationshipRoot.get(Relationship_.LATEST_VERSION_STATUS),
Relationship.LatestVersionStatus.RIGHT_ONLY
)
);
}
return criteriaBuilder.and(predicates.toArray(new Predicate[]{}));
}
@Override
public int countByItem(
Context context, Item item, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class);
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class); Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
criteriaQuery.select(relationshipRoot); criteriaQuery.select(relationshipRoot);
criteriaQuery
.where(criteriaBuilder.or(criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item), criteriaQuery.where(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item))); criteriaBuilder.or(
getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, excludeTilted, excludeNonLatest),
getRightItemPredicate(criteriaBuilder, relationshipRoot, item, excludeTilted, excludeNonLatest)
)
);
return count(context, criteriaQuery, criteriaBuilder, relationshipRoot); return count(context, criteriaQuery, criteriaBuilder, relationshipRoot);
} }
@Override
public int findNextLeftPlaceByLeftItem(Context context, Item item) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class);
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
criteriaQuery.select(relationshipRoot);
criteriaQuery.where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item));
List<Relationship> list = list(context, criteriaQuery, false, Relationship.class, -1, -1);
list.sort((o1, o2) -> o2.getLeftPlace() - o1.getLeftPlace());
if (!list.isEmpty()) {
return list.get(0).getLeftPlace() + 1;
} else {
return 0;
}
}
@Override
public int findNextRightPlaceByRightItem(Context context, Item item) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class);
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
criteriaQuery.select(relationshipRoot);
criteriaQuery.where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item));
List<Relationship> list = list(context, criteriaQuery, false, Relationship.class, -1, -1);
list.sort((o1, o2) -> o2.getRightPlace() - o1.getRightPlace());
if (!list.isEmpty()) {
return list.get(0).getRightPlace() + 1;
} else {
return 0;
}
}
@Override @Override
public List<Relationship> findByRelationshipType(Context context, RelationshipType relationshipType) public List<Relationship> findByRelationshipType(Context context, RelationshipType relationshipType)
throws SQLException { throws SQLException {
@@ -140,49 +202,132 @@ public class RelationshipDAOImpl extends AbstractHibernateDAO<Relationship> impl
} }
@Override @Override
public List<Relationship> findByItemAndRelationshipType(Context context, Item item, public List<Relationship> findByItemAndRelationshipType(
RelationshipType relationshipType, Integer limit, Context context, Item item, RelationshipType relationshipType, Integer limit, Integer offset,
Integer offset) boolean excludeNonLatest
throws SQLException { ) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class);
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class); Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
criteriaQuery.select(relationshipRoot); criteriaQuery.select(relationshipRoot);
criteriaQuery
.where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), criteriaQuery.where(
relationshipType), criteriaBuilder.or criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType),
(criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item), criteriaBuilder.or(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item))); getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest),
getRightItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest)
)
);
return list(context, criteriaQuery, true, Relationship.class, limit, offset); return list(context, criteriaQuery, true, Relationship.class, limit, offset);
} }
@Override @Override
public List<Relationship> findByItemAndRelationshipType(Context context, Item item, public List<Relationship> findByItemAndRelationshipType(
RelationshipType relationshipType, boolean isLeft, Context context, Item item, RelationshipType relationshipType, boolean isLeft, Integer limit, Integer offset,
Integer limit, Integer offset) boolean excludeNonLatest
throws SQLException { ) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class);
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class); Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
criteriaQuery.select(relationshipRoot); criteriaQuery.select(relationshipRoot);
if (isLeft) { if (isLeft) {
criteriaQuery criteriaQuery.where(
.where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType),
relationshipType), getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest)
criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item)); );
criteriaQuery.orderBy(criteriaBuilder.asc(relationshipRoot.get(Relationship_.leftPlace))); criteriaQuery.orderBy(criteriaBuilder.asc(relationshipRoot.get(Relationship_.leftPlace)));
} else { } else {
criteriaQuery criteriaQuery.where(
.where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType),
relationshipType), getRightItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest)
criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item)); );
criteriaQuery.orderBy(criteriaBuilder.asc(relationshipRoot.get(Relationship_.rightPlace))); criteriaQuery.orderBy(criteriaBuilder.asc(relationshipRoot.get(Relationship_.rightPlace)));
} }
return list(context, criteriaQuery, true, Relationship.class, limit, offset); return list(context, criteriaQuery, true, Relationship.class, limit, offset);
} }
@Override
public List<ItemUuidAndRelationshipId> findByLatestItemAndRelationshipType(
Context context, Item latestItem, RelationshipType relationshipType, boolean isLeft
) throws SQLException {
final String relationshipIdAlias = "relationshipId";
final String itemUuidAlias = "itemUuid";
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery<Tuple> criteriaQuery = criteriaBuilder.createTupleQuery();
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
ArrayList<Predicate> predicates = new ArrayList<>();
// all relationships should have the specified relationship type
predicates.add(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType)
);
if (isLeft) {
// match relationships based on the left item
predicates.add(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), latestItem)
);
// the left item has to have "latest status" => accept BOTH and LEFT_ONLY
predicates.add(
criteriaBuilder.notEqual(
relationshipRoot.get(Relationship_.LATEST_VERSION_STATUS),
Relationship.LatestVersionStatus.RIGHT_ONLY
)
);
// return the UUIDs of the right item
criteriaQuery.multiselect(
relationshipRoot.get(Relationship_.id).alias(relationshipIdAlias),
relationshipRoot.get(Relationship_.rightItem).get(Item_.id).alias(itemUuidAlias)
);
} else {
// match relationships based on the right item
predicates.add(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), latestItem)
);
// the right item has to have "latest status" => accept BOTH and RIGHT_ONLY
predicates.add(
criteriaBuilder.notEqual(
relationshipRoot.get(Relationship_.LATEST_VERSION_STATUS),
Relationship.LatestVersionStatus.LEFT_ONLY
)
);
// return the UUIDs of the left item
criteriaQuery.multiselect(
relationshipRoot.get(Relationship_.id).alias(relationshipIdAlias),
relationshipRoot.get(Relationship_.leftItem).get(Item_.id).alias(itemUuidAlias)
);
}
// all predicates are chained with the AND operator
criteriaQuery.where(predicates.toArray(new Predicate[]{}));
// deduplicate result
criteriaQuery.distinct(true);
// execute query
Query query = this.getHibernateSession(context).createQuery(criteriaQuery);
query.setHint("org.hibernate.cacheable", true);
List<?> resultList = query.getResultList();
// convert types
return resultList.stream()
.map(Tuple.class::cast)
.map(t -> new ItemUuidAndRelationshipId(
(UUID) t.get(itemUuidAlias),
(Integer) t.get(relationshipIdAlias)
))
.collect(Collectors.toList());
}
@Override @Override
public List<Relationship> findByTypeName(Context context, String typeName) public List<Relationship> findByTypeName(Context context, String typeName)
throws SQLException { throws SQLException {
@@ -228,24 +373,26 @@ public class RelationshipDAOImpl extends AbstractHibernateDAO<Relationship> impl
} }
@Override @Override
public int countByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, public int countByItemAndRelationshipType(
boolean isLeft) throws SQLException { Context context, Item item, RelationshipType relationshipType, boolean isLeft, boolean excludeNonLatest
) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class);
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class); Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
criteriaQuery.select(relationshipRoot); criteriaQuery.select(relationshipRoot);
if (isLeft) { if (isLeft) {
criteriaQuery criteriaQuery.where(
.where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType),
relationshipType), getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest)
criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item)); );
} else { } else {
criteriaQuery criteriaQuery.where(
.where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType),
relationshipType), getRightItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest)
criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item)); );
} }
return count(context, criteriaQuery, criteriaBuilder, relationshipRoot); return count(context, criteriaQuery, criteriaBuilder, relationshipRoot);
} }

View File

@@ -0,0 +1,37 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.dao.pojo;
import java.util.UUID;
import org.dspace.content.Relationship;
import org.dspace.content.dao.RelationshipDAO;
import org.springframework.lang.NonNull;
/**
* Used by {@link RelationshipDAO#findByLatestItemAndRelationshipType} to avoid creating {@link Relationship}s.
*/
public class ItemUuidAndRelationshipId {
private final UUID itemUuid;
private final int relationshipId;
public ItemUuidAndRelationshipId(@NonNull UUID itemUuid, @NonNull int relationshipId) {
this.itemUuid = itemUuid;
this.relationshipId = relationshipId;
}
public UUID getItemUuid() {
return this.itemUuid;
}
public int getRelationshipId() {
return this.relationshipId;
}
}

View File

@@ -21,6 +21,7 @@ import org.dspace.content.Bitstream;
import org.dspace.content.Bundle; import org.dspace.content.Bundle;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.Community; import org.dspace.content.Community;
import org.dspace.content.EntityType;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.MetadataField; import org.dspace.content.MetadataField;
import org.dspace.content.MetadataValue; import org.dspace.content.MetadataValue;
@@ -111,8 +112,22 @@ public interface ItemService
* @return an iterator over the items in the archive. * @return an iterator over the items in the archive.
* @throws SQLException if database error * @throws SQLException if database error
*/ */
@Deprecated
public Iterator<Item> findAllUnfiltered(Context context) throws SQLException; public Iterator<Item> findAllUnfiltered(Context context) throws SQLException;
/**
* Find all items that are:
* - NOT in the workspace
* - NOT in the workflow
* - NOT a template item for e.g. a collection
*
* This implies that the result also contains older versions of items and withdrawn items.
* @param context the DSpace context.
* @return iterator over all regular items.
* @throws SQLException if database error.
*/
public Iterator<Item> findAllRegularItems(Context context) throws SQLException;
/** /**
* Find all the items in the archive by a given submitter. The order is * Find all the items in the archive by a given submitter. The order is
* indeterminate. Only items with the "in archive" flag set are included. * indeterminate. Only items with the "in archive" flag set are included.
@@ -579,6 +594,37 @@ public interface ItemService
*/ */
public boolean canCreateNewVersion(Context context, Item item) throws SQLException; public boolean canCreateNewVersion(Context context, Item item) throws SQLException;
/**
* Returns an iterator of in archive items possessing the passed metadata field, or only
* those matching the passed value, if value is not Item.ANY
*
* @param context DSpace context object
* @param schema metadata field schema
* @param element metadata field element
* @param qualifier metadata field qualifier
* @param value field value or Item.ANY to match any value
* @return an iterator over the items matching that authority value
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
*/
public Iterator<Item> findArchivedByMetadataField(Context context, String schema,
String element, String qualifier,
String value) throws SQLException, AuthorizeException;
/**
* Returns an iterator of in archive items possessing the passed metadata field, or only
* those matching the passed value, if value is not Item.ANY
*
* @param context DSpace context object
* @param metadataField metadata
* @param value field value or Item.ANY to match any value
* @return an iterator over the items matching that authority value
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
*/
public Iterator<Item> findArchivedByMetadataField(Context context, String metadataField, String value)
throws SQLException, AuthorizeException;
/** /**
* Returns an iterator of Items possessing the passed metadata field, or only * Returns an iterator of Items possessing the passed metadata field, or only
* those matching the passed value, if value is not Item.ANY * those matching the passed value, if value is not Item.ANY
@@ -618,7 +664,7 @@ public interface ItemService
*/ */
public Iterator<Item> findByAuthorityValue(Context context, public Iterator<Item> findByAuthorityValue(Context context,
String schema, String element, String qualifier, String value) String schema, String element, String qualifier, String value)
throws SQLException, AuthorizeException, IOException; throws SQLException, AuthorizeException;
public Iterator<Item> findByMetadataFieldAuthority(Context context, String mdString, String authority) public Iterator<Item> findByMetadataFieldAuthority(Context context, String mdString, String authority)
@@ -783,4 +829,19 @@ public interface ItemService
public List<MetadataValue> getMetadata(Item item, String schema, String element, String qualifier, public List<MetadataValue> getMetadata(Item item, String schema, String element, String qualifier,
String lang, boolean enableVirtualMetadata); String lang, boolean enableVirtualMetadata);
/**
* Retrieve the label of the entity type of the given item.
* @param item the item.
* @return the label of the entity type, taken from the item metadata, or null if not found.
*/
public String getEntityTypeLabel(Item item);
/**
* Retrieve the entity type of the given item.
* @param context the DSpace context.
* @param item the item.
* @return the entity type of the given item, or null if not found.
*/
public EntityType getEntityType(Context context, Item item) throws SQLException;
} }

View File

@@ -14,7 +14,9 @@ import java.util.UUID;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.Relationship; import org.dspace.content.Relationship;
import org.dspace.content.Relationship.LatestVersionStatus;
import org.dspace.content.RelationshipType; import org.dspace.content.RelationshipType;
import org.dspace.content.dao.pojo.ItemUuidAndRelationshipId;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.service.DSpaceCRUDService; import org.dspace.service.DSpaceCRUDService;
@@ -49,6 +51,25 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
List<Relationship> findByItem(Context context, Item item, Integer limit, Integer offset, boolean excludeTilted) List<Relationship> findByItem(Context context, Item item, Integer limit, Integer offset, boolean excludeTilted)
throws SQLException; throws SQLException;
/**
* Retrieves the list of Relationships currently in the system for which the given Item is either
* a leftItem or a rightItem object
* @param context The relevant DSpace context
* @param item The Item that has to be the left or right item for the relationship to be
* included in the list
* @param limit paging limit
* @param offset paging offset
* @param excludeTilted If true, excludes tilted relationships
* @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* that is relevant for this relationship
* @return The list of relationships for which each relationship adheres to the above
* listed constraint
* @throws SQLException If something goes wrong
*/
List<Relationship> findByItem(
Context context, Item item, Integer limit, Integer offset, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException;
/** /**
* Retrieves the full list of relationships currently in the system * Retrieves the full list of relationships currently in the system
* @param context The relevant DSpace context * @param context The relevant DSpace context
@@ -79,30 +100,54 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
public Relationship create(Context context, Relationship relationship) throws SQLException, AuthorizeException; public Relationship create(Context context, Relationship relationship) throws SQLException, AuthorizeException;
/** /**
* This method returns the next leftplace integer to use for a relationship with this item as the leftItem * Move the given relationship to a new leftPlace and/or rightPlace.
* *
* @param context The relevant DSpace context * This will
* @param item The item that has to be the leftItem of a relationship for it to qualify * 1. verify whether the move is authorized
* @return The next integer to be used for the leftplace of a relationship with the given item * 2. move the relationship to the specified left/right place
* as a left item * 3. update the left/right place of other relationships and/or metadata in order to resolve the move without
* @throws SQLException If something goes wrong * leaving any gaps
*
* At least one of the new places should be non-null, otherwise no changes will be made.
*
* @param context The relevant DSpace context
* @param relationship The Relationship to move
* @param newLeftPlace The value to set the leftPlace of this Relationship to
* @param newRightPlace The value to set the rightPlace of this Relationship to
* @return The moved relationship with updated place variables
* @throws SQLException If something goes wrong
* @throws AuthorizeException If the user is not authorized to update the Relationship or its Items
*/ */
int findNextLeftPlaceByLeftItem(Context context, Item item) throws SQLException; Relationship move(Context context, Relationship relationship, Integer newLeftPlace, Integer newRightPlace)
throws SQLException, AuthorizeException;
/** /**
* This method returns the next rightplace integer to use for a relationship with this item as the rightItem * Move the given relationship to a new leftItem and/or rightItem.
* *
* @param context The relevant DSpace context * This will
* @param item The item that has to be the rightitem of a relationship for it to qualify * 1. move the relationship to the last place in its current left or right Item. This ensures that we don't leave
* @return The next integer to be used for the rightplace of a relationship with the given item * any gaps when moving the relationship to a new Item.
* as a right item * If only one of the relationship's Items is changed,the order of relationships and metadatain the other
* @throws SQLException If something goes wrong * will not be affected
* 2. insert the relationship into the new Item(s)
*
* At least one of the new Items should be non-null, otherwise no changes will be made.
*
* @param context The relevant DSpace context
* @param relationship The Relationship to move
* @param newLeftItem The value to set the leftItem of this Relationship to
* @param newRightItem The value to set the rightItem of this Relationship to
* @return The moved relationship with updated left/right Items variables
* @throws SQLException If something goes wrong
* @throws AuthorizeException If the user is not authorized to update the Relationship or its Items
*/ */
int findNextRightPlaceByRightItem(Context context, Item item) throws SQLException; Relationship move(Context context, Relationship relationship, Item newLeftItem, Item newRightItem)
throws SQLException, AuthorizeException;
/** /**
* This method returns a list of Relationships for which the leftItem or rightItem is equal to the given * This method returns a list of Relationships for which the leftItem or rightItem is equal to the given
* Item object and for which the RelationshipType object is equal to the relationshipType property * Item object and for which the RelationshipType object is equal to the relationshipType property
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context The relevant DSpace context * @param context The relevant DSpace context
* @param item The Item object to be matched on the leftItem or rightItem for the relationship * @param item The Item object to be matched on the leftItem or rightItem for the relationship
* @param relationshipType The RelationshipType object that will be used to check the Relationship on * @param relationshipType The RelationshipType object that will be used to check the Relationship on
@@ -117,6 +162,7 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
/** /**
* This method returns a list of Relationships for which the leftItem or rightItem is equal to the given * This method returns a list of Relationships for which the leftItem or rightItem is equal to the given
* Item object and for which the RelationshipType object is equal to the relationshipType property * Item object and for which the RelationshipType object is equal to the relationshipType property
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context The relevant DSpace context * @param context The relevant DSpace context
* @param item The Item object to be matched on the leftItem or rightItem for the relationship * @param item The Item object to be matched on the leftItem or rightItem for the relationship
* @param relationshipType The RelationshipType object that will be used to check the Relationship on * @param relationshipType The RelationshipType object that will be used to check the Relationship on
@@ -131,6 +177,24 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
/** /**
* This method returns a list of Relationships for which the leftItem or rightItem is equal to the given * This method returns a list of Relationships for which the leftItem or rightItem is equal to the given
* Item object and for which the RelationshipType object is equal to the relationshipType property * Item object and for which the RelationshipType object is equal to the relationshipType property
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context The relevant DSpace context
* @param item The Item object to be matched on the leftItem or rightItem for the relationship
* @param relationshipType The RelationshipType object that will be used to check the Relationship on
* @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* that is relevant for this relationship
* @return The list of Relationship objects that have the given Item object as leftItem or rightItem and
* for which the relationshipType property is equal to the given RelationshipType
* @throws SQLException If something goes wrong
*/
public List<Relationship> findByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, int limit, int offset, boolean excludeNonLatest
) throws SQLException;
/**
* This method returns a list of Relationships for which the leftItem or rightItem is equal to the given
* Item object and for which the RelationshipType object is equal to the relationshipType property
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context The relevant DSpace context * @param context The relevant DSpace context
* @param item The Item object to be matched on the leftItem or rightItem for the relationship * @param item The Item object to be matched on the leftItem or rightItem for the relationship
* @param relationshipType The RelationshipType object that will be used to check the Relationship on * @param relationshipType The RelationshipType object that will be used to check the Relationship on
@@ -145,17 +209,51 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
throws SQLException; throws SQLException;
/** /**
* This method will update the place for the Relationship and all other relationships found by the items and * This method returns a list of Relationships for which the leftItem or rightItem is equal to the given
* relationship type of the given Relationship. It will give this Relationship the last place in both the * Item object and for which the RelationshipType object is equal to the relationshipType property
* left and right place determined by querying for the list of leftRelationships and rightRelationships * NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* by the leftItem, rightItem and relationshipType of the given Relationship. * @param context The relevant DSpace context
* @param context The relevant DSpace context * @param item The Item object to be matched on the leftItem or rightItem for the relationship
* @param relationship The Relationship object that will have it's place updated and that will be used * @param relationshipType The RelationshipType object that will be used to check the Relationship on
* to retrieve the other relationships whose place might need to be updated * @param isLeft Is the item left or right
* @throws SQLException If something goes wrong * @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* that is relevant for this relationship
* @return The list of Relationship objects that have the given Item object as leftItem or rightItem and
* for which the relationshipType property is equal to the given RelationshipType
* @throws SQLException If something goes wrong
*/ */
public void updatePlaceInRelationship(Context context, Relationship relationship) public List<Relationship> findByItemAndRelationshipType(
throws SQLException, AuthorizeException; Context context, Item item, RelationshipType relationshipType, boolean isLeft, int limit, int offset,
boolean excludeNonLatest
) throws SQLException;
/**
* This method returns the UUIDs of all items that have a relationship with the given item, from the perspective
* of the other item. In other words, given a relationship with the given item, the given item should have
* "latest status" in order for the other item uuid to be returned.
*
* This method differs from the "excludeNonLatest" property in other methods,
* because in this method the current item should have "latest status" to return the other item,
* whereas with "excludeNonLatest" the other item should have "latest status" to be returned.
*
* This method is used to index items in solr; when searching for related items of one of the returned uuids,
* the given item should appear as a search result.
*
* NOTE: This method does not return {@link Relationship}s for performance, because doing so would eagerly fetch
* the items on both sides, which is unnecessary.
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type.
* @param context the DSpace context.
* @param latestItem the target item; only relationships where this item has "latest status" should be considered.
* @param relationshipType the relationship type for which relationships should be selected.
* @param isLeft whether the entity type of the item occurs on the left or right side of the relationship type.
* This is redundant in most cases, but necessary because relationship types my have
* the same entity type on both sides.
* @return a list containing pairs of relationship ids and item uuids.
* @throws SQLException if something goes wrong.
*/
public List<ItemUuidAndRelationshipId> findByLatestItemAndRelationshipType(
Context context, Item latestItem, RelationshipType relationshipType, boolean isLeft
) throws SQLException;
/** /**
* This method will update the given item's metadata order. * This method will update the given item's metadata order.
@@ -174,6 +272,7 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
/** /**
* This method returns a list of Relationship objects for which the relationshipType property is equal to the given * This method returns a list of Relationship objects for which the relationshipType property is equal to the given
* RelationshipType object * RelationshipType object
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context The relevant DSpace context * @param context The relevant DSpace context
* @param relationshipType The RelationshipType object that will be used to check the Relationship on * @param relationshipType The RelationshipType object that will be used to check the Relationship on
* @return The list of Relationship objects for which the given RelationshipType object is equal * @return The list of Relationship objects for which the given RelationshipType object is equal
@@ -185,6 +284,7 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
/** /**
* This method returns a list of Relationship objets for which the relationshipType property is equal to the given * This method returns a list of Relationship objets for which the relationshipType property is equal to the given
* RelationshipType object * RelationshipType object
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context The relevant DSpace context * @param context The relevant DSpace context
* @param relationshipType The RelationshipType object that will be used to check the Relationship on * @param relationshipType The RelationshipType object that will be used to check the Relationship on
* @param limit paging limit * @param limit paging limit
@@ -198,6 +298,27 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
/** /**
* This method is used to construct a Relationship object with all it's variables * This method is used to construct a Relationship object with all it's variables
* @param c The relevant DSpace context
* @param leftItem The leftItem Item object for the relationship
* @param rightItem The rightItem Item object for the relationship
* @param relationshipType The RelationshipType object for the relationship
* @param leftPlace The leftPlace integer for the relationship
* @param rightPlace The rightPlace integer for the relationship
* @param leftwardValue The leftwardValue string for the relationship
* @param rightwardValue The rightwardValue string for the relationship
* @param latestVersionStatus The latestVersionStatus value for the relationship
* @return The created Relationship object with the given properties
* @throws AuthorizeException If something goes wrong
* @throws SQLException If something goes wrong
*/
Relationship create(
Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, int leftPlace, int rightPlace,
String leftwardValue, String rightwardValue, LatestVersionStatus latestVersionStatus
) throws AuthorizeException, SQLException;
/**
* This method is used to construct a Relationship object with all it's variables,
* except the latest version status
* @param c The relevant DSpace context * @param c The relevant DSpace context
* @param leftItem The leftItem Item object for the relationship * @param leftItem The leftItem Item object for the relationship
* @param rightItem The rightItem Item object for the relationship * @param rightItem The rightItem Item object for the relationship
@@ -210,14 +331,15 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
* @throws AuthorizeException If something goes wrong * @throws AuthorizeException If something goes wrong
* @throws SQLException If something goes wrong * @throws SQLException If something goes wrong
*/ */
Relationship create(Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, Relationship create(
int leftPlace, int rightPlace, String leftwardValue, String rightwardValue) Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, int leftPlace, int rightPlace,
throws AuthorizeException, SQLException; String leftwardValue, String rightwardValue
) throws AuthorizeException, SQLException;
/** /**
* This method is used to construct a Relationship object with all it's variables, * This method is used to construct a Relationship object with all it's variables,
* except the leftward and rightward labels * except the leftward label, rightward label and latest version status
* @param c The relevant DSpace context * @param c The relevant DSpace context
* @param leftItem The leftItem Item object for the relationship * @param leftItem The leftItem Item object for the relationship
* @param rightItem The rightItem Item object for the relationship * @param rightItem The rightItem Item object for the relationship
@@ -267,7 +389,7 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
/** /**
* Count total number of relationships (rows in relationship table) by a relationship type * Count total number of relationships (rows in relationship table) by a relationship type
* * NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context context * @param context context
* @param relationshipType relationship type to filter by * @param relationshipType relationship type to filter by
* @return total count * @return total count
@@ -287,10 +409,25 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
*/ */
int countByItem(Context context, Item item) throws SQLException; int countByItem(Context context, Item item) throws SQLException;
/**
* This method returns a count of Relationship objects that have the given Item object
* as a leftItem or a rightItem
* @param context The relevant DSpace context
* @param item The item that should be either a leftItem or a rightItem of all
* the Relationship objects in the returned list
* @param excludeTilted if true, excludes tilted relationships
* @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version
* that is relevant
* @return The list of Relationship objects that contain either a left or a
* right item that is equal to the given item
* @throws SQLException If something goes wrong
*/
int countByItem(Context context, Item item, boolean excludeTilted, boolean excludeNonLatest) throws SQLException;
/** /**
* Count total number of relationships (rows in relationship table) by a relationship type and a boolean indicating * Count total number of relationships (rows in relationship table) by a relationship type and a boolean indicating
* whether the relationship should contain the item on the left side or not * whether the relationship should contain the item on the left side or not
* * NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context context * @param context context
* @param relationshipType relationship type to filter by * @param relationshipType relationship type to filter by
* @param isLeft Indicating whether the counted Relationships should have the given Item on the left side or not * @param isLeft Indicating whether the counted Relationships should have the given Item on the left side or not
@@ -300,6 +437,22 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
int countByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, boolean isLeft) int countByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, boolean isLeft)
throws SQLException; throws SQLException;
/**
* Count total number of relationships (rows in relationship table) by a relationship type and a boolean indicating
* whether the relationship should contain the item on the left side or not
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context context
* @param relationshipType relationship type to filter by
* @param isLeft Indicating whether the counted Relationships should have the given Item on the left side
* @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* that is relevant for this relationship
* @return total count with the given parameters
* @throws SQLException if database error
*/
int countByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, boolean isLeft, boolean excludeNonLatest
) throws SQLException;
/** /**
* Count total number of relationships (rows in relationship table) * Count total number of relationships (rows in relationship table)
* by a relationship leftward or rightward typeName * by a relationship leftward or rightward typeName

View File

@@ -155,12 +155,11 @@ public abstract class AbstractHibernateDAO<T> implements GenericDAO<T> {
* @return A list of distinct results as depicted by the CriteriaQuery and parameters * @return A list of distinct results as depicted by the CriteriaQuery and parameters
* @throws SQLException * @throws SQLException
*/ */
public List<T> list(Context context, CriteriaQuery criteriaQuery, boolean cacheable, Class<T> clazz, int maxResults, public List<T> list(
int offset) throws SQLException { Context context, CriteriaQuery<T> criteriaQuery, boolean cacheable, Class<T> clazz, int maxResults, int offset
) throws SQLException {
criteriaQuery.distinct(true); criteriaQuery.distinct(true);
@SuppressWarnings("unchecked") return executeCriteriaQuery(context, criteriaQuery, cacheable, maxResults, offset);
List<T> result = (List<T>) executeCriteriaQuery(context, criteriaQuery, cacheable, maxResults, offset);
return result;
} }
/** /**
@@ -183,12 +182,12 @@ public abstract class AbstractHibernateDAO<T> implements GenericDAO<T> {
* @return A list of results determined by the CriteriaQuery and parameters * @return A list of results determined by the CriteriaQuery and parameters
* @throws SQLException * @throws SQLException
*/ */
public List<T> list(Context context, CriteriaQuery criteriaQuery, boolean cacheable, Class<T> clazz, int maxResults, public List<T> list(
int offset, boolean distinct) throws SQLException { Context context, CriteriaQuery<T> criteriaQuery, boolean cacheable, Class<T> clazz, int maxResults, int offset,
boolean distinct
) throws SQLException {
criteriaQuery.distinct(distinct); criteriaQuery.distinct(distinct);
@SuppressWarnings("unchecked") return executeCriteriaQuery(context, criteriaQuery, cacheable, maxResults, offset);
List<T> result = (List<T>) executeCriteriaQuery(context, criteriaQuery, cacheable, maxResults, offset);
return result;
} }
/** /**

View File

@@ -538,6 +538,36 @@ public class Context implements AutoCloseable {
} }
} }
/**
* Rollback the current transaction with the database, without persisting any
* pending changes. The database connection is not closed and can be reused
* afterwards.
*
* <b>WARNING: After calling this method all previously fetched entities are
* "detached" (pending changes are not tracked anymore). You have to reload all
* entities you still want to work with manually after this method call (see
* {@link Context#reloadEntity(ReloadableEntity)}).</b>
*
* @throws SQLException When rollbacking the transaction in the database fails.
*/
public void rollback() throws SQLException {
// If Context is no longer open/valid, just note that it has already been closed
if (!isValid()) {
log.info("rollback() was called on a closed Context object. No changes to abort.");
return;
}
try {
// Rollback ONLY if we have a database transaction, and it is NOT Read Only
if (!isReadOnly() && isTransactionAlive()) {
dbConnection.rollback();
reloadContextBoundEntities();
}
} finally {
events = null;
}
}
/** /**
* Close the context, without committing any of the changes performed using * Close the context, without committing any of the changes performed using
* this context. The database connection is freed. No exception is thrown if * this context. The database connection is freed. No exception is thrown if
@@ -657,6 +687,15 @@ public class Context implements AutoCloseable {
return myGroups; return myGroups;
} }
/**
* Get a set of all of the special groups uuids that current user is a member of.
*
* @return list of special groups uuids
*/
public Set<UUID> getSpecialGroupUuids() {
return CollectionUtils.isEmpty(specialGroups) ? Set.of() : specialGroups;
}
/** /**
* Temporary change the user bound to the context, empty the special groups that * Temporary change the user bound to the context, empty the special groups that
* are retained to allow subsequent restore * are retained to allow subsequent restore

View File

@@ -259,12 +259,19 @@ public class Curation extends DSpaceRunnable<CurationScriptConfiguration> {
super.handler.logError("EPerson not found: " + currentUserUuid); super.handler.logError("EPerson not found: " + currentUserUuid);
throw new IllegalArgumentException("Unable to find a user with uuid: " + currentUserUuid); throw new IllegalArgumentException("Unable to find a user with uuid: " + currentUserUuid);
} }
assignSpecialGroupsInContext();
this.context.setCurrentUser(eperson); this.context.setCurrentUser(eperson);
} catch (SQLException e) { } catch (SQLException e) {
handler.handleException("Something went wrong trying to fetch eperson for uuid: " + currentUserUuid, e); handler.handleException("Something went wrong trying to fetch eperson for uuid: " + currentUserUuid, e);
} }
} }
protected void assignSpecialGroupsInContext() throws SQLException {
for (UUID uuid : handler.getSpecialGroups()) {
context.setSpecialGroup(uuid);
}
}
/** /**
* Fills in some optional command line options. * Fills in some optional command line options.
* Checks if there are missing required options or invalid values for options. * Checks if there are missing required options or invalid values for options.

View File

@@ -7,6 +7,7 @@
*/ */
package org.dspace.discovery; package org.dspace.discovery;
import java.sql.SQLException;
import java.util.HashSet; import java.util.HashSet;
import java.util.Optional; import java.util.Optional;
import java.util.Set; import java.util.Set;
@@ -37,6 +38,8 @@ public class IndexEventConsumer implements Consumer {
// collect Items, Collections, Communities that need indexing // collect Items, Collections, Communities that need indexing
private Set<IndexableObject> objectsToUpdate = new HashSet<>(); private Set<IndexableObject> objectsToUpdate = new HashSet<>();
// collect freshly created Items that need indexing (requires pre-db status)
private Set<IndexableObject> createdItemsToUpdate = new HashSet<>();
// unique search IDs to delete // unique search IDs to delete
private Set<String> uniqueIdsToDelete = new HashSet<>(); private Set<String> uniqueIdsToDelete = new HashSet<>();
@@ -65,6 +68,7 @@ public class IndexEventConsumer implements Consumer {
if (objectsToUpdate == null) { if (objectsToUpdate == null) {
objectsToUpdate = new HashSet<>(); objectsToUpdate = new HashSet<>();
uniqueIdsToDelete = new HashSet<>(); uniqueIdsToDelete = new HashSet<>();
createdItemsToUpdate = new HashSet<>();
} }
int st = event.getSubjectType(); int st = event.getSubjectType();
@@ -143,6 +147,7 @@ public class IndexEventConsumer implements Consumer {
String detail = indexableObjectService.getType() + "-" + event.getSubjectID().toString(); String detail = indexableObjectService.getType() + "-" + event.getSubjectID().toString();
uniqueIdsToDelete.add(detail); uniqueIdsToDelete.add(detail);
} }
objectsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, subject)); objectsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, subject));
} }
break; break;
@@ -162,7 +167,7 @@ public class IndexEventConsumer implements Consumer {
// also update the object in order to index mapped/unmapped Items // also update the object in order to index mapped/unmapped Items
if (subject != null && if (subject != null &&
subject.getType() == Constants.COLLECTION && object.getType() == Constants.ITEM) { subject.getType() == Constants.COLLECTION && object.getType() == Constants.ITEM) {
objectsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, object)); createdItemsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, object));
} }
} }
break; break;
@@ -209,23 +214,11 @@ public class IndexEventConsumer implements Consumer {
} }
// update the changed Items not deleted because they were on create list // update the changed Items not deleted because they were on create list
for (IndexableObject iu : objectsToUpdate) { for (IndexableObject iu : objectsToUpdate) {
/* we let all types through here and indexObject(ctx, iu, false);
* allow the search indexer to make }
* decisions on indexing and/or removal // update the created Items with a pre-db status
*/ for (IndexableObject iu : createdItemsToUpdate) {
iu.setIndexedObject(ctx.reloadEntity(iu.getIndexedObject())); indexObject(ctx, iu, true);
String uniqueIndexID = iu.getUniqueIndexID();
if (uniqueIndexID != null) {
try {
indexer.indexContent(ctx, iu, true, false);
log.debug("Indexed "
+ iu.getTypeText()
+ ", id=" + iu.getID()
+ ", unique_id=" + uniqueIndexID);
} catch (Exception e) {
log.error("Failed while indexing object: ", e);
}
}
} }
} finally { } finally {
if (!objectsToUpdate.isEmpty() || !uniqueIdsToDelete.isEmpty()) { if (!objectsToUpdate.isEmpty() || !uniqueIdsToDelete.isEmpty()) {
@@ -235,6 +228,27 @@ public class IndexEventConsumer implements Consumer {
// "free" the resources // "free" the resources
objectsToUpdate.clear(); objectsToUpdate.clear();
uniqueIdsToDelete.clear(); uniqueIdsToDelete.clear();
createdItemsToUpdate.clear();
}
}
}
private void indexObject(Context ctx, IndexableObject iu, boolean preDb) throws SQLException {
/* we let all types through here and
* allow the search indexer to make
* decisions on indexing and/or removal
*/
iu.setIndexedObject(ctx.reloadEntity(iu.getIndexedObject()));
String uniqueIndexID = iu.getUniqueIndexID();
if (uniqueIndexID != null) {
try {
indexer.indexContent(ctx, iu, true, false, preDb);
log.debug("Indexed "
+ iu.getTypeText()
+ ", id=" + iu.getID()
+ ", unique_id=" + uniqueIndexID);
} catch (Exception e) {
log.error("Failed while indexing object: ", e);
} }
} }
} }

View File

@@ -9,7 +9,9 @@ package org.dspace.discovery;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.Map;
import org.apache.solr.client.solrj.SolrServerException;
import org.dspace.core.Context; import org.dspace.core.Context;
/** /**
@@ -30,6 +32,17 @@ public interface IndexingService {
void indexContent(Context context, IndexableObject dso, void indexContent(Context context, IndexableObject dso,
boolean force, boolean commit) throws SQLException, SearchServiceException; boolean force, boolean commit) throws SQLException, SearchServiceException;
/**
* Index a given DSO
* @param context The DSpace Context
* @param dso The DSpace Object to index
* @param force Force update even if not stale
* @param commit Commit the changes
* @param preDb Add a "preDB" status to the index (only applicable to Items)
*/
void indexContent(Context context, IndexableObject dso,
boolean force, boolean commit, boolean preDb) throws SQLException, SearchServiceException;
void unIndexContent(Context context, IndexableObject dso) void unIndexContent(Context context, IndexableObject dso)
throws SQLException, IOException; throws SQLException, IOException;
@@ -62,4 +75,15 @@ public interface IndexingService {
void optimize() throws SearchServiceException; void optimize() throws SearchServiceException;
void buildSpellCheck() throws SearchServiceException, IOException; void buildSpellCheck() throws SearchServiceException, IOException;
/**
* Atomically update the index of a single field for an object
* @param context The DSpace context
* @param uniqueIndexId The unqiue index ID of the object to update the index for
* @param field The field to update
* @param fieldModifier The modifiers for the field to update. More information on how to atomically update a solr
* field using a field modifier can be found here: https://yonik.com/solr/atomic-updates/
*/
void atomicUpdate(Context context, String uniqueIndexId, String field, Map<String,Object> fieldModifier)
throws SolrServerException, IOException;
} }

View File

@@ -8,6 +8,7 @@
package org.dspace.discovery; package org.dspace.discovery;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.Iterator;
import java.util.List; import java.util.List;
import org.dspace.content.Item; import org.dspace.content.Item;
@@ -38,6 +39,7 @@ public interface SearchService {
DiscoverResult search(Context context, DiscoverQuery query) DiscoverResult search(Context context, DiscoverQuery query)
throws SearchServiceException; throws SearchServiceException;
/** /**
* Convenient method to call @see #search(Context, DSpaceObject, * Convenient method to call @see #search(Context, DSpaceObject,
* DiscoverQuery, boolean) with includeWithdrawn=false * DiscoverQuery, boolean) with includeWithdrawn=false
@@ -52,9 +54,22 @@ public interface SearchService {
DiscoverResult search(Context context, IndexableObject dso, DiscoverQuery query) DiscoverResult search(Context context, IndexableObject dso, DiscoverQuery query)
throws SearchServiceException; throws SearchServiceException;
/**
* Convenience method to call @see #search(Context, DSpaceObject, DiscoverQuery) and getting an iterator for the
* results
*
* @param context DSpace context object
* @param dso a DSpace object to use as a scope of the search
* @param query the discovery query object
* @return an iterator iterating over all results from the search
* @throws SearchServiceException if search error
*/
Iterator<Item> iteratorSearch(Context context, IndexableObject dso, DiscoverQuery query)
throws SearchServiceException;
List<IndexableObject> search(Context context, String query, String orderfield, boolean ascending, int offset, List<IndexableObject> search(Context context, String query, String orderfield, boolean ascending, int offset,
int max, String... filterquery); int max, String... filterquery);
/** /**
* Transforms the given string field and value into a filter query * Transforms the given string field and value into a filter query

View File

@@ -20,6 +20,7 @@ import org.dspace.content.Item;
import org.dspace.content.WorkspaceItem; import org.dspace.content.WorkspaceItem;
import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationService; import org.dspace.discovery.configuration.DiscoveryConfigurationService;
import org.dspace.discovery.utils.DiscoverQueryBuilder;
import org.dspace.kernel.ServiceManager; import org.dspace.kernel.ServiceManager;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.workflow.WorkflowItem; import org.dspace.workflow.WorkflowItem;
@@ -170,4 +171,10 @@ public class SearchUtils {
DiscoveryConfiguration configurationExtra = getDiscoveryConfigurationByName(confName); DiscoveryConfiguration configurationExtra = getDiscoveryConfigurationByName(confName);
result.add(configurationExtra); result.add(configurationExtra);
} }
public static DiscoverQueryBuilder getQueryBuilder() {
ServiceManager manager = DSpaceServicesFactory.getInstance().getServiceManager();
return manager
.getServiceByName(DiscoverQueryBuilder.class.getName(), DiscoverQueryBuilder.class);
}
} }

View File

@@ -8,6 +8,8 @@
package org.dspace.discovery; package org.dspace.discovery;
import static java.util.stream.Collectors.joining; import static java.util.stream.Collectors.joining;
import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD;
import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD_PREDB;
import java.io.IOException; import java.io.IOException;
import java.io.PrintWriter; import java.io.PrintWriter;
@@ -118,8 +120,6 @@ public class SolrServiceImpl implements SearchService, IndexingService {
} }
/** /**
* If the handle for the "dso" already exists in the index, and the "dso" * If the handle for the "dso" already exists in the index, and the "dso"
* has a lastModified timestamp that is newer than the document in the index * has a lastModified timestamp that is newer than the document in the index
@@ -166,6 +166,24 @@ public class SolrServiceImpl implements SearchService, IndexingService {
indexableObjectService.writeDocument(context, indexableObject, solrInputDocument); indexableObjectService.writeDocument(context, indexableObject, solrInputDocument);
} }
/**
 * Update the given indexable object using a given service.
 *
 * @param context                The DSpace Context
 * @param indexableObjectService The service to index the object with
 * @param indexableObject        The object to index
 * @param preDB                  Add a "preDB" status to the document
 * @throws IOException         on communication failure with Solr
 * @throws SQLException        on database failure while building the document
 * @throws SolrServerException if Solr reports an error
 */
protected void update(Context context, IndexFactory indexableObjectService, IndexableObject indexableObject,
                      boolean preDB) throws IOException, SQLException, SolrServerException {
    if (!preDB) {
        // regular update: delegate to the standard implementation
        update(context, indexableObjectService, indexableObject);
        return;
    }
    // preDB: build a document carrying the "predb" status marker and write it out
    SolrInputDocument document = indexableObjectService.buildNewDocument(context, indexableObject);
    indexableObjectService.writeDocument(context, indexableObject, document);
}
/** /**
* unIndex removes an Item, Collection, or Community * unIndex removes an Item, Collection, or Community
* *
@@ -454,6 +472,16 @@ public class SolrServiceImpl implements SearchService, IndexingService {
} }
} }
/**
 * Apply a Solr atomic update to a single indexed document.
 * <p>
 * The target document is matched by its unique index id. {@code fieldModifier} is a map of
 * Solr atomic-update operations (e.g. "set", "add", "remove") to values for {@code field};
 * passing a Map as a field value is what triggers Solr's atomic-update semantics.
 * Note: the change is sent to Solr but not committed here.
 *
 * @param context       the DSpace context
 * @param uniqueIndexId unique identifier of the Solr document to modify
 * @param field         name of the field to modify
 * @param fieldModifier map of atomic-update operation to value
 * @throws SolrServerException if Solr reports an error
 * @throws IOException         on communication failure with Solr
 */
@Override
public void atomicUpdate(Context context, String uniqueIndexId, String field, Map<String, Object> fieldModifier)
    throws SolrServerException, IOException {
    SolrInputDocument solrInputDocument = new SolrInputDocument();
    // match the existing document by its unique id
    solrInputDocument.addField(SearchUtils.RESOURCE_UNIQUE_ID, uniqueIndexId);
    solrInputDocument.addField(field, fieldModifier);
    solrSearchCore.getSolr().add(solrInputDocument);
}
// ////////////////////////////////// // //////////////////////////////////
// Private // Private
// ////////////////////////////////// // //////////////////////////////////
@@ -710,16 +738,21 @@ public class SolrServiceImpl implements SearchService, IndexingService {
discoveryQuery.addFilterQueries("location:l" + dso.getID()); discoveryQuery.addFilterQueries("location:l" + dso.getID());
} else if (dso instanceof IndexableItem) { } else if (dso instanceof IndexableItem) {
discoveryQuery.addFilterQueries(SearchUtils.RESOURCE_UNIQUE_ID + ":" + dso. discoveryQuery.addFilterQueries(SearchUtils.RESOURCE_UNIQUE_ID + ":" + dso.
getUniqueIndexID()); getUniqueIndexID());
} }
} }
return search(context, discoveryQuery); return search(context, discoveryQuery);
} }
/**
 * Return an iterator over the Items matching the given query within the given scope.
 * Results are fetched lazily from Solr, one page at a time (see {@code SearchIterator}).
 *
 * @param context the DSpace context
 * @param dso     the scope object to restrict the search to
 * @param query   the discovery query to execute
 * @return an iterator over the matching Items
 * @throws SearchServiceException if the initial search fails
 */
@Override
public Iterator<Item> iteratorSearch(Context context, IndexableObject dso, DiscoverQuery query)
    throws SearchServiceException {
    return new SearchIterator(context, dso, query);
}
@Override @Override
public DiscoverResult search(Context context, DiscoverQuery discoveryQuery ) public DiscoverResult search(Context context, DiscoverQuery discoveryQuery)
throws SearchServiceException { throws SearchServiceException {
try { try {
if (solrSearchCore.getSolr() == null) { if (solrSearchCore.getSolr() == null) {
@@ -733,6 +766,72 @@ public class SolrServiceImpl implements SearchService, IndexingService {
} }
} }
/**
 * Iterator over the Items of a Discovery search result.
 * Pages through Solr results lazily: a new page of {@code pagesize} results is fetched
 * whenever the previous page has been fully consumed.
 */
private class SearchIterator implements Iterator<Item> {
    private Context context;
    private DiscoverQuery discoverQuery;
    private DiscoverResult discoverResult;
    private IndexableObject dso;
    // position within the full result set (starts at the query's configured offset)
    private int absoluteCursor;
    // position within the currently loaded page
    private int relativeCursor;
    private int pagesize;

    SearchIterator(Context context, DiscoverQuery discoverQuery) throws SearchServiceException {
        this.context = context;
        this.discoverQuery = discoverQuery;
        initialise();
    }

    SearchIterator(Context context, IndexableObject dso, DiscoverQuery discoverQuery)
        throws SearchServiceException {
        this.context = context;
        this.dso = dso;
        this.discoverQuery = discoverQuery;
        initialise();
    }

    private void initialise() throws SearchServiceException {
        this.relativeCursor = 0;
        // Fix: start the absolute cursor at the query's configured offset in BOTH
        // constructors (previously only the two-argument constructor set it, so the
        // scoped constructor always started at 0 regardless of the query's start value).
        this.absoluteCursor = discoverQuery.getStart();
        if (discoverQuery.getMaxResults() != -1) {
            pagesize = discoverQuery.getMaxResults();
        } else {
            pagesize = 10;
        }
        discoverQuery.setMaxResults(pagesize);
        this.discoverResult = search(context, dso, discoverQuery);
    }

    @Override
    public boolean hasNext() {
        return absoluteCursor < discoverResult.getTotalSearchResults();
    }

    @Override
    public Item next() {
        // honor the Iterator contract instead of failing later with an index error
        if (!hasNext()) {
            throw new java.util.NoSuchElementException("No more search results");
        }
        // paginate getting results from the discoverquery.
        if (relativeCursor == pagesize) {
            // get a new page of results when the last element of the previous page has been read
            int offset = absoluteCursor;
            // reset the position counter for getting element relativecursor on a page
            relativeCursor = 0;
            discoverQuery.setStart(offset);
            try {
                discoverResult = search(context, dso, discoverQuery);
            } catch (SearchServiceException e) {
                log.error("error while getting search results", e);
            }
        }
        // get the element at position relativecursor on a page
        IndexableObject res = discoverResult.getIndexableObjects().get(relativeCursor);
        relativeCursor++;
        absoluteCursor++;
        return (Item) res.getIndexedObject();
    }
}
protected SolrQuery resolveToSolrQuery(Context context, DiscoverQuery discoveryQuery) protected SolrQuery resolveToSolrQuery(Context context, DiscoverQuery discoveryQuery)
throws SearchServiceException { throws SearchServiceException {
SolrQuery solrQuery = new SolrQuery(); SolrQuery solrQuery = new SolrQuery();
@@ -753,6 +852,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
solrQuery.addField(SearchUtils.RESOURCE_TYPE_FIELD); solrQuery.addField(SearchUtils.RESOURCE_TYPE_FIELD);
solrQuery.addField(SearchUtils.RESOURCE_ID_FIELD); solrQuery.addField(SearchUtils.RESOURCE_ID_FIELD);
solrQuery.addField(SearchUtils.RESOURCE_UNIQUE_ID); solrQuery.addField(SearchUtils.RESOURCE_UNIQUE_ID);
solrQuery.addField(STATUS_FIELD);
if (discoveryQuery.isSpellCheck()) { if (discoveryQuery.isSpellCheck()) {
solrQuery.setParam(SpellingParams.SPELLCHECK_Q, query); solrQuery.setParam(SpellingParams.SPELLCHECK_Q, query);
@@ -903,11 +1003,14 @@ public class SolrServiceImpl implements SearchService, IndexingService {
// Enables solr to remove documents related to items not on database anymore (Stale) // Enables solr to remove documents related to items not on database anymore (Stale)
// if maxAttempts is greater than 0, clean up the index on each step // if maxAttempts is greater than 0, clean up the index on each step
if (maxAttempts >= 0) { if (maxAttempts >= 0) {
zombieDocs.add((String) doc.getFirstValue(SearchUtils.RESOURCE_UNIQUE_ID)); Object statusObj = doc.getFirstValue(STATUS_FIELD);
// avoid to process the response except if we are in the last allowed execution. if (!(statusObj instanceof String && statusObj.equals(STATUS_FIELD_PREDB))) {
// When maxAttempts is 0 this will be just the first and last run as the zombieDocs.add((String) doc.getFirstValue(SearchUtils.RESOURCE_UNIQUE_ID));
// executionCount is increased at the start of the loop it will be equals to 1 // avoid to process the response except if we are in the last allowed execution.
skipLoadingResponse = maxAttempts + 1 != executionCount; // When maxAttempts is 0 this will be just the first and last run as the
// executionCount is increased at the start of the loop it will be equals to 1
skipLoadingResponse = maxAttempts + 1 != executionCount;
}
} }
continue; continue;
} }
@@ -1383,6 +1486,28 @@ public class SolrServiceImpl implements SearchService, IndexingService {
} }
} }
/**
 * Index the given object, optionally marking the resulting document as "preDB"
 * (indexed before its database state is finalized).
 * <p>
 * NOTE(review): in the preDB path, any indexing failure (including
 * {@code SearchServiceException}) is logged and swallowed rather than rethrown —
 * confirm this best-effort behavior is intended, since the method declares
 * {@code throws SearchServiceException}.
 *
 * @param context         the DSpace context
 * @param indexableObject the object to index
 * @param force           force (re)indexing even when the index appears up to date
 * @param commit          whether to commit to Solr after indexing
 * @param preDb           mark the document with the "preDB" status
 */
@Override
public void indexContent(Context context, IndexableObject indexableObject, boolean force,
                         boolean commit, boolean preDb) throws SearchServiceException, SQLException {
    if (preDb) {
        try {
            final IndexFactory indexableObjectFactory = indexObjectServiceFactory.
                getIndexableObjectFactory(indexableObject);
            // skip the update unless forced or the indexed copy is stale
            if (force || requiresIndexing(indexableObject.getUniqueIndexID(), indexableObject.getLastModified())) {
                update(context, indexableObjectFactory, indexableObject, true);
                log.info(LogHelper.getHeader(context, "indexed_object", indexableObject.getUniqueIndexID()));
            }
        } catch (IOException | SQLException | SolrServerException | SearchServiceException e) {
            log.error(e.getMessage(), e);
        }
    } else {
        // regular (non-preDB) indexing path
        indexContent(context, indexableObject, force);
    }
    if (commit) {
        commit();
    }
}
@Override @Override
public void commit() throws SearchServiceException { public void commit() throws SearchServiceException {
try { try {
@@ -1436,4 +1561,5 @@ public class SolrServiceImpl implements SearchService, IndexingService {
} }
return null; return null;
} }
} }

View File

@@ -70,10 +70,20 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
return doc; return doc;
} }
/**
 * Default implementation: a "new" document is identical to a regular one.
 * Subclasses may override this to add marker fields (e.g. a pre-database status flag).
 */
@Override
public SolrInputDocument buildNewDocument(Context context, T indexableObject) throws SQLException, IOException {
    return buildDocument(context, indexableObject);
}
@Override @Override
public void writeDocument(Context context, T indexableObject, SolrInputDocument solrInputDocument) public void writeDocument(Context context, T indexableObject, SolrInputDocument solrInputDocument)
throws SQLException, IOException, SolrServerException { throws SQLException, IOException, SolrServerException {
writeDocument(solrInputDocument, null); try {
writeDocument(solrInputDocument, null);
} catch (Exception e) {
log.error("Error occurred while writing SOLR document for {} object {}",
indexableObject.getType(), indexableObject.getID(), e);
}
} }
/** /**
@@ -113,9 +123,11 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
log.info("Full text is larger than the configured limit (discovery.solr.fulltext.charLimit)." log.info("Full text is larger than the configured limit (discovery.solr.fulltext.charLimit)."
+ " Only the first {} characters were indexed.", charLimit); + " Only the first {} characters were indexed.", charLimit);
} else { } else {
log.error("Tika parsing error. Could not index full text.", saxe);
throw new IOException("Tika parsing error. Could not index full text.", saxe); throw new IOException("Tika parsing error. Could not index full text.", saxe);
} }
} catch (TikaException ex) { } catch (TikaException ex) {
log.error("Tika parsing error. Could not index full text.", ex);
throw new IOException("Tika parsing error. Could not index full text.", ex); throw new IOException("Tika parsing error. Could not index full text.", ex);
} }

View File

@@ -10,7 +10,6 @@ package org.dspace.discovery.indexobject;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date; import java.util.Date;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
@@ -43,7 +42,6 @@ import org.dspace.content.service.WorkspaceItemService;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.LogHelper; import org.dspace.core.LogHelper;
import org.dspace.discovery.FullTextContentStreams; import org.dspace.discovery.FullTextContentStreams;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchUtils; import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationParameters; import org.dspace.discovery.configuration.DiscoveryConfigurationParameters;
@@ -64,6 +62,9 @@ import org.dspace.handle.service.HandleService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.util.MultiFormatDateParser; import org.dspace.util.MultiFormatDateParser;
import org.dspace.util.SolrUtils; import org.dspace.util.SolrUtils;
import org.dspace.versioning.Version;
import org.dspace.versioning.VersionHistory;
import org.dspace.versioning.service.VersionHistoryService;
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
import org.dspace.xmlworkflow.storedcomponents.service.XmlWorkflowItemService; import org.dspace.xmlworkflow.storedcomponents.service.XmlWorkflowItemService;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
@@ -78,6 +79,8 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemIndexFactoryImpl.class); private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemIndexFactoryImpl.class);
public static final String VARIANTS_STORE_SEPARATOR = "###"; public static final String VARIANTS_STORE_SEPARATOR = "###";
public static final String STORE_SEPARATOR = "\n|||\n"; public static final String STORE_SEPARATOR = "\n|||\n";
public static final String STATUS_FIELD = "database_status";
public static final String STATUS_FIELD_PREDB = "predb";
@Autowired @Autowired
@@ -96,11 +99,13 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
protected WorkflowItemIndexFactory workflowItemIndexFactory; protected WorkflowItemIndexFactory workflowItemIndexFactory;
@Autowired @Autowired
protected WorkspaceItemIndexFactory workspaceItemIndexFactory; protected WorkspaceItemIndexFactory workspaceItemIndexFactory;
@Autowired
protected VersionHistoryService versionHistoryService;
@Override @Override
public Iterator<IndexableItem> findAll(Context context) throws SQLException { public Iterator<IndexableItem> findAll(Context context) throws SQLException {
Iterator<Item> items = itemService.findAllUnfiltered(context); Iterator<Item> items = itemService.findAllRegularItems(context);
return new Iterator<IndexableItem>() { return new Iterator<IndexableItem>() {
@Override @Override
public boolean hasNext() { public boolean hasNext() {
@@ -139,6 +144,7 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
doc.addField("withdrawn", item.isWithdrawn()); doc.addField("withdrawn", item.isWithdrawn());
doc.addField("discoverable", item.isDiscoverable()); doc.addField("discoverable", item.isDiscoverable());
doc.addField("lastModified", SolrUtils.getDateFormatter().format(item.getLastModified())); doc.addField("lastModified", SolrUtils.getDateFormatter().format(item.getLastModified()));
doc.addField("latestVersion", isLatestVersion(context, item));
EPerson submitter = item.getSubmitter(); EPerson submitter = item.getSubmitter();
if (submitter != null) { if (submitter != null) {
@@ -169,6 +175,51 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
return doc; return doc;
} }
/**
 * Check whether the given item is the latest version.
 * If the latest item cannot be determined, because either the version history or the latest version is not present,
 * assume the item is latest.
 * @param context the DSpace context.
 * @param item the item that should be checked.
 * @return true if the item is the latest version, false otherwise.
 * @throws SQLException if a database error occurs while reading the version history.
 */
protected boolean isLatestVersion(Context context, Item item) throws SQLException {
    VersionHistory history = versionHistoryService.findByItem(context, item);
    if (history == null) {
        // not all items have a version history
        // if an item does not have a version history, it is by definition the latest version
        return true;
    }
    // start with the very latest version of the given item (may still be in workspace)
    Version latestVersion = versionHistoryService.getLatestVersion(context, history);
    // find the latest version of the given item that is archived
    while (latestVersion != null && !latestVersion.getItem().isArchived()) {
        latestVersion = versionHistoryService.getPrevious(context, history, latestVersion);
    }
    // could not find an archived version of the given item
    if (latestVersion == null) {
        // this scenario should never happen, but let's err on the side of showing too many items vs. too few
        // (see discovery.xml, a lot of discovery configs filter out all items that are not the latest version)
        return true;
    }
    // sanity check (only active when assertions are enabled)
    assert latestVersion.getItem().isArchived();
    return item.equals(latestVersion.getItem());
}
/**
 * Build the standard item document and mark it as "preDB": the item is being
 * indexed before its database state is finalized (see {@code STATUS_FIELD_PREDB}).
 */
@Override
public SolrInputDocument buildNewDocument(Context context, IndexableItem indexableItem)
    throws SQLException, IOException {
    SolrInputDocument doc = buildDocument(context, indexableItem);
    // flag the document so stale-document cleanup can recognize pre-database entries
    doc.addField(STATUS_FIELD, STATUS_FIELD_PREDB);
    return doc;
}
@Override @Override
public void addDiscoveryFields(SolrInputDocument doc, Context context, Item item, public void addDiscoveryFields(SolrInputDocument doc, Context context, Item item,
List<DiscoveryConfiguration> discoveryConfigurations) List<DiscoveryConfiguration> discoveryConfigurations)
@@ -713,26 +764,31 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
} }
@Override @Override
public List getIndexableObjects(Context context, Item object) throws SQLException { public List getIndexableObjects(Context context, Item item) throws SQLException {
List<IndexableObject> results = new ArrayList<>(); if (item.isArchived() || item.isWithdrawn()) {
if (object.isArchived() || object.isWithdrawn()) { // we only want to index an item as an item if it is not in workflow
// We only want to index an item as an item if it is not in workflow return List.of(new IndexableItem(item));
results.addAll(Arrays.asList(new IndexableItem(object)));
} else {
// Check if we have a workflow / workspace item
final WorkspaceItem workspaceItem = workspaceItemService.findByItem(context, object);
if (workspaceItem != null) {
results.addAll(workspaceItemIndexFactory.getIndexableObjects(context, workspaceItem));
} else {
// Check if we a workflow item
final XmlWorkflowItem xmlWorkflowItem = xmlWorkflowItemService.findByItem(context, object);
if (xmlWorkflowItem != null) {
results.addAll(workflowItemIndexFactory.getIndexableObjects(context, xmlWorkflowItem));
}
}
} }
return results; final WorkspaceItem workspaceItem = workspaceItemService.findByItem(context, item);
if (workspaceItem != null) {
// a workspace item is linked to the given item
return List.copyOf(workspaceItemIndexFactory.getIndexableObjects(context, workspaceItem));
}
final XmlWorkflowItem xmlWorkflowItem = xmlWorkflowItemService.findByItem(context, item);
if (xmlWorkflowItem != null) {
// a workflow item is linked to the given item
return List.copyOf(workflowItemIndexFactory.getIndexableObjects(context, xmlWorkflowItem));
}
if (!isLatestVersion(context, item)) {
// the given item is an older version of another item
return List.of(new IndexableItem(item));
}
// nothing to index
return List.of();
} }
@Override @Override

View File

@@ -46,6 +46,14 @@ public interface IndexFactory<T extends IndexableObject, S> {
*/ */
SolrInputDocument buildDocument(Context context, T indexableObject) throws SQLException, IOException; SolrInputDocument buildDocument(Context context, T indexableObject) throws SQLException, IOException;
/**
 * Create solr document with all the shared fields initialized.
 * Can contain special fields required for "new" documents vs regular buildDocument
 * (e.g. a marker that the object is not yet present in the database).
 * @param context the DSpace context
 * @param indexableObject the indexableObject that we want to index
 * @return initialized solr document
 * @throws SQLException if a database error occurs while building the document
 * @throws IOException if an I/O error occurs while building the document
 */
SolrInputDocument buildNewDocument(Context context, T indexableObject) throws SQLException, IOException;
/** /**
* Write the provided document to the solr core * Write the provided document to the solr core
* @param context DSpace context object * @param context DSpace context object

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.utils; package org.dspace.discovery.utils;
import static java.util.Collections.emptyList; import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList; import static java.util.Collections.singletonList;
@@ -19,10 +19,6 @@ import java.util.Objects;
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.converter.query.SearchQueryConverter;
import org.dspace.app.rest.exception.DSpaceBadRequestException;
import org.dspace.app.rest.exception.InvalidSearchRequestException;
import org.dspace.app.rest.parameter.SearchFilter;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.LogHelper; import org.dspace.core.LogHelper;
import org.dspace.discovery.DiscoverFacetField; import org.dspace.discovery.DiscoverFacetField;
@@ -32,6 +28,7 @@ import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.FacetYearRange; import org.dspace.discovery.FacetYearRange;
import org.dspace.discovery.IndexableObject; import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchService; import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationParameters; import org.dspace.discovery.configuration.DiscoveryConfigurationParameters;
import org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration; import org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration;
@@ -40,17 +37,11 @@ import org.dspace.discovery.configuration.DiscoverySearchFilterFacet;
import org.dspace.discovery.configuration.DiscoverySortConfiguration; import org.dspace.discovery.configuration.DiscoverySortConfiguration;
import org.dspace.discovery.configuration.DiscoverySortFieldConfiguration; import org.dspace.discovery.configuration.DiscoverySortFieldConfiguration;
import org.dspace.discovery.indexobject.factory.IndexFactory; import org.dspace.discovery.indexobject.factory.IndexFactory;
import org.dspace.discovery.utils.parameter.QueryBuilderSearchFilter;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Component;
/**
* This class builds the queries for the /search and /facet endpoints.
*/
@Component
public class DiscoverQueryBuilder implements InitializingBean { public class DiscoverQueryBuilder implements InitializingBean {
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(DiscoverQueryBuilder.class); private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(DiscoverQueryBuilder.class);
@@ -74,51 +65,60 @@ public class DiscoverQueryBuilder implements InitializingBean {
/** /**
* Build a discovery query * Build a discovery query
* *
* @param context the DSpace context * @param context the DSpace context
* @param scope the scope for this discovery query * @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query * @param discoveryConfiguration the discovery configuration for this discovery query
* @param query the query string for this discovery query * @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query * @param searchFilters the search filters for this discovery query
* @param dsoType only include search results with this type * @param dsoType only include search results with this type
* @param page the pageable for this discovery query * @param pageSize the page size for this discovery query
* @param offset the offset for this discovery query
* @param sortProperty the sort property for this discovery query
* @param sortDirection the sort direction for this discovery query
*/ */
public DiscoverQuery buildQuery(Context context, IndexableObject scope, public DiscoverQuery buildQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration, DiscoveryConfiguration discoveryConfiguration,
String query, List<SearchFilter> searchFilters, String query, List<QueryBuilderSearchFilter> searchFilters,
String dsoType, Pageable page) String dsoType, Integer pageSize, Long offset, String sortProperty,
throws DSpaceBadRequestException { String sortDirection) throws SearchServiceException {
List<String> dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList(); List<String> dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList();
return buildQuery(context, scope, discoveryConfiguration, query, searchFilters, dsoTypes, page); return buildQuery(context, scope, discoveryConfiguration, query, searchFilters, dsoTypes, pageSize, offset,
sortProperty, sortDirection);
} }
/** /**
* Build a discovery query * Build a discovery query
* *
* @param context the DSpace context * @param context the DSpace context
* @param scope the scope for this discovery query * @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query * @param discoveryConfiguration the discovery configuration for this discovery query
* @param query the query string for this discovery query * @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query * @param searchFilters the search filters for this discovery query
* @param dsoTypes only include search results with one of these types * @param dsoTypes only include search results with one of these types
* @param page the pageable for this discovery query * @param pageSize the page size for this discovery query
* @param offset the offset for this discovery query
* @param sortProperty the sort property for this discovery query
* @param sortDirection the sort direction for this discovery query
*/ */
public DiscoverQuery buildQuery(Context context, IndexableObject scope, public DiscoverQuery buildQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration, DiscoveryConfiguration discoveryConfiguration,
String query, List<SearchFilter> searchFilters, String query, List<QueryBuilderSearchFilter> searchFilters,
List<String> dsoTypes, Pageable page) List<String> dsoTypes, Integer pageSize, Long offset, String sortProperty,
throws DSpaceBadRequestException { String sortDirection)
throws IllegalArgumentException, SearchServiceException {
DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters, DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters,
dsoTypes); dsoTypes);
//When all search criteria are set, configure facet results //When all search criteria are set, configure facet results
addFaceting(context, scope, queryArgs, discoveryConfiguration); addFaceting(context, scope, queryArgs, discoveryConfiguration);
//Configure pagination and sorting //Configure pagination and sorting
configurePagination(page, queryArgs); configurePagination(pageSize, offset, queryArgs);
configureSorting(page, queryArgs, discoveryConfiguration.getSearchSortConfiguration()); configureSorting(sortProperty, sortDirection, queryArgs, discoveryConfiguration.getSearchSortConfiguration());
addDiscoveryHitHighlightFields(discoveryConfiguration, queryArgs); addDiscoveryHitHighlightFields(discoveryConfiguration, queryArgs);
return queryArgs; return queryArgs;
@@ -128,11 +128,11 @@ public class DiscoverQueryBuilder implements InitializingBean {
DiscoverQuery queryArgs) { DiscoverQuery queryArgs) {
if (discoveryConfiguration.getHitHighlightingConfiguration() != null) { if (discoveryConfiguration.getHitHighlightingConfiguration() != null) {
List<DiscoveryHitHighlightFieldConfiguration> metadataFields = discoveryConfiguration List<DiscoveryHitHighlightFieldConfiguration> metadataFields = discoveryConfiguration
.getHitHighlightingConfiguration().getMetadataFields(); .getHitHighlightingConfiguration().getMetadataFields();
for (DiscoveryHitHighlightFieldConfiguration fieldConfiguration : metadataFields) { for (DiscoveryHitHighlightFieldConfiguration fieldConfiguration : metadataFields) {
queryArgs.addHitHighlightingField( queryArgs.addHitHighlightingField(
new DiscoverHitHighlightingField(fieldConfiguration.getField(), fieldConfiguration.getMaxSize(), new DiscoverHitHighlightingField(fieldConfiguration.getField(), fieldConfiguration.getMaxSize(),
fieldConfiguration.getSnippets())); fieldConfiguration.getSnippets()));
} }
} }
} }
@@ -140,92 +140,97 @@ public class DiscoverQueryBuilder implements InitializingBean {
/** /**
* Create a discovery facet query. * Create a discovery facet query.
* *
* @param context the DSpace context * @param context the DSpace context
* @param scope the scope for this discovery query * @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query * @param discoveryConfiguration the discovery configuration for this discovery query
* @param prefix limit the facets results to those starting with the given prefix. * @param prefix limit the facets results to those starting with the given prefix.
* @param query the query string for this discovery query * @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query * @param searchFilters the search filters for this discovery query
* @param dsoType only include search results with this type * @param dsoType only include search results with this type
* @param page the pageable for this discovery query * @param pageSize the page size for this discovery query
* @param facetName the facet field * @param offset the offset for this discovery query
* @param facetName the facet field
*/ */
public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope, public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration, DiscoveryConfiguration discoveryConfiguration,
String prefix, String query, List<SearchFilter> searchFilters, String prefix, String query, List<QueryBuilderSearchFilter> searchFilters,
String dsoType, Pageable page, String facetName) String dsoType, Integer pageSize, Long offset, String facetName)
throws DSpaceBadRequestException { throws IllegalArgumentException {
List<String> dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList(); List<String> dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList();
return buildFacetQuery( return buildFacetQuery(
context, scope, discoveryConfiguration, prefix, query, searchFilters, dsoTypes, page, facetName); context, scope, discoveryConfiguration, prefix, query, searchFilters, dsoTypes, pageSize, offset,
facetName);
} }
/** /**
* Create a discovery facet query. * Create a discovery facet query.
* *
* @param context the DSpace context * @param context the DSpace context
* @param scope the scope for this discovery query * @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query * @param discoveryConfiguration the discovery configuration for this discovery query
* @param prefix limit the facets results to those starting with the given prefix. * @param prefix limit the facets results to those starting with the given prefix.
* @param query the query string for this discovery query * @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query * @param searchFilters the search filters for this discovery query
* @param dsoTypes only include search results with one of these types * @param dsoTypes only include search results with one of these types
* @param page the pageable for this discovery query * @param pageSize the page size for this discovery query
* @param facetName the facet field * @param offset the offset for this discovery query
* @param facetName the facet field
*/ */
public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope, public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration, DiscoveryConfiguration discoveryConfiguration,
String prefix, String query, List<SearchFilter> searchFilters, String prefix, String query, List<QueryBuilderSearchFilter> searchFilters,
List<String> dsoTypes, Pageable page, String facetName) List<String> dsoTypes, Integer pageSize, Long offset, String facetName)
throws DSpaceBadRequestException { throws IllegalArgumentException {
DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters, DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters,
dsoTypes); dsoTypes);
//When all search criteria are set, configure facet results //When all search criteria are set, configure facet results
addFacetingForFacets(context, scope, prefix, queryArgs, discoveryConfiguration, facetName, page); addFacetingForFacets(context, scope, prefix, queryArgs, discoveryConfiguration, facetName, pageSize);
//We don' want any search results, we only want facet values //We don' want any search results, we only want facet values
queryArgs.setMaxResults(0); queryArgs.setMaxResults(0);
//Configure pagination //Configure pagination
configurePaginationForFacets(page, queryArgs); configurePaginationForFacets(offset, queryArgs);
return queryArgs; return queryArgs;
} }
private void configurePaginationForFacets(Pageable page, DiscoverQuery queryArgs) { private void configurePaginationForFacets(Long offset, DiscoverQuery queryArgs) {
if (page != null) { if (offset != null) {
queryArgs.setFacetOffset(Math.toIntExact(page.getOffset())); queryArgs.setFacetOffset(Math.toIntExact(offset));
} }
} }
private DiscoverQuery addFacetingForFacets(Context context, IndexableObject scope, String prefix, private DiscoverQuery addFacetingForFacets(Context context, IndexableObject scope, String prefix,
DiscoverQuery queryArgs, DiscoveryConfiguration discoveryConfiguration, String facetName, Pageable page) DiscoverQuery queryArgs, DiscoveryConfiguration discoveryConfiguration,
throws DSpaceBadRequestException { String facetName, Integer pageSize)
throws IllegalArgumentException {
DiscoverySearchFilterFacet facet = discoveryConfiguration.getSidebarFacet(facetName); DiscoverySearchFilterFacet facet = discoveryConfiguration.getSidebarFacet(facetName);
if (facet != null) { if (facet != null) {
queryArgs.setFacetMinCount(1); queryArgs.setFacetMinCount(1);
int pageSize = Math.min(pageSizeLimit, page.getPageSize());
pageSize = pageSize != null ? Math.min(pageSizeLimit, pageSize) : pageSizeLimit;
fillFacetIntoQueryArgs(context, scope, prefix, queryArgs, facet, pageSize); fillFacetIntoQueryArgs(context, scope, prefix, queryArgs, facet, pageSize);
} else { } else {
throw new DSpaceBadRequestException(facetName + " is not a valid search facet"); throw new IllegalArgumentException(facetName + " is not a valid search facet");
} }
return queryArgs; return queryArgs;
} }
private void fillFacetIntoQueryArgs(Context context, IndexableObject scope, String prefix, private void fillFacetIntoQueryArgs(Context context, IndexableObject scope, String prefix,
DiscoverQuery queryArgs, DiscoverySearchFilterFacet facet, final int pageSize) { DiscoverQuery queryArgs, DiscoverySearchFilterFacet facet, final int pageSize) {
if (facet.getType().equals(DiscoveryConfigurationParameters.TYPE_DATE)) { if (facet.getType().equals(DiscoveryConfigurationParameters.TYPE_DATE)) {
try { try {
FacetYearRange facetYearRange = FacetYearRange facetYearRange =
searchService.getFacetYearRange(context, scope, facet, queryArgs.getFilterQueries(), queryArgs); searchService.getFacetYearRange(context, scope, facet, queryArgs.getFilterQueries(), queryArgs);
queryArgs.addYearRangeFacet(facet, facetYearRange); queryArgs.addYearRangeFacet(facet, facetYearRange);
@@ -241,18 +246,18 @@ public class DiscoverQueryBuilder implements InitializingBean {
int facetLimit = pageSize + 1; int facetLimit = pageSize + 1;
//This should take care of the sorting for us //This should take care of the sorting for us
queryArgs.addFacetField(new DiscoverFacetField(facet.getIndexFieldName(), facet.getType(), facetLimit, queryArgs.addFacetField(new DiscoverFacetField(facet.getIndexFieldName(), facet.getType(), facetLimit,
facet.getSortOrderSidebar(), StringUtils.trimToNull(prefix))); facet.getSortOrderSidebar(),
StringUtils.trimToNull(prefix)));
} }
} }
private DiscoverQuery buildCommonDiscoverQuery(Context context, DiscoveryConfiguration discoveryConfiguration, private DiscoverQuery buildCommonDiscoverQuery(Context context, DiscoveryConfiguration discoveryConfiguration,
String query, String query,
List<SearchFilter> searchFilters, List<String> dsoTypes) List<QueryBuilderSearchFilter> searchFilters, List<String> dsoTypes)
throws DSpaceBadRequestException { throws IllegalArgumentException {
DiscoverQuery queryArgs = buildBaseQueryForConfiguration(discoveryConfiguration); DiscoverQuery queryArgs = buildBaseQueryForConfiguration(discoveryConfiguration);
//Add search filters queryArgs.addFilterQueries(convertFiltersToString(context, discoveryConfiguration, searchFilters));
queryArgs.addFilterQueries(convertFilters(context, discoveryConfiguration, searchFilters));
//Set search query //Set search query
if (StringUtils.isNotBlank(query)) { if (StringUtils.isNotBlank(query)) {
@@ -274,30 +279,17 @@ public class DiscoverQueryBuilder implements InitializingBean {
queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId()); queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId());
queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries() queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries()
.toArray( .toArray(
new String[discoveryConfiguration.getDefaultFilterQueries() new String[discoveryConfiguration
.size()])); .getDefaultFilterQueries()
.size()]));
return queryArgs; return queryArgs;
} }
private void configureSorting(Pageable page, DiscoverQuery queryArgs, private void configureSorting(String sortProperty, String sortDirection, DiscoverQuery queryArgs,
DiscoverySortConfiguration searchSortConfiguration) throws DSpaceBadRequestException { DiscoverySortConfiguration searchSortConfiguration)
String sortBy = null; throws IllegalArgumentException, SearchServiceException {
String sortOrder = null; String sortBy = sortProperty;
String sortOrder = sortDirection;
//Read the Pageable object if there is one
if (page != null) {
Sort sort = page.getSort();
if (sort != null && sort.iterator().hasNext()) {
Sort.Order order = sort.iterator().next();
sortBy = order.getProperty();
sortOrder = order.getDirection().name();
}
}
if (StringUtils.isNotBlank(sortBy) && !isConfigured(sortBy, searchSortConfiguration)) {
throw new InvalidSearchRequestException(
"The field: " + sortBy + "is not configured for the configuration!");
}
//Load defaults if we did not receive values //Load defaults if we did not receive values
if (sortBy == null) { if (sortBy == null) {
@@ -307,24 +299,30 @@ public class DiscoverQueryBuilder implements InitializingBean {
sortOrder = getDefaultSortDirection(searchSortConfiguration, sortOrder); sortOrder = getDefaultSortDirection(searchSortConfiguration, sortOrder);
} }
if (StringUtils.isNotBlank(sortBy) && !isConfigured(sortBy, searchSortConfiguration)) {
throw new SearchServiceException(
"The field: " + sortBy + "is not configured for the configuration!");
}
//Update Discovery query //Update Discovery query
DiscoverySortFieldConfiguration sortFieldConfiguration = searchSortConfiguration DiscoverySortFieldConfiguration sortFieldConfiguration = searchSortConfiguration
.getSortFieldConfiguration(sortBy); .getSortFieldConfiguration(sortBy);
if (sortFieldConfiguration != null) { if (sortFieldConfiguration != null) {
String sortField = searchService String sortField = searchService
.toSortFieldIndex(sortFieldConfiguration.getMetadataField(), sortFieldConfiguration.getType()); .toSortFieldIndex(sortFieldConfiguration.getMetadataField(), sortFieldConfiguration.getType());
if ("asc".equalsIgnoreCase(sortOrder)) { if ("asc".equalsIgnoreCase(sortOrder)) {
queryArgs.setSortField(sortField, DiscoverQuery.SORT_ORDER.asc); queryArgs.setSortField(sortField, DiscoverQuery.SORT_ORDER.asc);
} else if ("desc".equalsIgnoreCase(sortOrder)) { } else if ("desc".equalsIgnoreCase(sortOrder)) {
queryArgs.setSortField(sortField, DiscoverQuery.SORT_ORDER.desc); queryArgs.setSortField(sortField, DiscoverQuery.SORT_ORDER.desc);
} else { } else {
throw new DSpaceBadRequestException(sortOrder + " is not a valid sort order"); throw new IllegalArgumentException(sortOrder + " is not a valid sort order");
} }
} else { } else {
throw new DSpaceBadRequestException(sortBy + " is not a valid sort field"); throw new IllegalArgumentException(sortBy + " is not a valid sort field");
} }
} }
@@ -334,7 +332,7 @@ public class DiscoverQueryBuilder implements InitializingBean {
private String getDefaultSortDirection(DiscoverySortConfiguration searchSortConfiguration, String sortOrder) { private String getDefaultSortDirection(DiscoverySortConfiguration searchSortConfiguration, String sortOrder) {
if (Objects.nonNull(searchSortConfiguration.getSortFields()) && if (Objects.nonNull(searchSortConfiguration.getSortFields()) &&
!searchSortConfiguration.getSortFields().isEmpty()) { !searchSortConfiguration.getSortFields().isEmpty()) {
sortOrder = searchSortConfiguration.getSortFields().get(0).getDefaultSortOrder().name(); sortOrder = searchSortConfiguration.getSortFields().get(0).getDefaultSortOrder().name();
} }
return sortOrder; return sortOrder;
@@ -344,7 +342,7 @@ public class DiscoverQueryBuilder implements InitializingBean {
String sortBy;// Attempt to find the default one, if none found we use SCORE String sortBy;// Attempt to find the default one, if none found we use SCORE
sortBy = "score"; sortBy = "score";
if (Objects.nonNull(searchSortConfiguration.getSortFields()) && if (Objects.nonNull(searchSortConfiguration.getSortFields()) &&
!searchSortConfiguration.getSortFields().isEmpty()) { !searchSortConfiguration.getSortFields().isEmpty()) {
DiscoverySortFieldConfiguration defaultSort = searchSortConfiguration.getSortFields().get(0); DiscoverySortFieldConfiguration defaultSort = searchSortConfiguration.getSortFields().get(0);
if (StringUtils.isBlank(defaultSort.getMetadataField())) { if (StringUtils.isBlank(defaultSort.getMetadataField())) {
return sortBy; return sortBy;
@@ -354,66 +352,31 @@ public class DiscoverQueryBuilder implements InitializingBean {
return sortBy; return sortBy;
} }
private void configurePagination(Pageable page, DiscoverQuery queryArgs) { private void configurePagination(Integer size, Long offset, DiscoverQuery queryArgs) {
if (page != null) { queryArgs.setMaxResults(size != null ? Math.min(pageSizeLimit, size) : pageSizeLimit);
queryArgs.setMaxResults(Math.min(pageSizeLimit, page.getPageSize())); queryArgs.setStart(offset != null ? Math.toIntExact(offset) : 0);
queryArgs.setStart(Math.toIntExact(page.getOffset()));
} else {
queryArgs.setMaxResults(pageSizeLimit);
queryArgs.setStart(0);
}
} }
private String getDsoType(String dsoType) throws DSpaceBadRequestException { private String getDsoType(String dsoType) throws IllegalArgumentException {
for (IndexFactory indexFactory : indexableFactories) { for (IndexFactory indexFactory : indexableFactories) {
if (StringUtils.equalsIgnoreCase(indexFactory.getType(), dsoType)) { if (StringUtils.equalsIgnoreCase(indexFactory.getType(), dsoType)) {
return indexFactory.getType(); return indexFactory.getType();
} }
} }
throw new DSpaceBadRequestException(dsoType + " is not a valid DSpace Object type"); throw new IllegalArgumentException(dsoType + " is not a valid DSpace Object type");
} }
public void setIndexableFactories(List<IndexFactory> indexableFactories) { public void setIndexableFactories(List<IndexFactory> indexableFactories) {
this.indexableFactories = indexableFactories; this.indexableFactories = indexableFactories;
} }
private String[] convertFilters(Context context, DiscoveryConfiguration discoveryConfiguration,
List<SearchFilter> searchFilters) throws DSpaceBadRequestException {
ArrayList<String> filterQueries = new ArrayList<>(CollectionUtils.size(searchFilters));
SearchQueryConverter searchQueryConverter = new SearchQueryConverter();
List<SearchFilter> transformedFilters = searchQueryConverter.convert(searchFilters);
try {
for (SearchFilter searchFilter : CollectionUtils.emptyIfNull(transformedFilters)) {
DiscoverySearchFilter filter = discoveryConfiguration.getSearchFilter(searchFilter.getName());
if (filter == null) {
throw new DSpaceBadRequestException(searchFilter.getName() + " is not a valid search filter");
}
DiscoverFilterQuery filterQuery = searchService.toFilterQuery(context,
filter.getIndexFieldName(),
searchFilter.getOperator(),
searchFilter.getValue(),
discoveryConfiguration);
if (filterQuery != null) {
filterQueries.add(filterQuery.getFilterQuery());
}
}
} catch (SQLException e) {
throw new DSpaceBadRequestException("There was a problem parsing the search filters.", e);
}
return filterQueries.toArray(new String[filterQueries.size()]);
}
private DiscoverQuery addFaceting(Context context, IndexableObject scope, DiscoverQuery queryArgs, private DiscoverQuery addFaceting(Context context, IndexableObject scope, DiscoverQuery queryArgs,
DiscoveryConfiguration discoveryConfiguration) { DiscoveryConfiguration discoveryConfiguration) {
List<DiscoverySearchFilterFacet> facets = discoveryConfiguration.getSidebarFacets(); List<DiscoverySearchFilterFacet> facets = discoveryConfiguration.getSidebarFacets();
log.debug("facets for configuration " + discoveryConfiguration.getId() + ": " + (facets != null ? facets log.debug("facets for configuration " + discoveryConfiguration.getId() + ": " + (facets != null ? facets
.size() : null)); .size() : null));
if (facets != null) { if (facets != null) {
queryArgs.setFacetMinCount(1); queryArgs.setFacetMinCount(1);
@@ -427,4 +390,34 @@ public class DiscoverQueryBuilder implements InitializingBean {
return queryArgs; return queryArgs;
} }
private String[] convertFiltersToString(Context context, DiscoveryConfiguration discoveryConfiguration,
List<QueryBuilderSearchFilter> searchFilters)
throws IllegalArgumentException {
ArrayList<String> filterQueries = new ArrayList<>(CollectionUtils.size(searchFilters));
try {
for (QueryBuilderSearchFilter searchFilter : CollectionUtils.emptyIfNull(searchFilters)) {
DiscoverySearchFilter filter = discoveryConfiguration.getSearchFilter(searchFilter.getName());
if (filter == null) {
throw new IllegalArgumentException(searchFilter.getName() + " is not a valid search filter");
}
DiscoverFilterQuery filterQuery = searchService.toFilterQuery(context,
filter.getIndexFieldName(),
searchFilter.getOperator(),
searchFilter.getValue(),
discoveryConfiguration);
if (filterQuery != null) {
filterQueries.add(filterQuery.getFilterQuery());
}
}
} catch (SQLException e) {
throw new IllegalArgumentException("There was a problem parsing the search filters.", e);
}
return filterQueries.toArray(new String[filterQueries.size()]);
}
} }

View File

@@ -0,0 +1,70 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.discovery.utils.parameter;
import java.util.Objects;
import org.apache.commons.lang3.StringUtils;
/**
* Representation for a Discovery search filter
*/
public class QueryBuilderSearchFilter {
private String name;
private String operator;
private String value;
public QueryBuilderSearchFilter(final String name, final String operator, final String value) {
this.name = name;
this.operator = operator;
this.value = value;
}
public String getName() {
return name;
}
public String getOperator() {
return operator;
}
public String getValue() {
return value;
}
public String toString() {
return "QueryBuilderSearchFilter{" +
"name='" + name + '\'' +
", operator='" + operator + '\'' +
", value='" + value + '\'' +
'}';
}
public boolean equals(Object object) {
if (object instanceof QueryBuilderSearchFilter) {
QueryBuilderSearchFilter obj = (QueryBuilderSearchFilter) object;
if (!StringUtils.equals(obj.getName(), getName())) {
return false;
}
if (!StringUtils.equals(obj.getOperator(), getOperator())) {
return false;
}
if (!StringUtils.equals(obj.getValue(), getValue())) {
return false;
}
return true;
}
return false;
}
public int hashCode() {
return Objects.hash(name, operator, value);
}
}

View File

@@ -7,6 +7,8 @@
*/ */
package org.dspace.eperson; package org.dspace.eperson;
import static org.dspace.content.Item.ANY;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
@@ -30,6 +32,7 @@ import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.DSpaceObjectServiceImpl; import org.dspace.content.DSpaceObjectServiceImpl;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.MetadataField; import org.dspace.content.MetadataField;
import org.dspace.content.MetadataValue;
import org.dspace.content.WorkspaceItem; import org.dspace.content.WorkspaceItem;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
@@ -43,6 +46,8 @@ import org.dspace.eperson.service.EPersonService;
import org.dspace.eperson.service.GroupService; import org.dspace.eperson.service.GroupService;
import org.dspace.eperson.service.SubscribeService; import org.dspace.eperson.service.SubscribeService;
import org.dspace.event.Event; import org.dspace.event.Event;
import org.dspace.orcid.service.OrcidTokenService;
import org.dspace.util.UUIDUtils;
import org.dspace.versioning.Version; import org.dspace.versioning.Version;
import org.dspace.versioning.VersionHistory; import org.dspace.versioning.VersionHistory;
import org.dspace.versioning.dao.VersionDAO; import org.dspace.versioning.dao.VersionDAO;
@@ -96,6 +101,8 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
protected VersionDAO versionDAO; protected VersionDAO versionDAO;
@Autowired(required = true) @Autowired(required = true)
protected ClaimedTaskService claimedTaskService; protected ClaimedTaskService claimedTaskService;
@Autowired
protected OrcidTokenService orcidTokenService;
protected EPersonServiceImpl() { protected EPersonServiceImpl() {
super(); super();
@@ -379,6 +386,8 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
group.getMembers().remove(ePerson); group.getMembers().remove(ePerson);
} }
orcidTokenService.deleteByEPerson(context, ePerson);
// Remove any subscriptions // Remove any subscriptions
subscribeService.deleteByEPerson(context, ePerson); subscribeService.deleteByEPerson(context, ePerson);
@@ -569,4 +578,18 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
public int countTotal(Context context) throws SQLException { public int countTotal(Context context) throws SQLException {
return ePersonDAO.countRows(context); return ePersonDAO.countRows(context);
} }
    /**
     * Resolves the EPerson owning the given researcher-profile item by reading
     * the item's {@code dspace.object.owner} metadata: the owner's UUID is
     * taken from the first value's authority. Returns {@code null} when the
     * item carries no such metadata (i.e. it is not a profile item).
     */
    @Override
    public EPerson findByProfileItem(Context context, Item profile) throws SQLException {
        // The owner's UUID is stored as the authority of dspace.object.owner.
        List<MetadataValue> owners = itemService.getMetadata(profile, "dspace", "object", "owner", ANY);
        if (CollectionUtils.isEmpty(owners)) {
            return null;
        }
        return find(context, UUIDUtils.fromString(owners.get(0).getAuthority()));
    }
    /** Returns the EPerson's display name by delegating to the object itself. */
    @Override
    public String getName(EPerson dso) {
        return dso.getName();
    }
} }

View File

@@ -829,4 +829,9 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
final MetadataField metadataField) throws SQLException { final MetadataField metadataField) throws SQLException {
return groupDAO.findByMetadataField(context, searchValue, metadataField); return groupDAO.findByMetadataField(context, searchValue, metadataField);
} }
    /** Returns the Group's name by delegating to the object itself. */
    @Override
    public String getName(Group dso) {
        return dso.getName();
    }
} }

View File

@@ -15,6 +15,7 @@ import java.util.List;
import java.util.Set; import java.util.Set;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.content.MetadataFieldName; import org.dspace.content.MetadataFieldName;
import org.dspace.content.service.DSpaceObjectLegacySupportService; import org.dspace.content.service.DSpaceObjectLegacySupportService;
import org.dspace.content.service.DSpaceObjectService; import org.dspace.content.service.DSpaceObjectService;
@@ -263,4 +264,16 @@ public interface EPersonService extends DSpaceObjectService<EPerson>, DSpaceObje
* @throws SQLException An exception that provides information on a database access error or other errors. * @throws SQLException An exception that provides information on a database access error or other errors.
*/ */
int countTotal(Context context) throws SQLException; int countTotal(Context context) throws SQLException;
/**
* Find the EPerson related to the given profile item. If the given item is not
* a profile item, null is returned.
*
* @param context The relevant DSpace Context.
* @param profile the profile item to search for
* @return the EPerson, if any
* @throws SQLException An exception that provides information on a database
* access error or other errors.
*/
EPerson findByProfileItem(Context context, Item profile) throws SQLException;
} }

View File

@@ -0,0 +1,547 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.external.provider.impl;
import static java.util.Collections.emptyList;
import static java.util.Comparator.comparing;
import static java.util.Comparator.reverseOrder;
import static java.util.Optional.ofNullable;
import static org.apache.commons.collections4.ListUtils.partition;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import static org.orcid.jaxb.model.common.CitationType.FORMATTED_UNSPECIFIED;
import java.io.File;
import java.io.FileOutputStream;
import java.nio.charset.Charset;
import java.util.List;
import java.util.Optional;
import java.util.function.Supplier;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;
import org.dspace.content.Item;
import org.dspace.content.MetadataFieldName;
import org.dspace.content.dto.MetadataValueDTO;
import org.dspace.core.Context;
import org.dspace.external.model.ExternalDataObject;
import org.dspace.external.provider.AbstractExternalDataProvider;
import org.dspace.external.provider.ExternalDataProvider;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.dspace.importer.external.service.ImportService;
import org.dspace.orcid.OrcidToken;
import org.dspace.orcid.client.OrcidClient;
import org.dspace.orcid.client.OrcidConfiguration;
import org.dspace.orcid.model.OrcidTokenResponseDTO;
import org.dspace.orcid.model.OrcidWorkFieldMapping;
import org.dspace.orcid.service.OrcidSynchronizationService;
import org.dspace.orcid.service.OrcidTokenService;
import org.dspace.web.ContextUtil;
import org.orcid.jaxb.model.common.ContributorRole;
import org.orcid.jaxb.model.common.WorkType;
import org.orcid.jaxb.model.v3.release.common.Contributor;
import org.orcid.jaxb.model.v3.release.common.ContributorAttributes;
import org.orcid.jaxb.model.v3.release.common.PublicationDate;
import org.orcid.jaxb.model.v3.release.common.Subtitle;
import org.orcid.jaxb.model.v3.release.common.Title;
import org.orcid.jaxb.model.v3.release.record.Citation;
import org.orcid.jaxb.model.v3.release.record.ExternalIDs;
import org.orcid.jaxb.model.v3.release.record.SourceAware;
import org.orcid.jaxb.model.v3.release.record.Work;
import org.orcid.jaxb.model.v3.release.record.WorkBulk;
import org.orcid.jaxb.model.v3.release.record.WorkContributors;
import org.orcid.jaxb.model.v3.release.record.WorkTitle;
import org.orcid.jaxb.model.v3.release.record.summary.WorkGroup;
import org.orcid.jaxb.model.v3.release.record.summary.WorkSummary;
import org.orcid.jaxb.model.v3.release.record.summary.Works;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implementation of {@link ExternalDataProvider} that search for all the works
* of the profile with the given orcid id that hava a source other than DSpace.
* The id of the external data objects returned by the methods of this class is
* the concatenation of the orcid id and the put code associated with the
* publication, separated by :: (example 0000-0000-0123-4567::123456)
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class OrcidPublicationDataProvider extends AbstractExternalDataProvider {
private final static Logger LOGGER = LoggerFactory.getLogger(OrcidPublicationDataProvider.class);
/**
* Examples of valid ORCID IDs:
* <ul>
* <li>0000-0002-1825-0097</li>
* <li>0000-0001-5109-3700</li>
* <li>0000-0002-1694-233X</li>
* </ul>
*/
private final static Pattern ORCID_ID_PATTERN = Pattern.compile("(\\d{4}-){3}\\d{3}(\\d|X)");
private final static int MAX_PUT_CODES_SIZE = 100;
@Autowired
private OrcidClient orcidClient;
@Autowired
private OrcidConfiguration orcidConfiguration;
@Autowired
private OrcidSynchronizationService orcidSynchronizationService;
@Autowired
private ImportService importService;
@Autowired
private OrcidTokenService orcidTokenService;
private OrcidWorkFieldMapping fieldMapping;
private String sourceIdentifier;
private String readPublicAccessToken;
@Override
public Optional<ExternalDataObject> getExternalDataObject(String id) {
if (isInvalidIdentifier(id)) {
throw new IllegalArgumentException("Invalid identifier '" + id + "', expected <orcid-id>::<put-code>");
}
String[] idSections = id.split("::");
String orcid = idSections[0];
String putCode = idSections[1];
validateOrcidId(orcid);
return getWork(orcid, putCode)
.filter(work -> hasDifferentSourceClientId(work))
.filter(work -> work.getPutCode() != null)
.map(work -> convertToExternalDataObject(orcid, work));
}
@Override
public List<ExternalDataObject> searchExternalDataObjects(String orcid, int start, int limit) {
validateOrcidId(orcid);
return findWorks(orcid, start, limit).stream()
.map(work -> convertToExternalDataObject(orcid, work))
.collect(Collectors.toList());
}
private boolean isInvalidIdentifier(String id) {
return StringUtils.isBlank(id) || id.split("::").length != 2;
}
private void validateOrcidId(String orcid) {
if (!ORCID_ID_PATTERN.matcher(orcid).matches()) {
throw new IllegalArgumentException("The given ORCID ID is not valid: " + orcid);
}
}
/**
* Returns all the works related to the given ORCID in the range from start and
* limit.
*
* @param orcid the ORCID ID of the author to search for works
* @param start the start index
* @param limit the limit index
* @return the list of the works
*/
private List<Work> findWorks(String orcid, int start, int limit) {
List<WorkSummary> workSummaries = findWorkSummaries(orcid, start, limit);
return findWorks(orcid, workSummaries);
}
/**
* Returns all the works summaries related to the given ORCID in the range from
* start and limit.
*
* @param orcid the ORCID ID of the author to search for works summaries
* @param start the start index
* @param limit the limit index
* @return the list of the works summaries
*/
private List<WorkSummary> findWorkSummaries(String orcid, int start, int limit) {
return getWorks(orcid).getWorkGroup().stream()
.filter(workGroup -> allWorkSummariesHaveDifferentSourceClientId(workGroup))
.map(workGroup -> getPreferredWorkSummary(workGroup))
.flatMap(Optional::stream)
.skip(start)
.limit(limit > 0 ? limit : Long.MAX_VALUE)
.collect(Collectors.toList());
}
/**
* Returns all the works related to the given ORCID ID and work summaries (a
* work has more details than a work summary).
*
* @param orcid the ORCID id of the author to search for works
* @param workSummaries the work summaries used to search the related works
* @return the list of the works
*/
private List<Work> findWorks(String orcid, List<WorkSummary> workSummaries) {
List<String> workPutCodes = getPutCodes(workSummaries);
if (CollectionUtils.isEmpty(workPutCodes)) {
return emptyList();
}
if (workPutCodes.size() == 1) {
return getWork(orcid, workPutCodes.get(0)).stream().collect(Collectors.toList());
}
return partition(workPutCodes, MAX_PUT_CODES_SIZE).stream()
.map(putCodes -> getWorkBulk(orcid, putCodes))
.flatMap(workBulk -> getWorks(workBulk).stream())
.collect(Collectors.toList());
}
/**
* Search a work by ORCID id and putcode, using API or PUBLIC urls based on
* whether the ORCID API keys are configured or not.
*
* @param orcid the ORCID ID
* @param putCode the work's identifier on ORCID
* @return the work, if any
*/
private Optional<Work> getWork(String orcid, String putCode) {
if (orcidConfiguration.isApiConfigured()) {
String accessToken = getAccessToken(orcid);
return orcidClient.getObject(accessToken, orcid, putCode, Work.class);
} else {
return orcidClient.getObject(orcid, putCode, Work.class);
}
}
/**
* Returns all the works related to the given ORCID.
*
* @param orcid the ORCID ID of the author to search for works
* @return the list of the works
*/
private Works getWorks(String orcid) {
if (orcidConfiguration.isApiConfigured()) {
String accessToken = getAccessToken(orcid);
return orcidClient.getWorks(accessToken, orcid);
} else {
return orcidClient.getWorks(orcid);
}
}
/**
* Returns all the works related to the given ORCID by the given putCodes.
*
* @param orcid the ORCID ID of the author to search for works
* @param putCodes the work's put codes to search
* @return the list of the works
*/
private WorkBulk getWorkBulk(String orcid, List<String> putCodes) {
if (orcidConfiguration.isApiConfigured()) {
String accessToken = getAccessToken(orcid);
return orcidClient.getWorkBulk(accessToken, orcid, putCodes);
} else {
return orcidClient.getWorkBulk(orcid, putCodes);
}
}
    /**
     * Resolves the access token for calls on behalf of the given ORCID id: the
     * token of the first profile item matching that ORCID, if one exists and
     * has a token; otherwise the cached "read public" token.
     */
    // NOTE(review): a fresh Context is created here and never completed/closed —
    // verify this does not leak a database connection.
    private String getAccessToken(String orcid) {
        List<Item> items = orcidSynchronizationService.findProfilesByOrcid(new Context(), orcid);
        return Optional.ofNullable(items.isEmpty() ? null : items.get(0))
            .flatMap(item -> getAccessToken(item))
            .orElseGet(() -> getReadPublicAccessToken());
    }
private Optional<String> getAccessToken(Item item) {
return ofNullable(orcidTokenService.findByProfileItem(getContext(), item))
.map(OrcidToken::getAccessToken);
}
private String getReadPublicAccessToken() {
if (readPublicAccessToken != null) {
return readPublicAccessToken;
}
OrcidTokenResponseDTO accessTokenResponse = orcidClient.getReadPublicAccessToken();
readPublicAccessToken = accessTokenResponse.getAccessToken();
return readPublicAccessToken;
}
private List<Work> getWorks(WorkBulk workBulk) {
return workBulk.getBulk().stream()
.filter(bulkElement -> (bulkElement instanceof Work))
.map(bulkElement -> ((Work) bulkElement))
.collect(Collectors.toList());
}
private List<String> getPutCodes(List<WorkSummary> workSummaries) {
return workSummaries.stream()
.map(WorkSummary::getPutCode)
.map(String::valueOf)
.collect(Collectors.toList());
}
private Optional<WorkSummary> getPreferredWorkSummary(WorkGroup workGroup) {
return workGroup.getWorkSummary().stream()
.filter(work -> work.getPutCode() != null)
.filter(work -> NumberUtils.isCreatable(work.getDisplayIndex()))
.sorted(comparing(work -> Integer.valueOf(work.getDisplayIndex()), reverseOrder()))
.findFirst();
}
    /**
     * Converts an ORCID work into an {@code ExternalDataObject}, mapping the
     * work's fields to DSpace metadata through the configured field mapping.
     * The object's id is {@code <orcid>::<putCode>}.
     *
     * @param orcid the ORCID iD the work belongs to
     * @param work  the ORCID work to convert
     * @return the populated external data object
     */
    private ExternalDataObject convertToExternalDataObject(String orcid, Work work) {
        ExternalDataObject externalDataObject = new ExternalDataObject(sourceIdentifier);
        externalDataObject.setId(orcid + "::" + work.getPutCode().toString());
        String title = getWorkTitle(work);
        externalDataObject.setDisplayValue(title);
        externalDataObject.setValue(title);
        // Single-valued fields, each guarded against blank values by addMetadataValue.
        addMetadataValue(externalDataObject, fieldMapping.getTitleField(), () -> title);
        addMetadataValue(externalDataObject, fieldMapping.getTypeField(), () -> getWorkType(work));
        addMetadataValue(externalDataObject, fieldMapping.getPublicationDateField(), () -> getPublicationDate(work));
        addMetadataValue(externalDataObject, fieldMapping.getJournalTitleField(), () -> getJournalTitle(work));
        addMetadataValue(externalDataObject, fieldMapping.getSubTitleField(), () -> getSubTitleField(work));
        addMetadataValue(externalDataObject, fieldMapping.getShortDescriptionField(), () -> getDescription(work));
        addMetadataValue(externalDataObject, fieldMapping.getLanguageField(), () -> getLanguage(work));
        // Multi-valued fields: one metadata field per configured contributor role / external id type.
        for (String contributorField : fieldMapping.getContributorFields().keySet()) {
            ContributorRole role = fieldMapping.getContributorFields().get(contributorField);
            addMetadataValues(externalDataObject, contributorField, () -> getContributors(work, role));
        }
        for (String externalIdField : fieldMapping.getExternalIdentifierFields().keySet()) {
            String type = fieldMapping.getExternalIdentifierFields().get(externalIdField);
            addMetadataValues(externalDataObject, externalIdField, () -> getExternalIds(work, type));
        }
        // Citation parsing is best-effort: a malformed citation must not break the conversion.
        try {
            addMetadataValuesFromCitation(externalDataObject, work.getWorkCitation());
        } catch (Exception e) {
            LOGGER.error("An error occurs reading the following citation: " + work.getWorkCitation().getCitation(), e);
        }
        return externalDataObject;
    }
private boolean allWorkSummariesHaveDifferentSourceClientId(WorkGroup workGroup) {
return workGroup.getWorkSummary().stream().allMatch(this::hasDifferentSourceClientId);
}
    /**
     * Returns true when the given element was NOT written by this
     * application's configured ORCID client: either the source, its client id,
     * or the client-id path is absent, or the path differs from the configured
     * client id. Absence of any link in the chain yields {@code orElse(true)}.
     */
    @SuppressWarnings("deprecation")
    private boolean hasDifferentSourceClientId(SourceAware sourceAware) {
        return Optional.ofNullable(sourceAware.getSource())
            .map(source -> source.getSourceClientId())
            .map(sourceClientId -> sourceClientId.getPath())
            .map(clientId -> !StringUtils.equals(orcidConfiguration.getClientId(), clientId))
            .orElse(true);
    }
private void addMetadataValues(ExternalDataObject externalData, String metadata, Supplier<List<String>> values) {
if (StringUtils.isBlank(metadata)) {
return;
}
MetadataFieldName field = new MetadataFieldName(metadata);
for (String value : values.get()) {
externalData.addMetadata(new MetadataValueDTO(field.schema, field.element, field.qualifier, null, value));
}
}
private void addMetadataValue(ExternalDataObject externalData, String metadata, Supplier<String> valueSupplier) {
addMetadataValues(externalData, metadata, () -> {
String value = valueSupplier.get();
return isNotBlank(value) ? List.of(value) : emptyList();
});
}
private String getWorkTitle(Work work) {
WorkTitle workTitle = work.getWorkTitle();
if (workTitle == null) {
return null;
}
Title title = workTitle.getTitle();
return title != null ? title.getContent() : null;
}
private String getWorkType(Work work) {
WorkType workType = work.getWorkType();
return workType != null ? fieldMapping.convertType(workType.value()) : null;
}
private String getPublicationDate(Work work) {
PublicationDate publicationDate = work.getPublicationDate();
if (publicationDate == null) {
return null;
}
StringBuilder builder = new StringBuilder(publicationDate.getYear().getValue());
if (publicationDate.getMonth() != null) {
builder.append("-");
builder.append(publicationDate.getMonth().getValue());
}
if (publicationDate.getDay() != null) {
builder.append("-");
builder.append(publicationDate.getDay().getValue());
}
return builder.toString();
}
private String getJournalTitle(Work work) {
Title journalTitle = work.getJournalTitle();
return journalTitle != null ? journalTitle.getContent() : null;
}
private String getSubTitleField(Work work) {
WorkTitle workTitle = work.getWorkTitle();
if (workTitle == null) {
return null;
}
Subtitle subTitle = workTitle.getSubtitle();
return subTitle != null ? subTitle.getContent() : null;
}
    /**
     * Returns the work's short description, possibly null.
     */
    private String getDescription(Work work) {
        return work.getShortDescription();
    }
    /**
     * Maps the work's ORCID language code through the configured field
     * mapping, or returns null when no language code is set.
     */
    private String getLanguage(Work work) {
        return work.getLanguageCode() != null ? fieldMapping.convertLanguage(work.getLanguageCode()) : null;
    }
private List<String> getContributors(Work work, ContributorRole role) {
WorkContributors workContributors = work.getWorkContributors();
if (workContributors == null) {
return emptyList();
}
return workContributors.getContributor().stream()
.filter(contributor -> hasRole(contributor, role))
.map(contributor -> getContributorName(contributor))
.flatMap(Optional::stream)
.collect(Collectors.toList());
}
    /**
     * Parses the work's citation (BibTeX, RIS, ...) through the import service
     * and copies any metadata not already present onto the external data
     * object. Citations whose type is FORMATTED_UNSPECIFIED (a statically
     * imported constant) carry no parsable structure and are skipped.
     *
     * @throws Exception if the citation cannot be written or parsed
     */
    private void addMetadataValuesFromCitation(ExternalDataObject externalDataObject, Citation citation)
        throws Exception {
        if (citation == null || citation.getWorkCitationType() == FORMATTED_UNSPECIFIED) {
            return;
        }
        getImportRecord(citation).ifPresent(importRecord -> enrichExternalDataObject(externalDataObject, importRecord));
    }
private Optional<ImportRecord> getImportRecord(Citation citation) throws Exception {
File citationFile = File.createTempFile("temp", "." + citation.getWorkCitationType().value());
try (FileOutputStream outputStream = new FileOutputStream(citationFile)) {
IOUtils.write(citation.getCitation(), new FileOutputStream(citationFile), Charset.defaultCharset());
return Optional.ofNullable(importService.getRecord(citationFile, citationFile.getName()));
} finally {
citationFile.delete();
}
}
private void enrichExternalDataObject(ExternalDataObject externalDataObject, ImportRecord importRecord) {
importRecord.getValueList().stream()
.filter(metadata -> doesNotContains(externalDataObject, metadata))
.forEach(metadata -> addMetadata(externalDataObject, metadata));
}
    /**
     * Adds the given metadatum to the external data object, preserving its
     * schema, element, qualifier and value (no language is set).
     */
    private void addMetadata(ExternalDataObject externalDataObject, MetadatumDTO metadata) {
        externalDataObject.addMetadata(new MetadataValueDTO(metadata.getSchema(), metadata.getElement(),
            metadata.getQualifier(), null, metadata.getValue()));
    }
private boolean doesNotContains(ExternalDataObject externalDataObject, MetadatumDTO metadata) {
return externalDataObject.getMetadata().stream()
.filter(metadataValue -> StringUtils.equals(metadataValue.getSchema(), metadata.getSchema()))
.filter(metadataValue -> StringUtils.equals(metadataValue.getElement(), metadata.getElement()))
.filter(metadataValue -> StringUtils.equals(metadataValue.getQualifier(), metadata.getQualifier()))
.findAny().isEmpty();
}
private boolean hasRole(Contributor contributor, ContributorRole role) {
ContributorAttributes attributes = contributor.getContributorAttributes();
return attributes != null ? role.equals(attributes.getContributorRole()) : false;
}
    /**
     * Returns the contributor's credit-name content, or empty when the
     * contributor has no credit name.
     */
    private Optional<String> getContributorName(Contributor contributor) {
        return Optional.ofNullable(contributor.getCreditName())
            .map(creditName -> creditName.getContent());
    }
    /**
     * Returns the values of the work's external identifiers of the given type
     * (e.g. "doi"). Returns an empty list when the work has no identifiers.
     * NOTE(review): {@code type} must be non-null — {@code type.equals} would
     * NPE otherwise; callers pass values from the configured external id map.
     */
    private List<String> getExternalIds(Work work, String type) {
        ExternalIDs externalIdentifiers = work.getExternalIdentifiers();
        if (externalIdentifiers == null) {
            return emptyList();
        }
        return externalIdentifiers.getExternalIdentifier().stream()
            .filter(externalId -> type.equals(externalId.getType()))
            .map(externalId -> externalId.getValue())
            .collect(Collectors.toList());
    }
private Context getContext() {
Context context = ContextUtil.obtainCurrentRequestContext();
return context != null ? context : new Context();
}
    /**
     * Returns true when the given source identifier is the one this provider
     * is configured with.
     */
    @Override
    public boolean supports(String source) {
        return StringUtils.equals(sourceIdentifier, source);
    }
    /**
     * Counts the works available for the given ORCID iD.
     * NOTE(review): relies on findWorkSummaries(orcid, 0, -1) — defined earlier
     * in this class, not visible here — returning ALL summaries when the limit
     * is -1; confirm that contract before changing this.
     */
    @Override
    public int getNumberOfResults(String orcid) {
        return findWorkSummaries(orcid, 0, -1).size();
    }
    /**
     * Sets the identifier under which this provider registers its results.
     */
    public void setSourceIdentifier(String sourceIdentifier) {
        this.sourceIdentifier = sourceIdentifier;
    }
    @Override
    public String getSourceIdentifier() {
        return sourceIdentifier;
    }
    /**
     * Sets the mapping from ORCID work fields to DSpace metadata fields.
     */
    public void setFieldMapping(OrcidWorkFieldMapping fieldMapping) {
        this.fieldMapping = fieldMapping;
    }
    /**
     * Sets (pre-seeds) the cached read-public access token; mainly useful for
     * testing, since the token is otherwise fetched lazily.
     */
    public void setReadPublicAccessToken(String readPublicAccessToken) {
        this.readPublicAccessToken = readPublicAccessToken;
    }
    public OrcidClient getOrcidClient() {
        return orcidClient;
    }
    public void setOrcidClient(OrcidClient orcidClient) {
        this.orcidClient = orcidClient;
    }
}

View File

@@ -140,7 +140,7 @@ public class OrcidV3AuthorDataProvider extends AbstractExternalDataProvider {
new MetadataValueDTO("person", "identifier", "orcid", null, person.getName().getPath())); new MetadataValueDTO("person", "identifier", "orcid", null, person.getName().getPath()));
externalDataObject externalDataObject
.addMetadata(new MetadataValueDTO("dc", "identifier", "uri", null, .addMetadata(new MetadataValueDTO("dc", "identifier", "uri", null,
orcidUrl + person.getName().getPath())); orcidUrl + "/" + person.getName().getPath()));
if (!StringUtils.isBlank(lastName) && !StringUtils.isBlank(firstName)) { if (!StringUtils.isBlank(lastName) && !StringUtils.isBlank(firstName)) {
externalDataObject.setDisplayValue(lastName + ", " + firstName); externalDataObject.setDisplayValue(lastName + ", " + firstName);
externalDataObject.setValue(lastName + ", " + firstName); externalDataObject.setValue(lastName + ", " + firstName);

View File

@@ -6,7 +6,7 @@
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.cache; package org.dspace.iiif.logger;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.cache; package org.dspace.iiif.logger;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;

View File

@@ -0,0 +1,39 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.ads;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
* An implementation of {@link AbstractMetadataFieldMapping}
* Responsible for defining the mapping of the ADS metadatum fields on the DSpace metadatum fields
*
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*/
@SuppressWarnings("rawtypes")
public class ADSFieldMapping extends AbstractMetadataFieldMapping {
    /**
     * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it
     * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
     * what metadatafield is generated.
     *
     * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to
     *                         the item.
     */
    @Override
    @SuppressWarnings("unchecked")
    // Spring injects the bean named "adsMetadataFieldMap" — presumably declared
    // in the ADS integration Spring config; confirm the bean name there.
    @Resource(name = "adsMetadataFieldMap")
    public void setMetadataFieldMap(Map metadataFieldMap) {
        super.setMetadataFieldMap(metadataFieldMap);
    }
}

View File

@@ -0,0 +1,334 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.ads;
import static org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl.HEADER_PARAMETERS;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import javax.el.MethodNotFoundException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.client.utils.URIBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.components.QuerySource;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implements a data source for querying ADS
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
public class ADSImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<String>
    implements QuerySource {

    private final static Logger log = LogManager.getLogger();

    /** Base URL of the ADS search endpoint. */
    private String url;

    /** Comma-separated list of fields returned for every matched document. */
    private String resultFieldList;

    /** ADS API token, sent as a "Bearer" authorization header. */
    private String apiKey;

    /** HTTP timeout, in milliseconds. */
    private int timeout = 1000;

    @Autowired
    private LiveImportClient liveImportClient;

    @Override
    public String getImportSource() {
        return "ads";
    }

    @Override
    public ImportRecord getRecord(String id) throws MetadataSourceException {
        List<ImportRecord> records = retry(new SearchByIdCallable(id));
        return CollectionUtils.isEmpty(records) ? null : records.get(0);
    }

    @Override
    public int getRecordsCount(String query) throws MetadataSourceException {
        return retry(new CountByQueryCallable(query));
    }

    @Override
    public int getRecordsCount(Query query) throws MetadataSourceException {
        return retry(new CountByQueryCallable(query));
    }

    @Override
    public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException {
        return retry(new SearchByQueryCallable(query, count, start));
    }

    @Override
    public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
        return retry(new SearchByQueryCallable(query));
    }

    @Override
    public ImportRecord getRecord(Query query) throws MetadataSourceException {
        List<ImportRecord> records = retry(new SearchByIdCallable(query));
        return CollectionUtils.isEmpty(records) ? null : records.get(0);
    }

    @Override
    public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
        return retry(new FindMatchingRecordCallable(query));
    }

    @Override
    public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
        // Fixed: the original message mentioned CrossRef, a copy/paste leftover.
        throw new MethodNotFoundException("This method is not implemented for ADS");
    }

    @Override
    public void init() throws Exception {}

    public String getApiKey() {
        return apiKey;
    }

    public void setApiKey(String apiKey) {
        this.apiKey = apiKey;
    }

    /**
     * This class is a Callable implementation to get ADS entries based on query object.
     * This Callable use as query value the string queryString passed to constructor.
     * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used.
     * Pagination is supported too, using the value of the Query's map with keys "start" and "count".
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
     */
    private class SearchByQueryCallable implements Callable<List<ImportRecord>> {

        private Query query;

        private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
            query = new Query();
            query.addParameter("query", queryString);
            query.addParameter("count", maxResult);
            query.addParameter("start", start);
        }

        private SearchByQueryCallable(Query query) {
            this.query = query;
        }

        @Override
        public List<ImportRecord> call() throws Exception {
            return search(query.getParameterAsClass("query", String.class),
                          query.getParameterAsClass("start", Integer.class),
                          query.getParameterAsClass("count", Integer.class),
                          getApiKey());
        }
    }

    /**
     * This class is a Callable implementation to get an ADS entry using bibcode.
     * The bibcode to use can be passed through the constructor as a String or as Query's map entry, with the key "id".
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
     */
    private class SearchByIdCallable implements Callable<List<ImportRecord>> {

        private Query query;

        private SearchByIdCallable(Query query) {
            this.query = query;
        }

        private SearchByIdCallable(String id) {
            this.query = new Query();
            query.addParameter("id", id);
        }

        @Override
        public List<ImportRecord> call() throws Exception {
            String queryString = "bibcode:" + query.getParameterAsClass("id", String.class);
            return search(queryString, 0, 1, getApiKey());
        }
    }

    /**
     * This class is a Callable implementation to search ADS entries
     * using author, title and year.
     * Pagination is supported too, using the value of the Query's map with keys "start" and "count".
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
     */
    private class FindMatchingRecordCallable implements Callable<List<ImportRecord>> {

        private Query query;

        private FindMatchingRecordCallable(Query q) {
            query = q;
        }

        @Override
        public List<ImportRecord> call() throws Exception {
            Integer count = query.getParameterAsClass("count", Integer.class);
            Integer start = query.getParameterAsClass("start", Integer.class);
            String author = query.getParameterAsClass("author", String.class);
            String title = query.getParameterAsClass("title", String.class);
            Integer year = query.getParameterAsClass("year", Integer.class);
            return search(title, author, year, start, count, getApiKey());
        }
    }

    /**
     * This class is a Callable implementation to count the number of entries for an ADS query.
     * This Callable use as query value to ADS the string queryString passed to constructor.
     * If the object will be construct through Query.class instance, the value of the Query's
     * map with the key "query" will be used.
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
     */
    private class CountByQueryCallable implements Callable<Integer> {

        private Query query;

        private CountByQueryCallable(String queryString) {
            query = new Query();
            query.addParameter("query", queryString);
        }

        private CountByQueryCallable(Query query) {
            this.query = query;
        }

        @Override
        public Integer call() throws Exception {
            return count(query.getParameterAsClass("query", String.class), getApiKey());
        }
    }

    /**
     * Builds an ADS query from title, author and year and runs the search.
     *
     * Fixed: the original branches were inverted AND assigned instead of
     * appending, so a non-blank query (the title clause) was overwritten by
     * "author:" / "year:", while a blank query got a leading "&fq=". Now the
     * first clause starts the query and subsequent clauses are appended as
     * filter queries.
     *
     * @param title  article title, ignored when blank
     * @param author author name(s), split on commas/semicolons/whitespace and
     *               joined with AND; ignored when blank
     * @param year   publication year, ignored when -1
     * @param start  index of the first result
     * @param count  maximum number of results
     * @param token  ADS API token
     * @return the matching records
     */
    private List<ImportRecord> search(String title, String author, int year, int start, int count, String token) {
        String query = "";
        if (StringUtils.isNotBlank(title)) {
            query += "title:" + title;
        }
        if (StringUtils.isNotBlank(author)) {
            String splitRegex = "(\\s*,\\s+|\\s*;\\s+|\\s*;+|\\s*,+|\\s+)";
            String[] authors = author.split(splitRegex);
            // [FAU]
            if (StringUtils.isBlank(query)) {
                query += "author:";
            } else {
                query += "&fq=author:";
            }
            for (int i = 0; i < authors.length; i++) {
                query += authors[i];
                if (i < authors.length - 1) {
                    query += " AND ";
                }
            }
        }
        if (year != -1) {
            // [DP]
            if (StringUtils.isBlank(query)) {
                query += "year:";
            } else {
                query += "&fq=year:";
            }
            query += year;
        }
        return search(query, start, count, token);
    }

    /**
     * Counts the ADS documents matching the given query.
     *
     * @param query the ADS query
     * @param token the ADS API token
     * @return the number of matching documents, or 0 when the request or the
     *         JSON parsing fails
     */
    public Integer count(String query, String token) {
        try {
            Map<String, Map<String, String>> params = new HashMap<>();
            Map<String, String> headerParameters = new HashMap<>();
            headerParameters.put("Authorization", "Bearer " + token);
            params.put(HEADER_PARAMETERS, headerParameters);

            URIBuilder uriBuilder = new URIBuilder(this.url);
            uriBuilder.addParameter("q", query);
            uriBuilder.addParameter("rows", "1");
            uriBuilder.addParameter("start", "0");
            uriBuilder.addParameter("fl", this.resultFieldList);

            String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params);
            JsonNode jsonNode = convertStringJsonToJsonNode(resp);
            if (jsonNode == null) {
                // Unparsable response: treat as no results (was an NPE before).
                return 0;
            }
            return jsonNode.at("/response/numFound").asInt();
        } catch (URISyntaxException e) {
            // Fixed: log through the class logger instead of printStackTrace().
            log.error(e.getMessage(), e);
        }
        return 0;
    }

    /**
     * Runs an ADS search and converts every returned document to an
     * {@code ImportRecord}.
     *
     * @param query the ADS query
     * @param start index of the first result
     * @param count maximum number of results
     * @param token ADS API token
     * @return the matching records; empty when the request or parsing fails
     */
    public List<ImportRecord> search(String query, Integer start, Integer count, String token) {
        List<ImportRecord> adsResults = new ArrayList<>();
        try {
            Map<String, Map<String, String>> params = new HashMap<>();
            Map<String, String> headerParameters = new HashMap<>();
            headerParameters.put("Authorization", "Bearer " + token);
            params.put(HEADER_PARAMETERS, headerParameters);

            URIBuilder uriBuilder = new URIBuilder(this.url);
            uriBuilder.addParameter("q", query);
            uriBuilder.addParameter("rows", count.toString());
            uriBuilder.addParameter("start", start.toString());
            uriBuilder.addParameter("fl", this.resultFieldList);

            String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params);
            JsonNode jsonNode = convertStringJsonToJsonNode(resp);
            if (jsonNode == null) {
                // Unparsable response: return what we have (was an NPE before).
                return adsResults;
            }
            JsonNode docs = jsonNode.at("/response/docs");
            if (docs.isArray()) {
                Iterator<JsonNode> nodes = docs.elements();
                while (nodes.hasNext()) {
                    JsonNode node = nodes.next();
                    adsResults.add(transformSourceRecords(node.toString()));
                }
            } else {
                adsResults.add(transformSourceRecords(docs.toString()));
            }
        } catch (URISyntaxException e) {
            // Fixed: log through the class logger instead of printStackTrace().
            log.error(e.getMessage(), e);
        }
        return adsResults;
    }

    /**
     * Parses the given JSON string, returning null (and logging) on failure.
     */
    private JsonNode convertStringJsonToJsonNode(String json) {
        try {
            return new ObjectMapper().readTree(json);
        } catch (JsonProcessingException e) {
            log.error("Unable to process json response.", e);
        }
        return null;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public void setResultFieldList(String resultFieldList) {
        this.resultFieldList = resultFieldList;
    }
}

View File

@@ -0,0 +1,37 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.cinii;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
* An implementation of {@link AbstractMetadataFieldMapping}
* Responsible for defining the mapping of the Cinii metadatum fields on the DSpace metadatum fields
*
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*/
public class CiniiFieldMapping extends AbstractMetadataFieldMapping {
    /**
     * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it
     * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
     * what metadatafield is generated.
     *
     * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to
     *                         the item.
     */
    @Override
    // Spring injects the bean named "ciniiMetadataFieldMap" — presumably
    // declared in the CiNii integration Spring config; confirm the bean name.
    @Resource(name = "ciniiMetadataFieldMap")
    public void setMetadataFieldMap(Map metadataFieldMap) {
        super.setMetadataFieldMap(metadataFieldMap);
    }
}

View File

@@ -0,0 +1,447 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.cinii;
import java.io.IOException;
import java.io.StringReader;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Callable;
import javax.el.MethodNotFoundException;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpException;
import org.apache.http.client.utils.URIBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.components.QuerySource;
import org.dspace.services.ConfigurationService;
import org.jdom2.Attribute;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implements a data source for querying Cinii
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
public class CiniiImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
implements QuerySource {
private final static Logger log = LogManager.getLogger();
private String url;
private String urlSearch;
@Autowired
private LiveImportClient liveImportClient;
@Autowired
private ConfigurationService configurationService;
    @Override
    public String getImportSource() {
        return "cinii";
    }
    @Override
    public void init() throws Exception {}
    /**
     * Fetches a single record by CiNii id, or null when nothing is found.
     */
    @Override
    public ImportRecord getRecord(String id) throws MetadataSourceException {
        List<ImportRecord> records = retry(new SearchByIdCallable(id));
        return CollectionUtils.isNotEmpty(records) ? records.get(0) : null;
    }
    @Override
    public int getRecordsCount(String query) throws MetadataSourceException {
        return retry(new CountByQueryCallable(query));
    }
    @Override
    public int getRecordsCount(Query query) throws MetadataSourceException {
        return retry(new CountByQueryCallable(query));
    }
    // Note: the Query-object variant below feeds (count, start) in that order
    // to the callable constructor, which matches (maxResult, start).
    @Override
    public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException {
        return retry(new SearchByQueryCallable(query, count, start));
    }
    @Override
    public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
        return retry(new SearchByQueryCallable(query));
    }
    /**
     * Fetches a single record for a Query carrying the CiNii id under key "id".
     */
    @Override
    public ImportRecord getRecord(Query query) throws MetadataSourceException {
        List<ImportRecord> records = retry(new SearchByIdCallable(query));
        return CollectionUtils.isNotEmpty(records) ? records.get(0) : null;
    }
    @Override
    public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
        return retry(new FindMatchingRecordCallable(query));
    }
    /** Item-based matching is not supported for this source. */
    @Override
    public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
        throw new MethodNotFoundException("This method is not implemented for Cinii");
    }
    /** Base URL of the CiNii RDF endpoint (record lookup). */
    public String getUrl() {
        return url;
    }
    public void setUrl(String url) {
        this.url = url;
    }
    /** URL of the CiNii search (RSS) endpoint. */
    public String getUrlSearch() {
        return urlSearch;
    }
    public void setUrlSearch(String urlSearch) {
        this.urlSearch = urlSearch;
    }
    /**
     * This class is a Callable implementation to get CiNii entries based on
     * query object.
     *
     * This Callable use as query value the string queryString passed to constructor.
     * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used.
     * Pagination is supported too, using the value of the Query's map with keys "start" and "count".
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
     */
    private class SearchByQueryCallable implements Callable<List<ImportRecord>> {
        private Query query;
        private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
            query = new Query();
            query.addParameter("query", queryString);
            query.addParameter("count", maxResult);
            query.addParameter("start", start);
        }
        private SearchByQueryCallable(Query query) {
            this.query = query;
        }
        @Override
        public List<ImportRecord> call() throws Exception {
            List<ImportRecord> records = new LinkedList<ImportRecord>();
            Integer count = query.getParameterAsClass("count", Integer.class);
            Integer start = query.getParameterAsClass("start", Integer.class);
            String queryString = query.getParameterAsClass("query", String.class);
            String appId = configurationService.getProperty("cinii.appid");
            // First resolve matching ids via the search endpoint, then fetch each
            // record individually (one HTTP request per id).
            List<String> ids = getCiniiIds(appId, count, null, null, null, start, queryString);
            if (CollectionUtils.isNotEmpty(ids)) {
                for (String id : ids) {
                    List<ImportRecord> tmp = search(id, appId);
                    if (CollectionUtils.isNotEmpty(tmp)) {
                        // createIdentifier is defined elsewhere in this class (not
                        // visible here); presumably tags each record with its id.
                        tmp.forEach(x -> x.addValue(createIdentifier(id)));
                    }
                    records.addAll(tmp);
                }
            }
            return records;
        }
    }
    /**
     * This class is a Callable implementation to get an CiNii entry using CiNii ID
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
     */
    private class SearchByIdCallable implements Callable<List<ImportRecord>> {
        private Query query;
        private SearchByIdCallable(Query query) {
            this.query = query;
        }
        private SearchByIdCallable(String id) {
            this.query = new Query();
            query.addParameter("id", id);
        }
        @Override
        public List<ImportRecord> call() throws Exception {
            String appId = configurationService.getProperty("cinii.appid");
            String id = query.getParameterAsClass("id", String.class);
            List<ImportRecord> importRecord = search(id, appId);
            if (CollectionUtils.isNotEmpty(importRecord)) {
                // createIdentifier is defined elsewhere in this class (not
                // visible here); presumably tags each record with its id.
                importRecord.forEach(x -> x.addValue(createIdentifier(id)));
            }
            return importRecord;
        }
    }
    /**
     * This class is a Callable implementation to search CiNii entries
     * using author, title and year.
     * Pagination is supported too, using the value of the Query's map with keys "start" and "count".
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
     */
    private class FindMatchingRecordCallable implements Callable<List<ImportRecord>> {
        private Query query;
        private FindMatchingRecordCallable(Query q) {
            query = q;
        }
        @Override
        public List<ImportRecord> call() throws Exception {
            List<ImportRecord> records = new LinkedList<ImportRecord>();
            String title = query.getParameterAsClass("title", String.class);
            String author = query.getParameterAsClass("author", String.class);
            Integer year = query.getParameterAsClass("year", Integer.class);
            // Note: this callable reads the page size from key "maxResult",
            // unlike SearchByQueryCallable which uses "count".
            Integer maxResult = query.getParameterAsClass("maxResult", Integer.class);
            Integer start = query.getParameterAsClass("start", Integer.class);
            String appId = configurationService.getProperty("cinii.appid");
            List<String> ids = getCiniiIds(appId, maxResult, author, title, year, start, null);
            if (CollectionUtils.isNotEmpty(ids)) {
                for (String id : ids) {
                    List<ImportRecord> importRecords = search(id, appId);
                    if (CollectionUtils.isNotEmpty(importRecords)) {
                        importRecords.forEach(x -> x.addValue(createIdentifier(id)));
                    }
                    records.addAll(importRecords);
                }
            }
            return records;
        }
    }
    /**
     * This class is a Callable implementation to count the number
     * of entries for an CiNii query.
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
     */
    private class CountByQueryCallable implements Callable<Integer> {
        private Query query;
        private CountByQueryCallable(String queryString) {
            query = new Query();
            query.addParameter("query", queryString);
        }
        private CountByQueryCallable(Query query) {
            this.query = query;
        }
        @Override
        public Integer call() throws Exception {
            String appId = configurationService.getProperty("cinii.appid");
            String queryString = query.getParameterAsClass("query", String.class);
            // countCiniiElement is defined later in this class (not visible
            // here); presumably queries the search endpoint for a total count.
            return countCiniiElement(appId, null, null, null, null, null, queryString);
        }
    }
    /**
     * Get metadata by searching CiNii RDF API with CiNii NAID
     *
     * @param id    CiNii NAID to search by
     * @param appId registered application identifier for the API
     * @return record metadata
     * @throws IOException   A general class of exceptions produced by failed or interrupted I/O operations.
     * @throws HttpException Represents a XML/HTTP fault and provides access to the HTTP status code.
     */
    protected List<ImportRecord> search(String id, String appId)
        throws IOException, HttpException {
        try {
            List<ImportRecord> records = new LinkedList<ImportRecord>();
            // Fetch the record as RDF: <url><id>.rdf?appid=<appId>
            URIBuilder uriBuilder = new URIBuilder(this.url + id + ".rdf?appid=" + appId);
            Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
            String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
            // One ImportRecord per top-level child element of the RDF document.
            List<Element> elements = splitToRecords(response);
            for (Element record : elements) {
                records.add(transformSourceRecords(record));
            }
            return records;
        } catch (URISyntaxException e) {
            log.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        }
    }
/**
 * Parse the XML returned by the CiNii API and return the child elements
 * of the document root, one per record.
 *
 * @param recordsSrc raw XML returned by the remote service
 * @return the record elements found under the document root
 */
private List<Element> splitToRecords(String recordsSrc) {
    try {
        SAXBuilder saxBuilder = new SAXBuilder();
        // The XML comes from a remote service: disallow DTDs and external
        // entity resolution to protect against XXE attacks.
        saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        saxBuilder.setFeature("http://xml.org/sax/features/external-general-entities", false);
        saxBuilder.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        Document document = saxBuilder.build(new StringReader(recordsSrc));
        Element root = document.getRootElement();
        return root.getChildren();
    } catch (JDOMException | IOException e) {
        log.error(e.getMessage(), e);
        throw new RuntimeException(e.getMessage(), e);
    }
}
/**
 * Returns the CiNii ids extracted from the uri links (for example:
 * https://cir.nii.ac.jp/crid/123456789) of the searched CiNii articles.
 *
 * @param appId     Application ID
 * @param maxResult The number of search results per page
 * @param author    Author name
 * @param title     Article name
 * @param year      Year of publication (ignored when null, -1 or 0)
 * @param start     Start number for the acquired search result list
 * @param query     Keyword to be searched
 * @return the ids of the matching articles
 */
private List<String> getCiniiIds(String appId, Integer maxResult, String author, String title,
    Integer year, Integer start, String query) {
    try {
        List<String> ids = new ArrayList<>();
        URIBuilder uriBuilder = new URIBuilder(this.urlSearch);
        uriBuilder.addParameter("format", "rss");
        if (StringUtils.isNotBlank(appId)) {
            uriBuilder.addParameter("appid", appId);
        }
        if (Objects.nonNull(maxResult) && maxResult != 0) {
            uriBuilder.addParameter("count", maxResult.toString());
        }
        if (Objects.nonNull(start)) {
            uriBuilder.addParameter("start", start.toString());
        }
        if (StringUtils.isNotBlank(title)) {
            uriBuilder.addParameter("title", title);
        }
        if (StringUtils.isNotBlank(author)) {
            uriBuilder.addParameter("author", author);
        }
        if (StringUtils.isNotBlank(query)) {
            uriBuilder.addParameter("q", query);
        }
        if (Objects.nonNull(year) && year != -1 && year != 0) {
            // The API takes a range; use the same year for both bounds.
            uriBuilder.addParameter("year_from", String.valueOf(year));
            uriBuilder.addParameter("year_to", String.valueOf(year));
        }
        Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
        String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
        int url_len = this.url.length() - 1;
        SAXBuilder saxBuilder = new SAXBuilder();
        // The XML comes from a remote service: disallow DTDs and external
        // entity resolution to protect against XXE attacks.
        saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        saxBuilder.setFeature("http://xml.org/sax/features/external-general-entities", false);
        saxBuilder.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        Document document = saxBuilder.build(new StringReader(response));
        Element root = document.getRootElement();
        List<Namespace> namespaces = Arrays.asList(
            Namespace.getNamespace("ns", "http://purl.org/rss/1.0/"),
            Namespace.getNamespace("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#"));
        XPathExpression<Attribute> xpath = XPathFactory.instance().compile("//ns:item/@rdf:about",
            Filters.attribute(), null, namespaces);
        List<Attribute> recordsList = xpath.evaluate(root);
        for (Attribute item : recordsList) {
            String value = item.getValue();
            // Keep only the id segment that follows the record base url.
            if (value.length() > url_len) {
                ids.add(value.substring(url_len + 1));
            }
        }
        return ids;
    } catch (JDOMException | IOException | URISyntaxException e) {
        log.error(e.getMessage(), e);
        throw new RuntimeException(e.getMessage(), e);
    }
}
/**
 * Returns the total number of CiNii articles returned by a specific query,
 * reading the opensearch:totalResults element of the RSS response.
 *
 * @param appId     Application ID
 * @param maxResult The number of search results per page
 * @param author    Author name
 * @param title     Article name
 * @param year      Year of publication (ignored when null, -1 or 0)
 * @param start     Start number for the acquired search result list
 * @param query     Keyword to be searched
 * @return the total number of matching articles, or 0 when the response
 *         contains no totalResults element
 */
private Integer countCiniiElement(String appId, Integer maxResult, String author, String title,
    Integer year, Integer start, String query) {
    try {
        URIBuilder uriBuilder = new URIBuilder(this.urlSearch);
        uriBuilder.addParameter("format", "rss");
        uriBuilder.addParameter("appid", appId);
        if (Objects.nonNull(maxResult) && maxResult != 0) {
            uriBuilder.addParameter("count", maxResult.toString());
        }
        if (Objects.nonNull(start)) {
            uriBuilder.addParameter("start", start.toString());
        }
        if (StringUtils.isNotBlank(title)) {
            uriBuilder.addParameter("title", title);
        }
        if (StringUtils.isNotBlank(author)) {
            uriBuilder.addParameter("author", author);
        }
        if (StringUtils.isNotBlank(query)) {
            uriBuilder.addParameter("q", query);
        }
        if (Objects.nonNull(year) && year != -1 && year != 0) {
            // The API takes a range; use the same year for both bounds.
            uriBuilder.addParameter("year_from", String.valueOf(year));
            uriBuilder.addParameter("year_to", String.valueOf(year));
        }
        Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
        String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
        SAXBuilder saxBuilder = new SAXBuilder();
        // The XML comes from a remote service: disallow DTDs and external
        // entity resolution to protect against XXE attacks.
        saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        saxBuilder.setFeature("http://xml.org/sax/features/external-general-entities", false);
        saxBuilder.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        Document document = saxBuilder.build(new StringReader(response));
        Element root = document.getRootElement();
        List<Namespace> namespaces = Arrays
            .asList(Namespace.getNamespace("opensearch", "http://a9.com/-/spec/opensearch/1.1/"));
        XPathExpression<Element> xpath = XPathFactory.instance().compile("//opensearch:totalResults",
            Filters.element(), null, namespaces);
        List<Element> nodes = xpath.evaluate(root);
        if (nodes != null && !nodes.isEmpty()) {
            // Filters.element() already guarantees Element instances, no cast needed.
            return Integer.parseInt(nodes.get(0).getText());
        }
        return 0;
    } catch (JDOMException | IOException | URISyntaxException e) {
        log.error(e.getMessage(), e);
        throw new RuntimeException(e.getMessage(), e);
    }
}
/**
 * Build a dc.identifier.other metadatum carrying the given CiNii id.
 *
 * @param id the identifier value to store
 * @return a DTO describing the dc.identifier.other metadata value
 */
private MetadatumDTO createIdentifier(String id) {
    MetadatumDTO identifier = new MetadatumDTO();
    identifier.setSchema("dc");
    identifier.setElement("identifier");
    identifier.setQualifier("other");
    identifier.setValue(id);
    return identifier;
}
}

View File

@@ -13,6 +13,7 @@ import java.util.Collection;
import java.util.HashMap; import java.util.HashMap;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.concurrent.Callable; import java.util.concurrent.Callable;
import javax.el.MethodNotFoundException; import javax.el.MethodNotFoundException;
@@ -155,9 +156,8 @@ public class CrossRefImportMetadataSourceServiceImpl extends AbstractImportMetad
if (Objects.nonNull(start)) { if (Objects.nonNull(start)) {
uriBuilder.addParameter("offset", start.toString()); uriBuilder.addParameter("offset", start.toString());
} }
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
new HashMap<String, String>());
JsonNode jsonNode = convertStringJsonToJsonNode(response); JsonNode jsonNode = convertStringJsonToJsonNode(response);
Iterator<JsonNode> nodes = jsonNode.at("/message/items").iterator(); Iterator<JsonNode> nodes = jsonNode.at("/message/items").iterator();
while (nodes.hasNext()) { while (nodes.hasNext()) {
@@ -192,8 +192,8 @@ public class CrossRefImportMetadataSourceServiceImpl extends AbstractImportMetad
List<ImportRecord> results = new ArrayList<>(); List<ImportRecord> results = new ArrayList<>();
String ID = URLDecoder.decode(query.getParameterAsClass("id", String.class), "UTF-8"); String ID = URLDecoder.decode(query.getParameterAsClass("id", String.class), "UTF-8");
URIBuilder uriBuilder = new URIBuilder(url + "/" + ID); URIBuilder uriBuilder = new URIBuilder(url + "/" + ID);
String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
new HashMap<String, String>()); String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
JsonNode jsonNode = convertStringJsonToJsonNode(responseString); JsonNode jsonNode = convertStringJsonToJsonNode(responseString);
JsonNode messageNode = jsonNode.at("/message"); JsonNode messageNode = jsonNode.at("/message");
results.add(transformSourceRecords(messageNode.toString())); results.add(transformSourceRecords(messageNode.toString()));
@@ -244,9 +244,8 @@ public class CrossRefImportMetadataSourceServiceImpl extends AbstractImportMetad
if (Objects.nonNull(bibliographics)) { if (Objects.nonNull(bibliographics)) {
uriBuilder.addParameter("query.bibliographic", bibliographics); uriBuilder.addParameter("query.bibliographic", bibliographics);
} }
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String resp = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), String resp = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
new HashMap<String, String>());
JsonNode jsonNode = convertStringJsonToJsonNode(resp); JsonNode jsonNode = convertStringJsonToJsonNode(resp);
Iterator<JsonNode> nodes = jsonNode.at("/message/items").iterator(); Iterator<JsonNode> nodes = jsonNode.at("/message/items").iterator();
while (nodes.hasNext()) { while (nodes.hasNext()) {
@@ -283,8 +282,8 @@ public class CrossRefImportMetadataSourceServiceImpl extends AbstractImportMetad
public Integer call() throws Exception { public Integer call() throws Exception {
URIBuilder uriBuilder = new URIBuilder(url); URIBuilder uriBuilder = new URIBuilder(url);
uriBuilder.addParameter("query", query.getParameterAsClass("query", String.class)); uriBuilder.addParameter("query", query.getParameterAsClass("query", String.class));
String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
new HashMap<String, String>()); String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
JsonNode jsonNode = convertStringJsonToJsonNode(responseString); JsonNode jsonNode = convertStringJsonToJsonNode(responseString);
return jsonNode.at("/message/total-results").asInt(); return jsonNode.at("/message/total-results").asInt();
} }
@@ -313,9 +312,9 @@ public class CrossRefImportMetadataSourceServiceImpl extends AbstractImportMetad
@Override @Override
public Integer call() throws Exception { public Integer call() throws Exception {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
URIBuilder uriBuilder = new URIBuilder(url + "/" + query.getParameterAsClass("id", String.class)); URIBuilder uriBuilder = new URIBuilder(url + "/" + query.getParameterAsClass("id", String.class));
String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
new HashMap<String, String>());
JsonNode jsonNode = convertStringJsonToJsonNode(responseString); JsonNode jsonNode = convertStringJsonToJsonNode(responseString);
return StringUtils.equals(jsonNode.at("/status").toString(), "ok") ? 1 : 0; return StringUtils.equals(jsonNode.at("/status").toString(), "ok") ? 1 : 0;
} }

View File

@@ -0,0 +1,36 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.epo.service;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
* An implementation of {@link AbstractMetadataFieldMapping}
* Responsible for defining the mapping of the Epo metadatum fields on the DSpace metadatum fields
*
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*/
public class EpoFieldMapping extends AbstractMetadataFieldMapping {
    /**
     * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it
     * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
     * what metadatafield is generated.
     *
     * The map itself is injected from the Spring bean named "epoMetadataFieldMap".
     *
     * @param metadataFieldMap The map containing the link between retrieved metadata and metadata that will be set
     *                         to the item.
     */
    @Override
    @Resource(name = "epoMetadataFieldMap")
    public void setMetadataFieldMap(Map metadataFieldMap) {
        super.setMetadataFieldMap(metadataFieldMap);
    }
}

View File

@@ -0,0 +1,541 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.epo.service;
import static org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl.HEADER_PARAMETERS;
import java.io.IOException;
import java.io.StringReader;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.stream.Collectors;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpException;
import org.apache.http.client.utils.URIBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.xerces.impl.dv.util.Base64;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.contributor.EpoIdMetadataContributor.EpoDocumentId;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.components.QuerySource;
import org.jaxen.JaxenException;
import org.jdom2.Attribute;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.Namespace;
import org.jdom2.Text;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implements a data source for querying EPO
*
* @author Pasquale Cavallo (pasquale.cavallo at 4Science dot it)
*/
public class EpoImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
implements QuerySource {
private final static Logger log = LogManager.getLogger();
private String url;
private String authUrl;
private String searchUrl;
private String consumerKey;
private String consumerSecret;
private MetadataFieldConfig dateFiled;
private MetadataFieldConfig applicationNumber;
public static final String APP_NO_DATE_SEPARATOR = "$$$";
private static final String APP_NO_DATE_SEPARATOR_REGEX = "\\$\\$\\$";
@Autowired
private LiveImportClient liveImportClient;
// Nothing to initialize: the client is autowired and urls/credentials are injected via setters.
@Override
public void init() throws Exception {}
/**
 * The string that identifies this import implementation. Preferably a URI.
 *
 * @return the identifying uri of this source, always "epo"
 */
@Override
public String getImportSource() {
    return "epo";
}
/**
 * Set the customer epo key
 * @param consumerKey the customer consumer key
 */
public void setConsumerKey(String consumerKey) {
    this.consumerKey = consumerKey;
}

/**
 * @return the configured EPO consumer key
 */
public String getConsumerKey() {
    return consumerKey;
}

/**
 * Set the customer epo secret
 * @param consumerSecret the customer epo secret
 */
public void setConsumerSecret(String consumerSecret) {
    this.consumerSecret = consumerSecret;
}

/**
 * @return the configured EPO consumer secret
 */
public String getConsumerSecret() {
    return consumerSecret;
}

/**
 * Set the metadata field that carries the "date filed" value of a record.
 * @param dateFiled the metadata field configuration
 */
public void setDateFiled(MetadataFieldConfig dateFiled) {
    this.dateFiled = dateFiled;
}

/**
 * @return the metadata field that carries the "date filed" value
 */
public MetadataFieldConfig getDateFiled() {
    return dateFiled;
}

/**
 * Set the metadata field that carries the application number of a record.
 * @param applicationNumber the metadata field configuration
 */
public void setApplicationNumber(MetadataFieldConfig applicationNumber) {
    this.applicationNumber = applicationNumber;
}

/**
 * @return the metadata field that carries the application number
 */
public MetadataFieldConfig getApplicationNumber() {
    return applicationNumber;
}
/***
 * Log in to EPO with the configured consumer key and secret; the returned
 * bearer token is valid for 20 minutes.
 *
 * @return the OAuth access token to use as bearer in subsequent requests
 * @throws IOException   if the response cannot be read or parsed
 * @throws HttpException if the authentication response contains no token
 */
protected String login() throws IOException, HttpException {
    Map<String, Map<String, String>> params = getLoginParams();
    String entity = "grant_type=client_credentials";
    String json = liveImportClient.executeHttpPostRequest(this.authUrl, params, entity);
    ObjectMapper mapper = new ObjectMapper(new JsonFactory());
    JsonNode rootNode = mapper.readTree(json);
    JsonNode accessTokenNode = rootNode.get("access_token");
    if (accessTokenNode == null || accessTokenNode.isNull()) {
        // Fail with a clear message instead of a NullPointerException when
        // the auth endpoint rejects the credentials.
        throw new HttpException("EPO authentication response did not contain an access_token");
    }
    return accessTokenNode.asText();
}
/**
 * Build the request parameter map for the login call: only header
 * parameters are needed.
 */
private Map<String, Map<String, String>> getLoginParams() {
    Map<String, Map<String, String>> requestParams = new HashMap<String, Map<String, String>>();
    requestParams.put(HEADER_PARAMETERS, getLoginHeaderParams());
    return requestParams;
}
/**
 * Build the HTTP headers for the OAuth client-credentials login: a Basic
 * Authorization header derived from consumer key/secret plus the
 * form-encoded content type.
 */
private Map<String, String> getLoginHeaderParams() {
    Map<String, String> params = new HashMap<String, String>();
    String authString = consumerKey + ":" + consumerSecret;
    // Use the JDK Base64 codec (instead of the internal Xerces class) and an
    // explicit charset so the encoding is independent of the platform default.
    String encoded = java.util.Base64.getEncoder()
        .encodeToString(authString.getBytes(java.nio.charset.StandardCharsets.UTF_8));
    params.put("Authorization", "Basic " + encoded);
    params.put("Content-type", "application/x-www-form-urlencoded");
    return params;
}
/**
 * Count the EPO documents matching the given query string. Returns 0 when
 * no consumer key/secret is configured.
 */
@Override
public int getRecordsCount(String query) throws MetadataSourceException {
    if (StringUtils.isBlank(consumerKey) || StringUtils.isBlank(consumerSecret)) {
        // Without credentials no request can be made.
        return 0;
    }
    try {
        return retry(new CountRecordsCallable(query, login()));
    } catch (IOException | HttpException e) {
        log.warn(e.getMessage());
        throw new RuntimeException(e.getMessage(), e);
    }
}
/**
 * Count the EPO documents matching the "query" parameter of the given
 * Query. Returns 0 when no consumer key/secret is configured.
 */
@Override
public int getRecordsCount(Query query) throws MetadataSourceException {
    if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) {
        try {
            String bearer = login();
            return retry(new CountRecordsCallable(query, bearer));
        } catch (IOException | HttpException e) {
            // Previously the exception was only printed to stderr and silently
            // mapped to 0; log it and fail like the String-based overload does.
            log.warn(e.getMessage());
            throw new RuntimeException(e.getMessage(), e);
        }
    }
    return 0;
}
/**
 * Retrieve the EPO records matching the query, paged by start/count.
 * Returns an empty collection when no consumer key/secret is configured.
 */
@Override
public Collection<ImportRecord> getRecords(String query, int start,
    int count) throws MetadataSourceException {
    if (StringUtils.isBlank(consumerKey) || StringUtils.isBlank(consumerSecret)) {
        return new ArrayList<ImportRecord>();
    }
    try {
        return retry(new SearchByQueryCallable(query, login(), start, count));
    } catch (IOException | HttpException e) {
        log.warn(e.getMessage());
        throw new RuntimeException(e.getMessage(), e);
    }
}
/**
 * Retrieve the EPO records matching the given Query. Returns an empty
 * collection when no consumer key/secret is configured.
 */
@Override
public Collection<ImportRecord> getRecords(Query query)
    throws MetadataSourceException {
    if (StringUtils.isBlank(consumerKey) || StringUtils.isBlank(consumerSecret)) {
        return new ArrayList<ImportRecord>();
    }
    try {
        return retry(new SearchByQueryCallable(query, login()));
    } catch (IOException | HttpException e) {
        log.warn(e.getMessage());
        throw new RuntimeException(e.getMessage(), e);
    }
}
/**
 * Retrieve a single EPO record by its identifier. Returns null when no
 * consumer key/secret is configured or nothing is found.
 */
@Override
public ImportRecord getRecord(String id) throws MetadataSourceException {
    if (StringUtils.isBlank(consumerKey) || StringUtils.isBlank(consumerSecret)) {
        return null;
    }
    try {
        List<ImportRecord> matches = retry(new SearchByIdCallable(id, login()));
        return CollectionUtils.isNotEmpty(matches) ? matches.get(0) : null;
    } catch (IOException | HttpException e) {
        log.warn(e.getMessage());
        throw new RuntimeException(e.getMessage(), e);
    }
}
/**
 * Not implemented for the EPO source.
 *
 * @return always null
 */
@Override
public ImportRecord getRecord(Query query) throws MetadataSourceException {
    return null;
}
/**
 * Not implemented for the EPO source.
 *
 * @return always null
 */
@Override
public Collection<ImportRecord> findMatchingRecords(Item item)
    throws MetadataSourceException {
    return null;
}

/**
 * Not implemented for the EPO source.
 *
 * @return always null
 */
@Override
public Collection<ImportRecord> findMatchingRecords(Query query)
    throws MetadataSourceException {
    return null;
}
/**
* This class is a Callable implementation to count the number of entries for an EPO query.
* This Callable use as query value to EPO the string queryString passed to constructor.
* If the object will be construct through Query.class instance, the value of the Query's
* map with the key "query" will be used.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
private class CountRecordsCallable implements Callable<Integer> {
private String bearer;
private String query;
private CountRecordsCallable(Query query, String bearer) {
this.query = query.getParameterAsClass("query", String.class);
this.bearer = bearer;
}
private CountRecordsCallable(String query, String bearer) {
this.query = query;
this.bearer = bearer;
}
public Integer call() throws Exception {
return countDocument(bearer, query);
}
}
/**
 * This class is a Callable implementation to get an EPO entry using epodocID (epodoc:AB1234567T)
 * The epodocID to use can be passed through the constructor as a String or as Query's map entry, with the key "id".
 *
 * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
 */
private class SearchByIdCallable implements Callable<List<ImportRecord>> {
    // raw identifier, optionally prefixed with a document-id type ("epodoc:...")
    private String id;
    // OAuth bearer token used for the EPO requests
    private String bearer;
    private SearchByIdCallable(String id, String bearer) {
        this.id = id;
        this.bearer = bearer;
    }
    public List<ImportRecord> call() throws Exception {
        int positionToSplit = id.indexOf(":");
        String docType = EpoDocumentId.EPODOC;
        String idS = id;
        if (positionToSplit != -1) {
            // "type:value" form: split into the document-id type and the bare id
            docType = id.substring(0, positionToSplit);
            idS = id.substring(positionToSplit + 1, id.length());
        } else if (id.contains(APP_NO_DATE_SEPARATOR)) {
            // special case the id is the combination of the applicationnumber and date filed
            String query = "applicationnumber=" + id.split(APP_NO_DATE_SEPARATOR_REGEX)[0];
            SearchByQueryCallable search = new SearchByQueryCallable(query, bearer, 0, 10);
            // keep only the first record whose "date filed" metadata matches the
            // date embedded after the separator in the composite id
            List<ImportRecord> records = search.call().stream()
                .filter(r -> r.getValue(dateFiled.getSchema(), dateFiled.getElement(),
                    dateFiled.getQualifier())
                    .stream()
                    .anyMatch(m -> StringUtils.equals(m.getValue(),
                        id.split(APP_NO_DATE_SEPARATOR_REGEX)[1])
                    ))
                .limit(1).collect(Collectors.toList());
            return records;
        }
        List<ImportRecord> records = searchDocument(bearer, idS, docType);
        if (records.size() > 1) {
            log.warn("More record are returned with epocID " + id);
        }
        return records;
    }
}
/**
 * This class is a Callable implementation to get EPO entries based on query object.
 * This Callable use as query value the string queryString passed to constructor.
 * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used.
 * Pagination is supported too, using the value of the Query's map with keys "start" and "count".
 *
 * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
 */
private class SearchByQueryCallable implements Callable<List<ImportRecord>> {
    private Query query;
    private Integer start;
    private Integer count;
    private String bearer;

    private SearchByQueryCallable(Query query, String bearer) {
        this.query = query;
        // Read paging from the query, falling back to the first page of 20
        // results (previously these fields were left null, which caused a
        // NullPointerException at "start + 1" in call()).
        this.start = query.getParameterAsClass("start", Integer.class) != null
            ? query.getParameterAsClass("start", Integer.class) : 0;
        this.count = query.getParameterAsClass("count", Integer.class) != null
            ? query.getParameterAsClass("count", Integer.class) : 20;
        this.bearer = bearer;
    }

    public SearchByQueryCallable(String queryValue, String bearer, int start, int count) {
        this.query = new Query();
        query.addParameter("query", queryValue);
        // Use the values handed in by the caller (previously they were read
        // back from the freshly created query, which never contains them, so
        // the requested paging was silently ignored).
        this.start = start;
        this.count = count;
        this.bearer = bearer;
    }

    @Override
    public List<ImportRecord> call() throws Exception {
        List<ImportRecord> records = new ArrayList<ImportRecord>();
        String queryString = query.getParameterAsClass("query", String.class);
        if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) {
            if (StringUtils.isNotBlank(queryString) && StringUtils.isNotBlank(bearer)) {
                // EPO ranges are 1-based, hence start + 1
                List<EpoDocumentId> epoDocIds = searchDocumentIds(bearer, queryString, start + 1, count);
                for (EpoDocumentId epoDocId : epoDocIds) {
                    List<ImportRecord> recordfounds = searchDocument(bearer, epoDocId);
                    if (recordfounds.size() > 1) {
                        log.warn("More record are returned with epocID " + epoDocId.toString());
                    }
                    records.addAll(recordfounds);
                }
            }
        }
        return records;
    }
}
/**
 * Ask the EPO search service how many documents match the given query.
 * Only the first result is requested (X-OPS-Range 1-1) because just the
 * total-result-count attribute of the response is read.
 *
 * @param bearer OAuth bearer token obtained from {@link #login()}
 * @param query  the EPO query string
 * @return the total number of matching documents, or null when the token
 *         is blank or the request/parsing fails
 */
private Integer countDocument(String bearer, String query) {
    if (StringUtils.isBlank(bearer)) {
        return null;
    }
    try {
        Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
        Map<String, String> headerParameters = new HashMap<String, String>();
        headerParameters.put("Authorization", "Bearer " + bearer);
        headerParameters.put("X-OPS-Range", "1-1");
        params.put(HEADER_PARAMETERS, headerParameters);
        URIBuilder uriBuilder = new URIBuilder(this.searchUrl);
        uriBuilder.addParameter("q", query);
        String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
        SAXBuilder saxBuilder = new SAXBuilder();
        // Remote XML: disallow DTDs and external entities to prevent XXE.
        saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        saxBuilder.setFeature("http://xml.org/sax/features/external-general-entities", false);
        saxBuilder.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        Document document = saxBuilder.build(new StringReader(response));
        Element root = document.getRootElement();
        List<Namespace> namespaces = Arrays.asList(
            Namespace.getNamespace("xlink", "http://www.w3.org/1999/xlink"),
            Namespace.getNamespace("ops", "http://ops.epo.org"),
            Namespace.getNamespace("ns", "http://www.epo.org/exchange"));
        String totalRes = getElement(root, namespaces, "//ops:biblio-search/@total-result-count");
        return Integer.parseInt(totalRes);
    } catch (JDOMException | IOException | URISyntaxException | JaxenException e) {
        log.error(e.getMessage(), e);
        return null;
    }
}
/**
 * Execute a search on the EPO service and collect the ns:document-id
 * elements of the response. The X-OPS-Range header requests the slice
 * [start, start + count].
 *
 * @param bearer OAuth bearer token
 * @param query  the EPO query string
 * @param start  1-based index of the first result to fetch
 * @param count  number of results to fetch
 * @return the document ids found; empty when the token is blank or on error
 */
private List<EpoDocumentId> searchDocumentIds(String bearer, String query, int start, int count) {
    List<EpoDocumentId> results = new ArrayList<EpoDocumentId>();
    int end = start + count;
    if (StringUtils.isBlank(bearer)) {
        return results;
    }
    try {
        Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
        Map<String, String> headerParameters = new HashMap<String, String>();
        headerParameters.put("Authorization", "Bearer " + bearer);
        if (start >= 1 && end > start) {
            headerParameters.put("X-OPS-Range", start + "-" + end);
        }
        params.put(HEADER_PARAMETERS, headerParameters);
        URIBuilder uriBuilder = new URIBuilder(this.searchUrl);
        uriBuilder.addParameter("q", query);
        String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
        SAXBuilder saxBuilder = new SAXBuilder();
        // Remote XML: disallow DTDs and external entities to prevent XXE.
        saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        saxBuilder.setFeature("http://xml.org/sax/features/external-general-entities", false);
        saxBuilder.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        Document document = saxBuilder.build(new StringReader(response));
        Element root = document.getRootElement();
        List<Namespace> namespaces = Arrays.asList(
            Namespace.getNamespace("xlink", "http://www.w3.org/1999/xlink"),
            Namespace.getNamespace("ops", "http://ops.epo.org"),
            Namespace.getNamespace("ns", "http://www.epo.org/exchange"));
        XPathExpression<Element> xpath = XPathFactory.instance()
            .compile("//ns:document-id", Filters.element(), null, namespaces);
        for (Element documentId : xpath.evaluate(root)) {
            results.add(new EpoDocumentId(documentId, namespaces));
        }
    } catch (Exception e) {
        log.error(e.getMessage(), e);
    }
    return results;
}
/**
 * Convenience overload: fetch the records for an {@link EpoDocumentId},
 * delegating to {@link #searchDocument(String, String, String)}.
 */
private List<ImportRecord> searchDocument(String bearer, EpoDocumentId id) {
    return searchDocument(bearer, id.getId(), id.getDocumentIdType());
}
/**
 * Retrieve and transform the EPO exchange documents for the given id.
 *
 * @param bearer  OAuth bearer token
 * @param id      the document identifier
 * @param docType the document-id type used in the url template
 * @return the transformed records; empty when the token is blank or on error
 */
private List<ImportRecord> searchDocument(String bearer, String id, String docType) {
    List<ImportRecord> results = new ArrayList<ImportRecord>();
    if (StringUtils.isBlank(bearer)) {
        return results;
    }
    try {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Authorization", "Bearer " + bearer);
        Map<String, Map<String, String>> params = new HashMap<String, Map<String, String>>();
        params.put(HEADER_PARAMETERS, headers);
        // The configured url carries $(doctype) and $(id) placeholders.
        String requestUrl = this.url.replace("$(doctype)", docType).replace("$(id)", id);
        String response = liveImportClient.executeHttpGetRequest(1000, requestUrl, params);
        for (Element element : splitToRecords(response)) {
            results.add(transformSourceRecords(element));
        }
    } catch (Exception e) {
        log.error(e.getMessage(), e);
    }
    return results;
}
/**
 * Parse an EPO XML response and return the ns:exchange-document elements
 * it contains, one per record.
 *
 * @param recordsSrc raw XML returned by the remote service
 * @return the exchange-document elements, or an empty list when parsing fails
 */
private List<Element> splitToRecords(String recordsSrc) {
    try {
        SAXBuilder saxBuilder = new SAXBuilder();
        // Remote XML: disallow DTDs and external entities to prevent XXE.
        saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        saxBuilder.setFeature("http://xml.org/sax/features/external-general-entities", false);
        saxBuilder.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        Document document = saxBuilder.build(new StringReader(recordsSrc));
        Element root = document.getRootElement();
        List<Namespace> namespaces = Arrays.asList(Namespace.getNamespace("ns", "http://www.epo.org/exchange"));
        XPathExpression<Element> xpath = XPathFactory.instance().compile("//ns:exchange-document",
            Filters.element(), null, namespaces);
        List<Element> recordsList = xpath.evaluate(root);
        return recordsList;
    } catch (JDOMException | IOException e) {
        log.error(e.getMessage(), e);
        return new LinkedList<Element>();
    }
}
/**
 * Evaluate an XPath expression against the given element and return the
 * textual value of the first match, or the empty string when nothing matches.
 */
private String getElement(Element document, List<Namespace> namespaces, String path) throws JaxenException {
    XPathExpression<Object> expression =
        XPathFactory.instance().compile(path, Filters.fpassthrough(), null, namespaces);
    List<Object> matches = expression.evaluate(document);
    // exactly one element expected for any field
    return CollectionUtils.isEmpty(matches) ? StringUtils.EMPTY : getValue(matches.get(0));
}
/**
 * Extract the textual content of a node returned by an XPath evaluation,
 * whatever its concrete type (Element, Attribute, Text or plain String).
 * Unknown node types are logged and mapped to the empty string.
 */
private String getValue(Object el) {
    if (el instanceof Element) {
        return ((Element) el).getText();
    }
    if (el instanceof Attribute) {
        return ((Attribute) el).getValue();
    }
    if (el instanceof Text) {
        return ((Text) el).getText();
    }
    if (el instanceof String) {
        return (String) el;
    }
    log.error("node of type: " + el.getClass());
    return "";
}
/**
 * Set the url template used to fetch a single document; it must contain
 * the $(doctype) and $(id) placeholders.
 */
public void setUrl(String url) {
    this.url = url;
}

/**
 * Set the url of the OAuth authentication endpoint.
 */
public void setAuthUrl(String authUrl) {
    this.authUrl = authUrl;
}

/**
 * Set the url of the search endpoint.
 */
public void setSearchUrl(String searchUrl) {
    this.searchUrl = searchUrl;
}
}

View File

@@ -16,6 +16,24 @@ import java.util.Map;
*/ */
public interface LiveImportClient { public interface LiveImportClient {
public String executeHttpGetRequest(int timeout, String URL, Map<String, String> requestParams); /**
* Http GET request
*
* @param timeout The connect timeout in milliseconds
* @param URL URL
* @param params This map contains the parameters to be included in the request,
* grouped by kind (uri parameters are appended to the url, header parameters are set as HTTP headers)
* @return The response in String type converted from InputStream
*/
public String executeHttpGetRequest(int timeout, String URL, Map<String, Map<String, String>> params);
/**
* Http POST request
*
* @param URL URL
* @param params This map contains the header params to be included in the request.
* @param entry the entity value
* @return the response in String type converted from InputStream
*/
public String executeHttpPostRequest(String URL, Map<String, Map<String, String>> params, String entry);
} }

View File

@@ -14,6 +14,7 @@ import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.Optional; import java.util.Optional;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.io.IOUtils; import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpHost; import org.apache.http.HttpHost;
@@ -21,7 +22,10 @@ import org.apache.http.HttpResponse;
import org.apache.http.client.config.RequestConfig; import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.config.RequestConfig.Builder; import org.apache.http.client.config.RequestConfig.Builder;
import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.client.utils.URIBuilder; import org.apache.http.client.utils.URIBuilder;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients; import org.apache.http.impl.client.HttpClients;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
@@ -38,13 +42,16 @@ public class LiveImportClientImpl implements LiveImportClient {
private final static Logger log = LogManager.getLogger(); private final static Logger log = LogManager.getLogger();
public static final String URI_PARAMETERS = "uriParameters";
public static final String HEADER_PARAMETERS = "headerParameters";
private CloseableHttpClient httpClient; private CloseableHttpClient httpClient;
@Autowired @Autowired
private ConfigurationService configurationService; private ConfigurationService configurationService;
@Override @Override
public String executeHttpGetRequest(int timeout, String URL, Map<String, String> requestParams) { public String executeHttpGetRequest(int timeout, String URL, Map<String, Map<String, String>> params) {
HttpGet method = null; HttpGet method = null;
try (CloseableHttpClient httpClient = Optional.ofNullable(this.httpClient) try (CloseableHttpClient httpClient = Optional.ofNullable(this.httpClient)
.orElseGet(HttpClients::createDefault)) { .orElseGet(HttpClients::createDefault)) {
@@ -53,14 +60,22 @@ public class LiveImportClientImpl implements LiveImportClient {
requestConfigBuilder.setConnectionRequestTimeout(timeout); requestConfigBuilder.setConnectionRequestTimeout(timeout);
RequestConfig defaultRequestConfig = requestConfigBuilder.build(); RequestConfig defaultRequestConfig = requestConfigBuilder.build();
method = new HttpGet(getSearchUrl(URL, requestParams)); method = new HttpGet(buildUrl(URL, params.get(URI_PARAMETERS)));
method.setConfig(defaultRequestConfig); method.setConfig(defaultRequestConfig);
Map<String, String> headerParams = params.get(HEADER_PARAMETERS);
if (MapUtils.isNotEmpty(headerParams)) {
for (String param : headerParams.keySet()) {
method.setHeader(param, headerParams.get(param));
}
}
configureProxy(method, defaultRequestConfig); configureProxy(method, defaultRequestConfig);
HttpResponse httpResponse = httpClient.execute(method); HttpResponse httpResponse = httpClient.execute(method);
if (isNotSuccessfull(httpResponse)) { if (isNotSuccessfull(httpResponse)) {
throw new RuntimeException("The request failed with: " + getStatusCode(httpResponse) + " code"); throw new RuntimeException("The request failed with: " + getStatusCode(httpResponse) + " code, reason= "
+ httpResponse.getStatusLine().getReasonPhrase());
} }
InputStream inputStream = httpResponse.getEntity().getContent(); InputStream inputStream = httpResponse.getEntity().getContent();
return IOUtils.toString(inputStream, Charset.defaultCharset()); return IOUtils.toString(inputStream, Charset.defaultCharset());
@@ -74,7 +89,41 @@ public class LiveImportClientImpl implements LiveImportClient {
return StringUtils.EMPTY; return StringUtils.EMPTY;
} }
private void configureProxy(HttpGet method, RequestConfig defaultRequestConfig) { @Override
public String executeHttpPostRequest(String URL, Map<String, Map<String, String>> params, String entry) {
HttpPost method = null;
try (CloseableHttpClient httpClient = Optional.ofNullable(this.httpClient)
.orElseGet(HttpClients::createDefault)) {
Builder requestConfigBuilder = RequestConfig.custom();
RequestConfig defaultRequestConfig = requestConfigBuilder.build();
method = new HttpPost(buildUrl(URL, params.get(URI_PARAMETERS)));
method.setConfig(defaultRequestConfig);
if (StringUtils.isNotBlank(entry)) {
method.setEntity(new StringEntity(entry));
}
setHeaderParams(method, params);
configureProxy(method, defaultRequestConfig);
HttpResponse httpResponse = httpClient.execute(method);
if (isNotSuccessfull(httpResponse)) {
throw new RuntimeException();
}
InputStream inputStream = httpResponse.getEntity().getContent();
return IOUtils.toString(inputStream, Charset.defaultCharset());
} catch (Exception e1) {
log.error(e1.getMessage(), e1);
} finally {
if (Objects.nonNull(method)) {
method.releaseConnection();
}
}
return StringUtils.EMPTY;
}
private void configureProxy(HttpRequestBase method, RequestConfig defaultRequestConfig) {
String proxyHost = configurationService.getProperty("http.proxy.host"); String proxyHost = configurationService.getProperty("http.proxy.host");
String proxyPort = configurationService.getProperty("http.proxy.port"); String proxyPort = configurationService.getProperty("http.proxy.port");
if (StringUtils.isNotBlank(proxyHost) && StringUtils.isNotBlank(proxyPort)) { if (StringUtils.isNotBlank(proxyHost) && StringUtils.isNotBlank(proxyPort)) {
@@ -85,10 +134,36 @@ public class LiveImportClientImpl implements LiveImportClient {
} }
} }
private String getSearchUrl(String URL, Map<String, String> requestParams) throws URISyntaxException { /**
* Allows to set the header parameters to the HTTP Post method
*
* @param method HttpPost method
* @param params This map contains the header params to be included in the request.
*/
private void setHeaderParams(HttpPost method, Map<String, Map<String, String>> params) {
Map<String, String> headerParams = params.get(HEADER_PARAMETERS);
if (MapUtils.isNotEmpty(headerParams)) {
for (String param : headerParams.keySet()) {
method.setHeader(param, headerParams.get(param));
}
}
}
/**
* This method allows you to add the parameters contained in the requestParams map to the URL
*
* @param URL URL
* @param requestParams This map contains the parameters to be included in the request.
* Each parameter will be added to the url?(key=value)
* @return
* @throws URISyntaxException
*/
private String buildUrl(String URL, Map<String, String> requestParams) throws URISyntaxException {
URIBuilder uriBuilder = new URIBuilder(URL); URIBuilder uriBuilder = new URIBuilder(URL);
for (String param : requestParams.keySet()) { if (MapUtils.isNotEmpty(requestParams)) {
uriBuilder.setParameter(param, requestParams.get(param)); for (String param : requestParams.keySet()) {
uriBuilder.setParameter(param, requestParams.get(param));
}
} }
return uriBuilder.toString(); return uriBuilder.toString();
} }

View File

@@ -0,0 +1,173 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.apache.commons.lang.StringUtils;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jaxen.JaxenException;
import org.jdom2.Element;
import org.jdom2.Namespace;
/**
* Scopus specific implementation of {@link MetadataContributor}
* Responsible for generating the ScopusID, orcid, author name and affiliationID
* from the retrieved item.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science dot it)
*/
public class AuthorMetadataContributor extends SimpleXpathMetadatumContributor {

    private static final Namespace NAMESPACE = Namespace.getNamespace("http://www.w3.org/2005/Atom");

    private MetadataFieldConfig orcid;
    private MetadataFieldConfig scopusId;
    private MetadataFieldConfig authname;
    private MetadataFieldConfig affiliation;

    // Maps affiliation id (afid) -> affiliation name, filled from the record's
    // <affiliation> elements before the <author> elements are processed.
    private Map<String, String> affId2affName = new HashMap<String, String>();

    /**
     * Retrieve the metadata associated with the given object.
     * Depending on the retrieved node (using the query),
     * different types of values will be added to the MetadatumDTO list.
     *
     * @param element A class to retrieve metadata from.
     * @return A collection of import records. Only the ScopusID, orcid, author name and affiliation
     *         of the found records may be put in the record.
     */
    @Override
    public Collection<MetadatumDTO> contributeMetadata(Element element) {
        List<MetadatumDTO> values = new LinkedList<>();
        fillAffillation(element);
        try {
            List<Element> nodes = element.getChildren("author", NAMESPACE);
            for (Element el : nodes) {
                List<MetadatumDTO> metadatums = getMetadataOfAuthors(el);
                if (Objects.nonNull(metadatums)) {
                    values.addAll(metadatums);
                }
            }
        } catch (JaxenException e) {
            throw new RuntimeException(e);
        }
        return values;
    }

    /**
     * Retrieve the ScopusID, orcid, author name and affiliation metadata
     * associated with the given author element. Blank values are skipped
     * (no metadatum is created for them).
     *
     * @param element the author element to read from
     * @return the list of metadata extracted for this author
     * @throws JaxenException if Xpath evaluation failed
     */
    private List<MetadatumDTO> getMetadataOfAuthors(Element element) throws JaxenException {
        List<MetadatumDTO> metadatums = new ArrayList<MetadatumDTO>();
        Element authname = element.getChild("authname", NAMESPACE);
        Element scopusId = element.getChild("authid", NAMESPACE);
        Element orcid = element.getChild("orcid", NAMESPACE);
        Element afid = element.getChild("afid", NAMESPACE);
        addMetadatum(metadatums, getMetadata(getElementValue(authname), this.authname));
        addMetadatum(metadatums, getMetadata(getElementValue(scopusId), this.scopusId));
        addMetadatum(metadatums, getMetadata(getElementValue(orcid), this.orcid));
        // <afid> may be missing for an author: resolve the value null-safely before
        // looking up the affiliation name (previously afid.getValue() could NPE here).
        String afidValue = getElementValue(afid);
        addMetadatum(metadatums, getMetadata(StringUtils.isNotBlank(afidValue)
                   ? this.affId2affName.get(afidValue) : null, this.affiliation));
        return metadatums;
    }

    // Adds the metadatum to the list only when it is not null.
    private void addMetadatum(List<MetadatumDTO> list, MetadatumDTO metadatum) {
        if (Objects.nonNull(metadatum)) {
            list.add(metadatum);
        }
    }

    // Null-safe accessor: the element's text value, or "" when the element is absent.
    private String getElementValue(Element element) {
        if (Objects.nonNull(element)) {
            return element.getValue();
        }
        return StringUtils.EMPTY;
    }

    /**
     * Builds a MetadatumDTO for the given value using the given field configuration.
     *
     * @param value        the metadata value; blank values yield {@code null}
     * @param metadaConfig the target field configuration (schema.element.qualifier)
     * @return the built metadatum, or {@code null} when the value is blank
     */
    private MetadatumDTO getMetadata(String value, MetadataFieldConfig metadaConfig) {
        if (StringUtils.isBlank(value)) {
            return null;
        }
        MetadatumDTO metadata = new MetadatumDTO();
        metadata.setElement(metadaConfig.getElement());
        metadata.setQualifier(metadaConfig.getQualifier());
        metadata.setSchema(metadaConfig.getSchema());
        metadata.setValue(value);
        return metadata;
    }

    // Fills the afid -> affiliation-name map from the record's <affiliation> elements.
    private void fillAffillation(Element element) {
        try {
            List<Element> nodes = element.getChildren("affiliation", NAMESPACE);
            for (Element el : nodes) {
                fillAffiliation2Name(el);
            }
        } catch (JaxenException e) {
            throw new RuntimeException(e);
        }
    }

    // Records one afid -> affilname pair when both children are present.
    private void fillAffiliation2Name(Element element) throws JaxenException {
        Element affilationName = element.getChild("affilname", NAMESPACE);
        Element affilationId = element.getChild("afid", NAMESPACE);
        if (Objects.nonNull(affilationId) && Objects.nonNull(affilationName)) {
            affId2affName.put(affilationId.getValue(), affilationName.getValue());
        }
    }

    public MetadataFieldConfig getAuthname() {
        return authname;
    }

    public void setAuthname(MetadataFieldConfig authname) {
        this.authname = authname;
    }

    public MetadataFieldConfig getOrcid() {
        return orcid;
    }

    public void setOrcid(MetadataFieldConfig orcid) {
        this.orcid = orcid;
    }

    public MetadataFieldConfig getScopusId() {
        return scopusId;
    }

    public void setScopusId(MetadataFieldConfig scopusId) {
        this.scopusId = scopusId;
    }

    public MetadataFieldConfig getAffiliation() {
        return affiliation;
    }

    public void setAffiliation(MetadataFieldConfig affiliation) {
        this.affiliation = affiliation;
    }
}

View File

@@ -0,0 +1,312 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.annotation.Resource;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jaxen.JaxenException;
import org.jdom2.Attribute;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.Text;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Required;
/**
* Custom MetadataContributor to manage Epo ID.
* Need as input <publication-reference> element and all children.
*
* @author Pasquale Cavallo
*/
public class EpoIdMetadataContributor implements MetadataContributor<Element> {

    protected MetadataFieldConfig field;

    // If true, the contributed id is prefixed with its document type
    // ("epodoc:..." / "docdb:...").
    private boolean needType;

    /**
     * This property will be used in ID definition.
     * If this is true, id will be in the form docType:EpoID, otherwise EpoID will be returned
     *
     * @param needType if true, docType will be included in id definition
     */
    public void setNeedType(boolean needType) {
        this.needType = needType;
    }

    /**
     * Return prefixToNamespaceMapping
     *
     * @return a prefixToNamespaceMapping map
     */
    public Map<String, String> getPrefixToNamespaceMapping() {
        return prefixToNamespaceMapping;
    }

    protected MetadataFieldMapping<Element, MetadataContributor<Element>> metadataFieldMapping;

    /**
     * Return metadataFieldMapping
     *
     * @return MetadataFieldMapping
     */
    public MetadataFieldMapping<Element, MetadataContributor<Element>> getMetadataFieldMapping() {
        return metadataFieldMapping;
    }

    /**
     * Set the metadataFieldMapping of this SimpleXpathMetadatumContributor
     *
     * @param metadataFieldMapping the new mapping.
     */
    public void setMetadataFieldMapping(
        MetadataFieldMapping<Element, MetadataContributor<Element>> metadataFieldMapping) {
        this.metadataFieldMapping = metadataFieldMapping;
    }

    /**
     * Set the prefixToNamespaceMapping for this object,
     *
     * @param prefixToNamespaceMapping the new mapping.
     */
    @Resource(name = "isiFullprefixMapping")
    public void setPrefixToNamespaceMapping(Map<String, String> prefixToNamespaceMapping) {
        this.prefixToNamespaceMapping = prefixToNamespaceMapping;
    }

    protected Map<String, String> prefixToNamespaceMapping;

    /**
     * Initialize EpoIdMetadataContributor with a query, prefixToNamespaceMapping and MetadataFieldConfig
     *
     * @param query query string
     * @param prefixToNamespaceMapping metadata prefix to namespace mapping
     * @param field
     * <a href="https://github.com/DSpace/DSpace/tree/master/dspace-api/src/main/java/org/dspace/importer/external#metadata-mapping-">MetadataFieldConfig</a>
     */
    public EpoIdMetadataContributor(String query, Map<String, String> prefixToNamespaceMapping,
                                    MetadataFieldConfig field) {
        this.query = query;
        this.prefixToNamespaceMapping = prefixToNamespaceMapping;
        this.field = field;
    }

    /**
     * Empty constructor for EpoIdMetadataContributor
     */
    public EpoIdMetadataContributor() {
    }

    protected String query;

    /**
     * Return the MetadataFieldConfig used while retrieving MetadatumDTO
     *
     * @return MetadataFieldConfig
     */
    public MetadataFieldConfig getField() {
        return field;
    }

    /**
     * Setting the MetadataFieldConfig
     *
     * @param field MetadataFieldConfig used while retrieving MetadatumDTO
     */
    @Required
    public void setField(MetadataFieldConfig field) {
        this.field = field;
    }

    /**
     * Return query used to create an xpathExpression on, this query is used to
     *
     * @return the query this instance is based on
     */
    public String getQuery() {
        return query;
    }

    @Required
    public void setQuery(String query) {
        this.query = query;
    }

    /**
     * Retrieve the metadata associated with the given object.
     * Depending on the retrieved node (using the query), different types of values will be added to the MetadatumDTO
     * list
     *
     * @param element A class to retrieve metadata from.
     * @return a collection of import records. Only the identifier of the found records may be put in the record.
     */
    @Override
    public Collection<MetadatumDTO> contributeMetadata(Element element) {
        List<MetadatumDTO> values = new LinkedList<>();
        try {
            List<Namespace> namespaces = Arrays.asList(
                Namespace.getNamespace("xlink", "http://www.w3.org/1999/xlink"),
                Namespace.getNamespace("ops", "http://ops.epo.org"),
                Namespace.getNamespace("ns", "http://www.epo.org/exchange"));
            XPathExpression<Element> xpath = XPathFactory.instance().compile(query, Filters.element(), null,
                namespaces);
            List<Element> elements = xpath.evaluate(element);
            for (Element el : elements) {
                EpoDocumentId document = new EpoDocumentId(el, namespaces);
                MetadatumDTO metadatum = new MetadatumDTO();
                metadatum.setElement(field.getElement());
                metadatum.setQualifier(field.getQualifier());
                metadatum.setSchema(field.getSchema());
                if (needType) {
                    metadatum.setValue(document.getIdAndType());
                } else {
                    metadatum.setValue(document.getId());
                }
                values.add(metadatum);
            }
            return values;
        } catch (JaxenException e) {
            // Carry the failing query in the rethrown exception instead of
            // printing it to stderr (the previous System.err.println was debug output).
            throw new RuntimeException("Error evaluating xpath '" + query + "': " + e.getMessage(), e);
        }
    }

    /**
     * This class maps EPO's response metadata needs to extract epo ID.
     *
     * @author Pasquale Cavallo
     *
     */
    public static class EpoDocumentId {
        private String documentIdType;
        private String country;
        private String docNumber;
        private String kind;
        // Stored for completeness; only id-related fields are used by getId/getIdAndType.
        private String date;

        private List<Namespace> namespaces;

        public static final String DOCDB = "docdb";
        public static final String EPODOC = "epodoc";
        public static final String ORIGIN = "origin";

        public EpoDocumentId(Element documentId, List<Namespace> namespaces) throws JaxenException {
            this.namespaces = namespaces;
            Element preferredId = null;
            // Prefer the "epodoc" flavour of the document id when present;
            // fall back to the element itself otherwise.
            XPathExpression<Object> xpath = XPathFactory.instance().compile(
                "./ns:document-id[@document-id-type=\"epodoc\"]", Filters.fpassthrough(), null, namespaces);
            List<Object> nodes = xpath.evaluate(documentId);
            if (CollectionUtils.isNotEmpty(nodes)) {
                preferredId = (Element) nodes.get(0);
            }
            if (Objects.isNull(preferredId)) {
                preferredId = documentId;
            }
            this.documentIdType = buildDocumentIdType(preferredId);
            this.country = buildCountry(preferredId);
            this.docNumber = buildDocNumber(preferredId);
            this.kind = buildKind(preferredId);
            this.date = buildDate(preferredId);
        }

        private String buildDocumentIdType(Element documentId) throws JaxenException {
            return getElement(documentId, "./@document-id-type");
        }

        private String buildCountry(Element documentId) throws JaxenException {
            return getElement(documentId, "./ns:country");
        }

        private String buildDocNumber(Element documentId) throws JaxenException {
            return getElement(documentId, "./ns:doc-number");
        }

        private String buildKind(Element documentId) throws JaxenException {
            return getElement(documentId, "./ns:kind");
        }

        private String buildDate(Element documentId) throws JaxenException {
            return getElement(documentId, "./ns:date");
        }

        public String getDocumentIdType() {
            return documentIdType;
        }

        /**
         * This method compute the epo ID from fields
         *
         * @return the EPO id
         */
        public String getId() {
            if (DOCDB.equals(documentIdType)) {
                return country + "." + docNumber + "." + kind;
            } else if (EPODOC.equals(documentIdType)) {
                return docNumber + ((kind != null) ? kind : StringUtils.EMPTY);
            } else {
                return StringUtils.EMPTY;
            }
        }

        /**
         * Computes the epo ID prefixed with the document-id type ("epodoc:" / "docdb:").
         *
         * @return the prefixed EPO id, or the empty string for unknown types
         */
        public String getIdAndType() {
            if (EPODOC.equals(documentIdType)) {
                return documentIdType + ":" + docNumber + ((kind != null) ? kind : "");
            } else if (DOCDB.equals(documentIdType)) {
                return documentIdType + ":" + country + "." + docNumber + "." + kind;
            } else {
                return StringUtils.EMPTY;
            }
        }

        // Evaluates the given path against documentId and returns the value of the
        // first node found (exactly one element is expected for any field).
        private String getElement(Element documentId, String path) throws JaxenException {
            if (Objects.isNull(documentId)) {
                return StringUtils.EMPTY;
            }
            XPathExpression<Object> xpath = XPathFactory.instance().compile(path, Filters.fpassthrough(), null,
                namespaces);
            List<Object> nodes = xpath.evaluate(documentId);
            //exactly one element expected for any field
            return CollectionUtils.isNotEmpty(nodes) ? getValue(nodes.get(0)) : StringUtils.EMPTY;
        }

        // Extracts the text value of an xpath result node of any supported type.
        private String getValue(Object el) {
            if (el instanceof Element) {
                return ((Element) el).getText();
            } else if (el instanceof Attribute) {
                return ((Attribute) el).getValue();
            } else if (el instanceof String) {
                return (String) el;
            } else if (el instanceof Text) {
                return ((Text) el).getText();
            } else {
                return StringUtils.EMPTY;
            }
        }
    }
}

View File

@@ -0,0 +1,110 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import org.apache.commons.lang3.StringUtils;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
/**
* Scopus specific implementation of {@link MetadataContributor}
* Responsible for generating the Scopus startPage and endPage from the retrieved item.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science.com)
*/
public class PageRangeXPathMetadataContributor extends SimpleXpathMetadatumContributor {

    private MetadataFieldConfig startPageMetadata;
    private MetadataFieldConfig endPageMetadata;

    /**
     * Retrieve the metadata associated with the given Element object.
     * Depending on the retrieved node (using the query),
     * StartPage and EndPage values will be added to the MetadatumDTO list
     *
     * @param el A class to retrieve metadata from.
     * @return A collection of import records. Only the StartPage and EndPage
     *         of the found records may be put in the record.
     */
    @Override
    public Collection<MetadatumDTO> contributeMetadata(Element el) {
        List<MetadatumDTO> values = new LinkedList<>();
        for (String ns : prefixToNamespaceMapping.keySet()) {
            for (Element node : el.getChildren(query, Namespace.getNamespace(ns))) {
                List<MetadatumDTO> pageMetadata = buildPageMetadata(node.getValue());
                if (Objects.nonNull(pageMetadata)) {
                    values.addAll(pageMetadata);
                }
            }
        }
        return values;
    }

    /**
     * Splits a "start-end" page-range value and builds the corresponding metadata.
     * A single value (no dash) yields only the start page; a blank value yields null.
     *
     * @param value the raw page-range string, e.g. "12-34"
     * @return the built metadata list, or {@code null} when the value is blank
     */
    private List<MetadatumDTO> buildPageMetadata(String value) {
        if (StringUtils.isBlank(value)) {
            return null;
        }
        List<MetadatumDTO> result = new ArrayList<MetadatumDTO>();
        String[] parts = value.split("-");
        if (parts.length >= 1) {
            result.add(toMetadatum(startPageMetadata, parts[0]));
        }
        if (parts.length == 2) {
            result.add(toMetadatum(endPageMetadata, parts[1]));
        }
        return result;
    }

    // Builds one MetadatumDTO for the given field configuration and value.
    private MetadatumDTO toMetadatum(MetadataFieldConfig config, String value) {
        MetadatumDTO dto = new MetadatumDTO();
        dto.setValue(value);
        dto.setElement(config.getElement());
        dto.setQualifier(config.getQualifier());
        dto.setSchema(config.getSchema());
        return dto;
    }

    public MetadataFieldConfig getStartPageMetadata() {
        return startPageMetadata;
    }

    public void setStartPageMetadata(MetadataFieldConfig startPageMetadata) {
        this.startPageMetadata = startPageMetadata;
    }

    public MetadataFieldConfig getEndPageMetadata() {
        return endPageMetadata;
    }

    public void setEndPageMetadata(MetadataFieldConfig endPageMetadata) {
        this.endPageMetadata = endPageMetadata;
    }
}

View File

@@ -0,0 +1,66 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
/**
* This contributor replace specific character in the metadata value.
* It is useful for some provider (e.g. Scopus) which use containing "/" character.
* Actually, "/" will never encode by framework in URL building. In the same ways, if we
* encode "/" -> %2F, it will be encoded by framework and become %252F.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science.com)
*/
public class ReplaceCharacterXPathMetadataContributor extends SimpleXpathMetadatumContributor {

    private char characterToBeReplaced;
    private char characterToReplaceWith;

    /**
     * Contributes one metadatum per element matched by the configured query,
     * with every occurrence of characterToBeReplaced replaced by
     * characterToReplaceWith in the value.
     *
     * @param element the element to read values from
     * @return the contributed metadata; never contains null entries
     */
    @Override
    public Collection<MetadatumDTO> contributeMetadata(Element element) {
        List<MetadatumDTO> values = new LinkedList<>();
        for (String ns : prefixToNamespaceMapping.keySet()) {
            List<Element> nodes = element.getChildren(query, Namespace.getNamespace(ns));
            for (Element el : nodes) {
                MetadatumDTO metadatum = getMetadatum(field, el.getValue());
                // getMetadatum returns null when no field is configured; previously
                // those nulls were added to the result list and could NPE downstream.
                if (Objects.nonNull(metadatum)) {
                    values.add(metadatum);
                }
            }
        }
        return values;
    }

    /**
     * Builds a metadatum with the character replacement applied to the value.
     *
     * @param field the target field configuration; {@code null} yields {@code null}
     * @param value the raw value; a null value is kept as null
     * @return the built metadatum, or {@code null} when field is null
     */
    private MetadatumDTO getMetadatum(MetadataFieldConfig field, String value) {
        if (Objects.isNull(field)) {
            return null;
        }
        MetadatumDTO dcValue = new MetadatumDTO();
        dcValue.setValue(value == null ? null : value.replace(characterToBeReplaced, characterToReplaceWith));
        dcValue.setElement(field.getElement());
        dcValue.setQualifier(field.getQualifier());
        dcValue.setSchema(field.getSchema());
        return dcValue;
    }

    /**
     * Sets the character to be replaced, given as an int code point
     * (Spring XML configuration cannot express char literals directly).
     */
    public void setCharacterToBeReplaced(int characterToBeReplaced) {
        this.characterToBeReplaced = (char) characterToBeReplaced;
    }

    /**
     * Sets the replacement character, given as an int code point.
     */
    public void setCharacterToReplaceWith(int characterToReplaceWith) {
        this.characterToReplaceWith = (char) characterToReplaceWith;
    }
}

View File

@@ -0,0 +1,65 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
* This contributor is able to concat multi value.
* Given a certain path, if it contains several nodes,
* the values of nodes will be concatenated into a single one.
* The concrete example we can see in the file wos-responce.xml in the <abstract_text> node,
* which may contain several <p> paragraphs,
* this Contributor allows concatenating all <p> paragraphs. to obtain a single one.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
*/
public class SimpleConcatContributor extends SimpleXpathMetadatumContributor {

    private final static Logger log = LogManager.getLogger();

    /**
     * Evaluates the configured xpath query against the given element and
     * concatenates the text of every matching element into a single value.
     * A metadatum is contributed only when the concatenated text is not blank.
     *
     * @param t the element to evaluate the query against
     * @return a collection holding at most one concatenated metadatum
     */
    @Override
    public Collection<MetadatumDTO> contributeMetadata(Element t) {
        List<MetadatumDTO> values = new LinkedList<>();
        StringBuilder text = new StringBuilder();
        List<Namespace> namespaces = new ArrayList<Namespace>();
        for (String ns : prefixToNamespaceMapping.keySet()) {
            namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
        }
        XPathExpression<Object> xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), null,
                namespaces);
        List<Object> nodes = xpath.evaluate(t);
        for (Object el : nodes) {
            if (el instanceof Element) {
                Element element = (Element) el;
                if (StringUtils.isNotBlank(element.getText())) {
                    text.append(element.getText());
                }
            } else {
                // Non-element nodes are not expected here; log and skip them.
                // Parameterized logging avoids eager string concatenation.
                log.warn("node of type: {}", el.getClass());
            }
        }
        if (StringUtils.isNotBlank(text.toString())) {
            values.add(metadataFieldMapping.toDCValue(field, text.toString()));
        }
        return values;
    }
}

View File

@@ -0,0 +1,75 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
* Web of Science specific implementation of {@link MetadataContributor}.
* This contributor can perform research on multi-paths.
* For example, to populate the subject metadata, in the Web of Science response
* the values are contained in different paths,
* so this Contributor allows you to collect the values by configuring the paths in the paths list.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
*/
public class SimpleMultiplePathContributor extends SimpleXpathMetadatumContributor {

    private final static Logger log = LogManager.getLogger();

    // The xpath expressions to evaluate; each match contributes one metadatum.
    private List<String> paths;

    public SimpleMultiplePathContributor() {}

    public SimpleMultiplePathContributor(List<String> paths) {
        this.paths = paths;
    }

    /**
     * Evaluates each configured path against the given element and contributes
     * one metadatum (mapped on {@code field}) per matching element.
     *
     * @param t the element to evaluate the paths against
     * @return the metadata collected from all paths
     */
    @Override
    public Collection<MetadatumDTO> contributeMetadata(Element t) {
        List<MetadatumDTO> values = new LinkedList<>();
        // The namespace list does not depend on the path, so build it once
        // instead of rebuilding it on every loop iteration.
        List<Namespace> namespaces = new ArrayList<Namespace>();
        for (String ns : prefixToNamespaceMapping.keySet()) {
            namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
        }
        for (String path : this.paths) {
            XPathExpression<Object> xpath = XPathFactory.instance().compile(path, Filters.fpassthrough(), null,
                    namespaces);
            List<Object> nodes = xpath.evaluate(t);
            for (Object el : nodes) {
                if (el instanceof Element) {
                    values.add(metadataFieldMapping.toDCValue(field, ((Element) el).getText()));
                } else {
                    // Non-element nodes are not expected here; log and skip them.
                    log.warn("node of type: {}", el.getClass());
                }
            }
        }
        return values;
    }

    public List<String> getPaths() {
        return paths;
    }

    public void setPaths(List<String> paths) {
        this.paths = paths;
    }
}

View File

@@ -0,0 +1,91 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Attribute;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.Text;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
* This contributor can be used when parsing an XML file,
* particularly to extract a date and convert it to a specific format.
* In the variable dateFormatFrom the read format should be configured,
* instead in the variable dateFormatTo the format you want to obtain.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
public class SimpleXpathDateFormatMetadataContributor extends SimpleXpathMetadatumContributor {

    // SimpleDateFormat instances are NOT thread-safe; all use is guarded by
    // a lock in getMetadatum, since this contributor is typically a shared
    // Spring singleton.
    private DateFormat dateFormatFrom;
    private DateFormat dateFormatTo;

    /**
     * Sets the pattern the incoming date values are expected in (e.g. "yyyy-MM-dd").
     */
    public void setDateFormatFrom(String dateFormatFrom) {
        this.dateFormatFrom = new SimpleDateFormat(dateFormatFrom);
    }

    /**
     * Sets the pattern the contributed date values are emitted in.
     */
    public void setDateFormatTo(String dateFormatTo) {
        this.dateFormatTo = new SimpleDateFormat(dateFormatTo);
    }

    /**
     * Evaluates the configured xpath query against the given element and
     * contributes one metadatum per matching node, with the value converted
     * from dateFormatFrom to dateFormatTo.
     *
     * @param element the element to evaluate the query against
     * @return the contributed metadata; never contains null entries
     */
    @Override
    public Collection<MetadatumDTO> contributeMetadata(Element element) {
        List<MetadatumDTO> values = new LinkedList<>();
        List<Namespace> namespaces = new ArrayList<Namespace>();
        for (String ns : prefixToNamespaceMapping.keySet()) {
            namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
        }
        XPathExpression<Object> xpath = XPathFactory.instance()
                .compile(query, Filters.fpassthrough(), null, namespaces);
        List<Object> nodes = xpath.evaluate(element);
        for (Object el : nodes) {
            MetadatumDTO metadatum = null;
            if (el instanceof Element) {
                metadatum = getMetadatum(field, ((Element) el).getText());
            } else if (el instanceof Attribute) {
                metadatum = getMetadatum(field, ((Attribute) el).getValue());
            } else if (el instanceof String) {
                metadatum = getMetadatum(field, (String) el);
            } else if (el instanceof Text) {
                // Text nodes previously bypassed the date conversion (they went
                // through metadataFieldMapping.toDCValue); route them through
                // getMetadatum so all node types are formatted consistently.
                metadatum = getMetadatum(field, ((Text) el).getText());
            } else {
                // NOTE(review): kept from the original; consider a logger instead of stderr.
                System.err.println("node of type: " + el.getClass());
            }
            // getMetadatum returns null when no field is configured; do not add nulls.
            if (metadatum != null) {
                values.add(metadatum);
            }
        }
        return values;
    }

    /**
     * Builds a metadatum whose value is the given date converted from
     * dateFormatFrom to dateFormatTo; if parsing fails, the raw value is kept.
     *
     * @param field the target field configuration; {@code null} yields {@code null}
     * @param value the raw date value
     * @return the built metadatum, or {@code null} when field is null
     */
    private MetadatumDTO getMetadatum(MetadataFieldConfig field, String value) {
        if (field == null) {
            return null;
        }
        MetadatumDTO dcValue = new MetadatumDTO();
        try {
            // SimpleDateFormat is not thread-safe: guard format/parse with a lock.
            synchronized (this) {
                dcValue.setValue(dateFormatTo.format(dateFormatFrom.parse(value)));
            }
        } catch (ParseException e) {
            // Unparseable dates are passed through unchanged (best effort).
            dcValue.setValue(value);
        }
        dcValue.setElement(field.getElement());
        dcValue.setQualifier(field.getQualifier());
        dcValue.setSchema(field.getSchema());
        return dcValue;
    }
}

View File

@@ -0,0 +1,69 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
 * This contributor checks, for each node returned by the configured xpath query,
 * whether the node carries the configured attribute; when the attribute is
 * present and non-blank, its value is mapped onto the configured metadata field.
 *
 * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot com)
 */
public class SimpleXpathMetadatumAndAttributeContributor extends SimpleXpathMetadatumContributor {

    private final static Logger log = LogManager.getLogger();

    // Name of the attribute whose value is extracted from each matching node.
    private String attribute;

    /**
     * Evaluate the configured query against the given element and collect the
     * values of the configured attribute on each matching Element node.
     *
     * @param t the XML element to query
     * @return the metadata extracted from the matching nodes
     */
    @Override
    public Collection<MetadatumDTO> contributeMetadata(Element t) {
        List<MetadatumDTO> values = new LinkedList<>();
        List<Namespace> namespaces = new ArrayList<>();
        for (String ns : prefixToNamespaceMapping.keySet()) {
            namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
        }
        XPathExpression<Object> xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), null,
                                                                        namespaces);
        for (Object el : xpath.evaluate(t)) {
            if (el instanceof Element) {
                Element element = (Element) el;
                String attributeValue = element.getAttributeValue(this.attribute);
                if (StringUtils.isNotBlank(attributeValue)) {
                    values.add(metadataFieldMapping.toDCValue(this.field, attributeValue));
                }
            } else {
                // Parameterized logging avoids eager string concatenation.
                log.warn("Skipping unsupported node of type: {}", el.getClass());
            }
        }
        return values;
    }

    public String getAttribute() {
        return attribute;
    }

    public void setAttribute(String attribute) {
        this.attribute = attribute;
    }
}

View File

@@ -33,10 +33,10 @@ import org.springframework.beans.factory.annotation.Autowired;
* @author Roeland Dillen (roeland at atmire dot com) * @author Roeland Dillen (roeland at atmire dot com)
*/ */
public class SimpleXpathMetadatumContributor implements MetadataContributor<Element> { public class SimpleXpathMetadatumContributor implements MetadataContributor<Element> {
private MetadataFieldConfig field;
private static final Logger log private static final Logger log = org.apache.logging.log4j.LogManager.getLogger();
= org.apache.logging.log4j.LogManager.getLogger();
protected MetadataFieldConfig field;
/** /**
* Return prefixToNamespaceMapping * Return prefixToNamespaceMapping
@@ -47,7 +47,7 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor<Elem
return prefixToNamespaceMapping; return prefixToNamespaceMapping;
} }
private MetadataFieldMapping<Element, MetadataContributor<Element>> metadataFieldMapping; protected MetadataFieldMapping<Element, MetadataContributor<Element>> metadataFieldMapping;
/** /**
* Return metadataFieldMapping * Return metadataFieldMapping
@@ -79,7 +79,7 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor<Elem
this.prefixToNamespaceMapping = prefixToNamespaceMapping; this.prefixToNamespaceMapping = prefixToNamespaceMapping;
} }
private Map<String, String> prefixToNamespaceMapping; protected Map<String, String> prefixToNamespaceMapping;
/** /**
* Initialize SimpleXpathMetadatumContributor with a query, prefixToNamespaceMapping and MetadataFieldConfig * Initialize SimpleXpathMetadatumContributor with a query, prefixToNamespaceMapping and MetadataFieldConfig
@@ -103,7 +103,7 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor<Elem
} }
private String query; protected String query;
/** /**
* Return the MetadataFieldConfig used while retrieving MetadatumDTO * Return the MetadataFieldConfig used while retrieving MetadatumDTO
@@ -154,10 +154,7 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor<Elem
for (String ns : prefixToNamespaceMapping.keySet()) { for (String ns : prefixToNamespaceMapping.keySet()) {
namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns)); namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
} }
XPathExpression<Object> xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), null,namespaces);
XPathExpression<Object> xpath =
XPathFactory.instance().compile(query, Filters.fpassthrough(), null, namespaces);
List<Object> nodes = xpath.evaluate(t); List<Object> nodes = xpath.evaluate(t);
for (Object el : nodes) { for (Object el : nodes) {
if (el instanceof Element) { if (el instanceof Element) {
@@ -174,4 +171,5 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor<Elem
} }
return values; return values;
} }
}
}

View File

@@ -0,0 +1,160 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.annotation.Resource;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
 * Web Of Science specific implementation of {@link MetadataContributor}.
 * For each node returned by the configured query, this contributor reads the
 * attribute named by {@code this.attribute}; if the attribute value is one of
 * the keys configured in {@code this.attributeValue2metadata}, the node's value
 * is mapped onto the corresponding metadata field. If {@code this.firstChild}
 * is true, the value of the child element named {@code this.childName} is used
 * instead of the node's own text.
 * The mapping and configuration of this class can be found in the wos-integration.xml file.
 *
 * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
 */
public class WosAttribute2ValueContributor implements MetadataContributor<Element> {

    private final static Logger log = LogManager.getLogger();

    private String query;

    private String attribute;

    private boolean firstChild;

    private String childName;

    private Map<String, String> prefixToNamespaceMapping;

    private Map<String, MetadataFieldConfig> attributeValue2metadata;

    private MetadataFieldMapping<Element, MetadataContributor<Element>> metadataFieldMapping;

    public WosAttribute2ValueContributor() {}

    public WosAttribute2ValueContributor(String query,
                                         Map<String, String> prefixToNamespaceMapping,
                                         Map<String, MetadataFieldConfig> attributeValue2metadata) {
        this.query = query;
        this.prefixToNamespaceMapping = prefixToNamespaceMapping;
        this.attributeValue2metadata = attributeValue2metadata;
    }

    /**
     * Evaluate the configured query against the given element and map every
     * matching Element node whose discriminating attribute is configured.
     *
     * @param t the XML element to query
     * @return the metadata extracted from the matching nodes
     */
    @Override
    public Collection<MetadatumDTO> contributeMetadata(Element t) {
        List<MetadatumDTO> values = new LinkedList<>();
        List<Namespace> namespaces = new ArrayList<>();
        for (String ns : prefixToNamespaceMapping.keySet()) {
            namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
        }
        XPathExpression<Object> xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), null,
                                                                        namespaces);
        for (Object el : xpath.evaluate(t)) {
            if (el instanceof Element) {
                Element element = (Element) el;
                setField(element.getAttributeValue(this.attribute), element, values);
            } else {
                log.warn("Skipping unsupported node of type: {}", el.getClass());
            }
        }
        return values;
    }

    /**
     * Append a metadatum to {@code values} when {@code attributeValue} matches
     * one of the configured keys of attributeValue2metadata.
     */
    private void setField(String attributeValue, Element el, List<MetadatumDTO> values) {
        // Direct lookup replaces the previous O(n) scan over the key set.
        MetadataFieldConfig fieldConfig = attributeValue2metadata.get(attributeValue);
        if (fieldConfig == null) {
            return;
        }
        if (this.firstChild) {
            Element child = el.getChild(this.childName);
            if (child == null) {
                // Fix: a missing child previously caused a NullPointerException.
                log.warn("Child element '{}' not found, skipping value", this.childName);
                return;
            }
            values.add(metadataFieldMapping.toDCValue(fieldConfig, child.getValue()));
        } else {
            values.add(metadataFieldMapping.toDCValue(fieldConfig, el.getText()));
        }
    }

    public MetadataFieldMapping<Element, MetadataContributor<Element>> getMetadataFieldMapping() {
        return metadataFieldMapping;
    }

    public void setMetadataFieldMapping(
        MetadataFieldMapping<Element, MetadataContributor<Element>> metadataFieldMapping) {
        this.metadataFieldMapping = metadataFieldMapping;
    }

    @Resource(name = "isiFullprefixMapping")
    public void setPrefixToNamespaceMapping(Map<String, String> prefixToNamespaceMapping) {
        this.prefixToNamespaceMapping = prefixToNamespaceMapping;
    }

    public Map<String, String> getPrefixToNamespaceMapping() {
        return prefixToNamespaceMapping;
    }

    public String getAttribute() {
        return attribute;
    }

    public void setAttribute(String attribute) {
        this.attribute = attribute;
    }

    public Map<String, MetadataFieldConfig> getAttributeValue2metadata() {
        return attributeValue2metadata;
    }

    public void setAttributeValue2metadata(Map<String, MetadataFieldConfig> attributeValue2metadata) {
        this.attributeValue2metadata = attributeValue2metadata;
    }

    public String getQuery() {
        return query;
    }

    public void setQuery(String query) {
        this.query = query;
    }

    public boolean isFirstChild() {
        return firstChild;
    }

    public void setFirstChild(boolean firstChild) {
        this.firstChild = firstChild;
    }

    public String getChildName() {
        return childName;
    }

    public void setChildName(String childName) {
        this.childName = childName;
    }
}

View File

@@ -0,0 +1,71 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
 * This contributor retrieves the identifiers configured in
 * {@code this.identifier2field} from the Web of Science response: each node's
 * "type" attribute selects the target metadata field and its "value" attribute
 * supplies the value.
 * The mapping and configuration of this class can be found in the wos-integration.xml file.
 *
 * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
 */
public class WosIdentifierContributor extends SimpleXpathMetadatumContributor {

    // Maps the identifier "type" attribute value to the metadata field it populates.
    protected Map<String, MetadataFieldConfig> identifier2field;

    /**
     * Evaluate the configured query and map every matching element whose
     * "type" attribute is a configured identifier.
     *
     * @param element the XML element to query
     * @return the extracted identifier metadata
     */
    @Override
    public Collection<MetadatumDTO> contributeMetadata(Element element) {
        List<MetadatumDTO> values = new LinkedList<>();
        List<Namespace> namespaces = new ArrayList<>();
        for (String ns : prefixToNamespaceMapping.keySet()) {
            namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
        }
        XPathExpression<Element> xpath =
            XPathFactory.instance().compile(query, Filters.element(), null, namespaces);
        for (Element el : xpath.evaluate(element)) {
            setIdentifier(el.getAttributeValue("type"), el, values);
        }
        return values;
    }

    /**
     * Append the identifier carried by {@code el} to {@code values} when its
     * "type" attribute matches a configured identifier mapping.
     * (Renamed from the misspelled "setIdentyfier"; the method is private, so
     * the rename is safe.)
     */
    private void setIdentifier(String type, Element el, List<MetadatumDTO> values) {
        // Direct lookup replaces the previous O(n) scan over the key set.
        MetadataFieldConfig fieldConfig = identifier2field.get(type);
        if (fieldConfig != null) {
            values.add(metadataFieldMapping.toDCValue(fieldConfig, el.getAttributeValue("value")));
        }
    }

    public Map<String, MetadataFieldConfig> getIdentifier2field() {
        return identifier2field;
    }

    public void setIdentifier2field(Map<String, MetadataFieldConfig> identifier2field) {
        this.identifier2field = identifier2field;
    }
}

View File

@@ -0,0 +1,68 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
 * Web Of Science specific implementation of {@link MetadataContributor}.
 * For each matching node, this contributor reads the "name" child element and,
 * when its "role" attribute is "researcher_id", extracts the ResearcherID
 * from its "r_id" attribute.
 *
 * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
 */
public class WosIdentifierRidContributor extends SimpleXpathMetadatumContributor {

    private final static Logger log = LogManager.getLogger();

    /**
     * Evaluate the configured query and collect ResearcherID values from the
     * "name" child of each matching Element node.
     *
     * @param t the XML element to query
     * @return the extracted ResearcherID metadata
     */
    @Override
    public Collection<MetadatumDTO> contributeMetadata(Element t) {
        List<MetadatumDTO> values = new LinkedList<>();
        List<Namespace> namespaces = new ArrayList<>();
        for (String ns : prefixToNamespaceMapping.keySet()) {
            namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
        }
        XPathExpression<Object> xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), null,
                                                                        namespaces);
        for (Object el : xpath.evaluate(t)) {
            if (el instanceof Element) {
                Element name = ((Element) el).getChild("name");
                // Nodes without a "name" child are silently skipped.
                if (Objects.nonNull(name)) {
                    setIdentifier(name.getAttributeValue("role"), name, values);
                }
            } else {
                // Parameterized logging avoids eager string concatenation.
                log.warn("Skipping unsupported node of type: {}", el.getClass());
            }
        }
        return values;
    }

    /**
     * Append the "r_id" attribute value of {@code el} when the role is
     * "researcher_id" and the value is non-blank.
     * (Renamed from the misspelled "setIdentyfier"; the method is private, so
     * the rename is safe.)
     */
    private void setIdentifier(String type, Element el, List<MetadatumDTO> values) {
        if (StringUtils.equals("researcher_id", type)) {
            String value = el.getAttributeValue("r_id");
            if (StringUtils.isNotBlank(value)) {
                values.add(metadataFieldMapping.toDCValue(this.field, value));
            }
        }
    }
}

View File

@@ -14,23 +14,23 @@ import java.io.InputStreamReader;
import java.io.Reader; import java.io.Reader;
import java.io.StringReader; import java.io.StringReader;
import java.util.Collection; import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Callable; import java.util.concurrent.Callable;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Invocation;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import com.google.common.io.CharStreams; import com.google.common.io.CharStreams;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.client.utils.URIBuilder;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord; import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query; import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.FileMultipleOccurencesException; import org.dspace.importer.external.exception.FileMultipleOccurencesException;
import org.dspace.importer.external.exception.FileSourceException; import org.dspace.importer.external.exception.FileSourceException;
import org.dspace.importer.external.exception.MetadataSourceException; import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService; import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.components.FileSource; import org.dspace.importer.external.service.components.FileSource;
import org.dspace.importer.external.service.components.QuerySource; import org.dspace.importer.external.service.components.QuerySource;
@@ -41,6 +41,7 @@ import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder; import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression; import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory; import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Autowired;
/** /**
* Implements a data source for querying PubMed Central * Implements a data source for querying PubMed Central
@@ -51,13 +52,16 @@ import org.jdom2.xpath.XPathFactory;
public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element> public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
implements QuerySource, FileSource { implements QuerySource, FileSource {
private String baseAddress; private String urlFetch;
private String urlSearch;
// it is protected so that subclass can mock it for testing private int attempt = 3;
protected WebTarget pubmedWebTarget;
private List<String> supportedExtensions; private List<String> supportedExtensions;
@Autowired
private LiveImportClient liveImportClient;
/** /**
* Set the file extensions supported by this metadata service * Set the file extensions supported by this metadata service
* *
@@ -187,29 +191,7 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
* @throws Exception on generic exception * @throws Exception on generic exception
*/ */
@Override @Override
public void init() throws Exception { public void init() throws Exception {}
Client client = ClientBuilder.newClient();
WebTarget webTarget = client.target(baseAddress);
pubmedWebTarget = webTarget.queryParam("db", "pubmed");
}
/**
* Return the baseAddress set to this object
*
* @return The String object that represents the baseAddress of this object
*/
public String getBaseAddress() {
return baseAddress;
}
/**
* Set the baseAddress to this object
*
* @param baseAddress The String object that represents the baseAddress of this object
*/
public void setBaseAddress(String baseAddress) {
this.baseAddress = baseAddress;
}
private class GetNbRecords implements Callable<Integer> { private class GetNbRecords implements Callable<Integer> {
@@ -226,24 +208,27 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
@Override @Override
public Integer call() throws Exception { public Integer call() throws Exception {
WebTarget getRecordIdsTarget = pubmedWebTarget URIBuilder uriBuilder = new URIBuilder(urlSearch);
.queryParam("term", query.getParameterAsClass("query", String.class)); uriBuilder.addParameter("db", "pubmed");
uriBuilder.addParameter("term", query.getParameterAsClass("query", String.class));
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String response = StringUtils.EMPTY;
int countAttempt = 0;
while (StringUtils.isBlank(response) && countAttempt <= attempt) {
countAttempt++;
response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
}
getRecordIdsTarget = getRecordIdsTarget.path("esearch.fcgi"); if (StringUtils.isBlank(response)) {
throw new RuntimeException("After " + attempt
+ " attempts to contact the PubMed service, a correct answer could not be received."
+ " The request was made with this URL:" + uriBuilder.toString());
}
Invocation.Builder invocationBuilder = getRecordIdsTarget.request(MediaType.TEXT_PLAIN_TYPE); return Integer.parseInt(getSingleElementValue(response, "Count"));
Response response = invocationBuilder.get();
String responseString = response.readEntity(String.class);
String count = getSingleElementValue(responseString, "Count");
return Integer.parseInt(count);
} }
} }
private String getSingleElementValue(String src, String elementName) { private String getSingleElementValue(String src, String elementName) {
String value = null; String value = null;
@@ -286,41 +271,61 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
Integer start = query.getParameterAsClass("start", Integer.class); Integer start = query.getParameterAsClass("start", Integer.class);
Integer count = query.getParameterAsClass("count", Integer.class); Integer count = query.getParameterAsClass("count", Integer.class);
if (count == null || count < 0) { if (Objects.isNull(count) || count < 0) {
count = 10; count = 10;
} }
if (start == null || start < 0) { if (Objects.isNull(start) || start < 0) {
start = 0; start = 0;
} }
List<ImportRecord> records = new LinkedList<ImportRecord>(); List<ImportRecord> records = new LinkedList<ImportRecord>();
WebTarget getRecordIdsTarget = pubmedWebTarget.queryParam("term", queryString); URIBuilder uriBuilder = new URIBuilder(urlSearch);
getRecordIdsTarget = getRecordIdsTarget.queryParam("retstart", start); uriBuilder.addParameter("db", "pubmed");
getRecordIdsTarget = getRecordIdsTarget.queryParam("retmax", count); uriBuilder.addParameter("retstart", start.toString());
getRecordIdsTarget = getRecordIdsTarget.queryParam("usehistory", "y"); uriBuilder.addParameter("retmax", count.toString());
getRecordIdsTarget = getRecordIdsTarget.path("esearch.fcgi"); uriBuilder.addParameter("usehistory", "y");
uriBuilder.addParameter("term", queryString);
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String response = StringUtils.EMPTY;
int countAttempt = 0;
while (StringUtils.isBlank(response) && countAttempt <= attempt) {
countAttempt++;
response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
}
Invocation.Builder invocationBuilder = getRecordIdsTarget.request(MediaType.TEXT_PLAIN_TYPE); if (StringUtils.isBlank(response)) {
throw new RuntimeException("After " + attempt
+ " attempts to contact the PubMed service, a correct answer could not be received."
+ " The request was made with this URL:" + uriBuilder.toString());
}
Response response = invocationBuilder.get(); String queryKey = getSingleElementValue(response, "QueryKey");
String responseString = response.readEntity(String.class); String webEnv = getSingleElementValue(response, "WebEnv");
String queryKey = getSingleElementValue(responseString, "QueryKey"); URIBuilder uriBuilder2 = new URIBuilder(urlFetch);
String webEnv = getSingleElementValue(responseString, "WebEnv"); uriBuilder2.addParameter("db", "pubmed");
uriBuilder2.addParameter("retstart", start.toString());
uriBuilder2.addParameter("retmax", count.toString());
uriBuilder2.addParameter("WebEnv", webEnv);
uriBuilder2.addParameter("query_key", queryKey);
uriBuilder2.addParameter("retmode", "xml");
Map<String, Map<String, String>> params2 = new HashMap<String, Map<String,String>>();
String response2 = StringUtils.EMPTY;
countAttempt = 0;
while (StringUtils.isBlank(response2) && countAttempt <= attempt) {
countAttempt++;
response2 = liveImportClient.executeHttpGetRequest(1000, uriBuilder2.toString(), params2);
}
WebTarget getRecordsTarget = pubmedWebTarget.queryParam("WebEnv", webEnv); if (StringUtils.isBlank(response2)) {
getRecordsTarget = getRecordsTarget.queryParam("query_key", queryKey); throw new RuntimeException("After " + attempt
getRecordsTarget = getRecordsTarget.queryParam("retmode", "xml"); + " attempts to contact the PubMed service, a correct answer could not be received."
getRecordsTarget = getRecordsTarget.path("efetch.fcgi"); + " The request was made with this URL:" + uriBuilder2.toString());
getRecordsTarget = getRecordsTarget.queryParam("retmax", count); }
getRecordsTarget = getRecordsTarget.queryParam("retstart", start);
invocationBuilder = getRecordsTarget.request(MediaType.TEXT_PLAIN_TYPE); List<Element> elements = splitToRecords(response2);
response = invocationBuilder.get();
List<Element> elements = splitToRecords(response.readEntity(String.class));
for (Element record : elements) { for (Element record : elements) {
records.add(transformSourceRecords(record)); records.add(transformSourceRecords(record));
@@ -361,23 +366,29 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
@Override @Override
public ImportRecord call() throws Exception { public ImportRecord call() throws Exception {
String id = query.getParameterAsClass("id", String.class);
WebTarget getRecordTarget = pubmedWebTarget.queryParam("id", id); URIBuilder uriBuilder = new URIBuilder(urlFetch);
getRecordTarget = getRecordTarget.queryParam("retmode", "xml"); uriBuilder.addParameter("db", "pubmed");
getRecordTarget = getRecordTarget.path("efetch.fcgi"); uriBuilder.addParameter("retmode", "xml");
uriBuilder.addParameter("id", query.getParameterAsClass("id", String.class));
Invocation.Builder invocationBuilder = getRecordTarget.request(MediaType.TEXT_PLAIN_TYPE); Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String response = StringUtils.EMPTY;
Response response = invocationBuilder.get(); int countAttempt = 0;
while (StringUtils.isBlank(response) && countAttempt <= attempt) {
List<Element> elements = splitToRecords(response.readEntity(String.class)); countAttempt++;
response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
if (elements.isEmpty()) {
return null;
} }
return transformSourceRecords(elements.get(0)); if (StringUtils.isBlank(response)) {
throw new RuntimeException("After " + attempt
+ " attempts to contact the PubMed service, a correct answer could not be received."
+ " The request was made with this URL:" + uriBuilder.toString());
}
List<Element> elements = splitToRecords(response);
return elements.isEmpty() ? null : transformSourceRecords(elements.get(0));
} }
} }
@@ -396,40 +407,57 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
@Override @Override
public Collection<ImportRecord> call() throws Exception { public Collection<ImportRecord> call() throws Exception {
WebTarget getRecordIdsTarget = pubmedWebTarget URIBuilder uriBuilder = new URIBuilder(urlSearch);
.queryParam("term", query.getParameterAsClass("term", String.class)); uriBuilder.addParameter("db", "pubmed");
getRecordIdsTarget = getRecordIdsTarget uriBuilder.addParameter("usehistory", "y");
.queryParam("field", query.getParameterAsClass("field", String.class)); uriBuilder.addParameter("term", query.getParameterAsClass("term", String.class));
getRecordIdsTarget = getRecordIdsTarget.queryParam("usehistory", "y"); uriBuilder.addParameter("field", query.getParameterAsClass("field", String.class));
getRecordIdsTarget = getRecordIdsTarget.path("esearch.fcgi");
Invocation.Builder invocationBuilder = getRecordIdsTarget.request(MediaType.TEXT_PLAIN_TYPE); Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String response = StringUtils.EMPTY;
int countAttempt = 0;
while (StringUtils.isBlank(response) && countAttempt <= attempt) {
countAttempt++;
response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
}
Response response = invocationBuilder.get(); if (StringUtils.isBlank(response)) {
String responseString = response.readEntity(String.class); throw new RuntimeException("After " + attempt
+ " attempts to contact the PubMed service, a correct answer could not be received."
+ " The request was made with this URL:" + uriBuilder.toString());
}
String queryKey = getSingleElementValue(responseString, "QueryKey"); String webEnv = getSingleElementValue(response, "WebEnv");
String webEnv = getSingleElementValue(responseString, "WebEnv"); String queryKey = getSingleElementValue(response, "QueryKey");
WebTarget getRecordsTarget = pubmedWebTarget.queryParam("WebEnv", webEnv); URIBuilder uriBuilder2 = new URIBuilder(urlFetch);
getRecordsTarget = getRecordsTarget.queryParam("query_key", queryKey); uriBuilder2.addParameter("db", "pubmed");
getRecordsTarget = getRecordsTarget.queryParam("retmode", "xml"); uriBuilder2.addParameter("retmode", "xml");
getRecordsTarget = getRecordsTarget.path("efetch.fcgi"); uriBuilder2.addParameter("WebEnv", webEnv);
uriBuilder2.addParameter("query_key", queryKey);
invocationBuilder = getRecordsTarget.request(MediaType.TEXT_PLAIN_TYPE); Map<String, Map<String, String>> params2 = new HashMap<String, Map<String,String>>();
response = invocationBuilder.get(); String response2 = StringUtils.EMPTY;
countAttempt = 0;
while (StringUtils.isBlank(response2) && countAttempt <= attempt) {
countAttempt++;
response2 = liveImportClient.executeHttpGetRequest(1000, uriBuilder2.toString(), params2);
}
String xml = response.readEntity(String.class); if (StringUtils.isBlank(response2)) {
return parseXMLString(xml); throw new RuntimeException("After " + attempt
+ " attempts to contact the PubMed service, a correct answer could not be received."
+ " The request was made with this URL:" + uriBuilder2.toString());
}
return parseXMLString(response2);
} }
} }
@Override @Override
public List<ImportRecord> getRecords(InputStream inputStream) throws FileSourceException { public List<ImportRecord> getRecords(InputStream inputStream) throws FileSourceException {
String xml = null;
try (Reader reader = new InputStreamReader(inputStream, "UTF-8")) { try (Reader reader = new InputStreamReader(inputStream, "UTF-8")) {
xml = CharStreams.toString(reader); String xml = CharStreams.toString(reader);
return parseXMLString(xml); return parseXMLString(xml);
} catch (IOException e) { } catch (IOException e) {
throw new FileSourceException ("Cannot read XML from InputStream", e); throw new FileSourceException ("Cannot read XML from InputStream", e);
@@ -456,4 +484,21 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
} }
return records; return records;
} }
}
public String getUrlFetch() {
return urlFetch;
}
public void setUrlFetch(String urlFetch) {
this.urlFetch = urlFetch;
}
public String getUrlSearch() {
return urlSearch;
}
public void setUrlSearch(String urlSearch) {
this.urlSearch = urlSearch;
}
}

View File

@@ -0,0 +1,37 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.pubmedeurope;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
 * An implementation of {@link AbstractMetadataFieldMapping}.
 * Responsible for defining the mapping of the PubMed Europe metadatum fields
 * onto the DSpace metadatum fields.
 *
 * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
 */
public class PubmedEuropeFieldMapping extends AbstractMetadataFieldMapping {

    /**
     * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it
     * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
     * what metadata field is generated.
     *
     * @param metadataFieldMap The map containing the link between retrieved metadata and the metadata
     *                         that will be set on the item.
     */
    @Override
    @Resource(name = "pubmedEuropeMetadataFieldMap")
    public void setMetadataFieldMap(Map metadataFieldMap) {
        super.setMetadataFieldMap(metadataFieldMap);
    }
}

View File

@@ -0,0 +1,419 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.pubmedeurope;
import java.io.IOException;
import java.io.StringReader;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Callable;
import javax.el.MethodNotFoundException;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpException;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.utils.URIBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.components.QuerySource;
import org.jaxen.JaxenException;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.CollectionUtils;
/**
 * Implements a data source for querying PubMed Europe (Europe PMC).
 *
 * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
 */
public class PubmedEuropeMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
        implements QuerySource {

    private final static Logger log = LogManager.getLogger();

    // Base URL of the Europe PMC search endpoint; injected via setter.
    private String url;

    @Autowired
    private LiveImportClient liveImportClient;

    @Override
    public String getImportSource() {
        return "pubmedeu";
    }

    /**
     * Get a single record from PubMed Europe.
     *
     * @param id Identifier for the record
     * @return The first matching record, or {@code null} if none matched
     * @throws MetadataSourceException If the underlying methods throw any exception.
     */
    @Override
    public ImportRecord getRecord(String id) throws MetadataSourceException {
        List<ImportRecord> records = retry(new SearchByIdCallable(id));
        return CollectionUtils.isEmpty(records) ? null : records.get(0);
    }

    /**
     * Find the number of records matching a query.
     *
     * @param query a query string to base the search on.
     * @return the sum of the matching records over this import source
     * @throws MetadataSourceException if the underlying methods throw any exception.
     */
    @Override
    public int getRecordsCount(String query) throws MetadataSourceException {
        return retry(new CountByQueryCallable(query));
    }

    /**
     * Find the number of records matching a query.
     *
     * @param query A query object to base the search on.
     * @return The sum of the matching records over this import source
     * @throws MetadataSourceException If the underlying methods throw any exception.
     */
    @Override
    public int getRecordsCount(Query query) throws MetadataSourceException {
        return retry(new CountByQueryCallable(query));
    }

    /**
     * Find records matching a string query.
     *
     * @param query A query string to base the search on.
     * @param start Offset to start at
     * @param count Number of records to retrieve.
     * @return A set of records. Fully transformed.
     * @throws MetadataSourceException If the underlying methods throw any exception.
     */
    @Override
    public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException {
        return retry(new SearchByQueryCallable(query, count, start));
    }

    /**
     * Find records based on a query object.
     *
     * @param query A query object to base the search on.
     * @return A set of records. Fully transformed.
     * @throws MetadataSourceException If the underlying methods throw any exception.
     */
    @Override
    public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
        return retry(new SearchByQueryCallable(query));
    }

    /**
     * Get a single record from PubMed Europe.
     *
     * @param query A query matching a single record
     * @return The first matching record, or {@code null} if none matched
     * @throws MetadataSourceException If the underlying methods throw any exception.
     */
    @Override
    public ImportRecord getRecord(Query query) throws MetadataSourceException {
        List<ImportRecord> records = retry(new SearchByIdCallable(query));
        return CollectionUtils.isEmpty(records) ? null : records.get(0);
    }

    /**
     * Finds records based on a query object (title / author / year matching).
     *
     * @param query A query object to base the search on.
     * @return A collection of import records.
     * @throws MetadataSourceException If the underlying methods throw any exception.
     */
    @Override
    public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
        return retry(new FindMatchingRecordCallable(query));
    }

    @Override
    public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
        throw new MethodNotFoundException("This method is not implemented for PubMed Europe");
    }

    @Override
    public void init() throws Exception {}

    /**
     * Fetch records by PubMed Europe external identifier.
     *
     * @param pubmedID the external record identifier (EXT_ID)
     * @param start    offset into the result list
     * @param size     number of results requested (values below 1 are raised to 1)
     */
    public List<ImportRecord> getByPubmedEuropeID(String pubmedID, Integer start, Integer size)
        throws IOException, HttpException {
        String query = "(EXT_ID:" + pubmedID + ")";
        return search(query, size < 1 ? 1 : size, start);
    }

    /**
     * This class is a Callable implementation to get PubMed Europe entries based on
     * query object.
     *
     * This Callable uses as query value the string queryString passed to constructor.
     * If the object will be constructed through a Query instance, the Query's map entry with key "query" is used.
     * Pagination is supported too, using the value of the Query's map with keys "start" and "count".
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
     */
    private class SearchByQueryCallable implements Callable<List<ImportRecord>> {

        private Query query;

        private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
            query = new Query();
            query.addParameter("query", queryString);
            query.addParameter("count", maxResult);
            query.addParameter("start", start);
        }

        private SearchByQueryCallable(Query query) {
            this.query = query;
        }

        @Override
        public List<ImportRecord> call() throws Exception {
            Integer count = query.getParameterAsClass("count", Integer.class);
            Integer start = query.getParameterAsClass("start", Integer.class);
            String queryString = query.getParameterAsClass("query", String.class);
            return search(queryString, count, start);
        }
    }

    /**
     * This class is a Callable implementation to get a PubMed Europe entry using its PubMed Europe ID.
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
     */
    private class SearchByIdCallable implements Callable<List<ImportRecord>> {

        private Query query;

        private SearchByIdCallable(Query query) {
            this.query = query;
        }

        private SearchByIdCallable(String id) {
            this.query = new Query();
            query.addParameter("id", id);
        }

        @Override
        public List<ImportRecord> call() throws Exception {
            return getByPubmedEuropeID(query.getParameterAsClass("id", String.class), 1 ,0);
        }
    }

    /**
     * This class is a Callable implementation to search PubMed Europe entries
     * using author, title and year.
     * Pagination is supported too, using the value of the Query's map with keys "start" and "count".
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
     */
    public class FindMatchingRecordCallable implements Callable<List<ImportRecord>> {

        private Query query;

        private FindMatchingRecordCallable(Query q) {
            query = q;
        }

        @Override
        public List<ImportRecord> call() throws Exception {
            String title = query.getParameterAsClass("title", String.class);
            String author = query.getParameterAsClass("author", String.class);
            Integer year = query.getParameterAsClass("year", Integer.class);
            Integer maxResult = query.getParameterAsClass("maxResult", Integer.class);
            Integer start = query.getParameterAsClass("start", Integer.class);
            return search(title, author, year, maxResult, start);
        }
    }

    /**
     * This class is a Callable implementation to count the number
     * of entries for a PubMed Europe query.
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
     */
    private class CountByQueryCallable implements Callable<Integer> {

        private Query query;

        private CountByQueryCallable(String queryString) {
            query = new Query();
            query.addParameter("query", queryString);
        }

        private CountByQueryCallable(Query query) {
            this.query = query;
        }

        @Override
        public Integer call() throws Exception {
            try {
                return count(query.getParameterAsClass("query", String.class));
            } catch (Exception e) {
                // Preserve the original exception as the cause instead of discarding it.
                throw new RuntimeException(e);
            }
        }
    }

    /**
     * Returns the total number of PubMed Europe publications returned by a specific query
     *
     * @param query A keyword or combination of keywords to be searched
     * @throws URISyntaxException      If URI syntax error
     * @throws ClientProtocolException The client protocol exception
     * @throws IOException             If IO error
     * @throws JaxenException          If Xpath evaluation failed
     */
    public Integer count(String query) throws URISyntaxException, ClientProtocolException, IOException, JaxenException {
        try {
            Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
            String response = liveImportClient.executeHttpGetRequest(1000, buildURI(1, query), params);
            SAXBuilder saxBuilder = new SAXBuilder();
            Document document = saxBuilder.build(new StringReader(response));
            Element root = document.getRootElement();
            // The Europe PMC response exposes the total hit count in the <hitCount> element.
            Element element = root.getChild("hitCount");
            return Integer.parseInt(element.getValue());
        } catch (JDOMException e) {
            log.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        }
    }

    /**
     * Builds a PubMed Europe query from title, author and year, then delegates to
     * {@link #search(String, Integer, Integer)}.
     *
     * NOTE(review): the leading "(" is appended unconditionally, so when title is blank the
     * generated query starts with "( AND ..." — looks like a pre-existing query-building quirk;
     * left unchanged here to avoid altering emitted queries. Confirm against the Europe PMC
     * query syntax before tightening.
     */
    public List<ImportRecord> search(String title, String author, int year, int count, int start)
        throws IOException {
        StringBuffer query = new StringBuffer();
        query.append("(");
        if (StringUtils.isNotBlank(title)) {
            query.append("TITLE:").append(title);
            query.append(")");
        }
        if (StringUtils.isNotBlank(author)) {
            // Search for a surname and (optionally) initial(s) in publication author lists
            // AUTH:einstein, AUTH:"Smith AB"
            String splitRegex = "(\\s*,\\s+|\\s*;\\s+|\\s*;+|\\s*,+|\\s+)";
            String[] authors = author.split(splitRegex);
            if (query.length() > 0) {
                query.append(" AND ");
            }
            query.append("(");
            int countAuthors = 0;
            for (String auth : authors) {
                countAuthors++;
                query.append("AUTH:\"").append(auth).append("\"");
                if (countAuthors < authors.length) {
                    query.append(" AND ");
                }
            }
            query.append(")");
        }
        if (year != -1) {
            if (query.length() > 0) {
                query.append(" AND ");
            }
            query.append("( PUB_YEAR:").append(year).append(")");
        }
        query.append(")");
        return search(query.toString(), count, start);
    }

    /**
     * Returns a list of PubMed Europe publication records
     *
     * @param query A keyword or combination of keywords to be searched
     * @param size  The number of search results per page
     * @param start Start number for the acquired search result list
     * @throws IOException If IO error
     */
    public List<ImportRecord> search(String query, Integer size, Integer start) throws IOException {
        List<ImportRecord> results = new ArrayList<>();
        try {
            URIBuilder uriBuilder = new URIBuilder(this.url);
            uriBuilder.addParameter("format", "xml");
            uriBuilder.addParameter("resulttype", "core");
            uriBuilder.addParameter("pageSize", String.valueOf(size));
            uriBuilder.addParameter("query", query);
            Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
            boolean lastPage = false;
            int skipped = 0;
            while (!lastPage || results.size() < size) {
                String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
                String cursorMark = StringUtils.EMPTY;
                if (StringUtils.isNotBlank(response)) {
                    SAXBuilder saxBuilder = new SAXBuilder();
                    Document document = saxBuilder.build(new StringReader(response));
                    XPathFactory xpfac = XPathFactory.instance();
                    XPathExpression<Element> xPath = xpfac.compile("//responseWrapper/resultList/result",
                            Filters.element());
                    List<Element> records = xPath.evaluate(document);
                    if (records.size() > 0) {
                        // Skip "start" leading records across pages, then collect the rest.
                        for (Element item : records) {
                            if (start > skipped) {
                                skipped++;
                            } else {
                                results.add(transformSourceRecords(item));
                            }
                        }
                    } else {
                        lastPage = true;
                        break;
                    }
                    Element root = document.getRootElement();
                    Element nextCursorMark = root.getChild("nextCursorMark");
                    cursorMark = Objects.nonNull(nextCursorMark) ? nextCursorMark.getValue() : StringUtils.EMPTY;
                }
                if (StringUtils.isNotBlank(cursorMark)) {
                    // Fixed: the paging parameter was misspelled "cursorMar", so cursor-based
                    // pagination never advanced past the first page.
                    uriBuilder.setParameter("cursorMark", cursorMark);
                } else {
                    lastPage = true;
                }
            }
        } catch (URISyntaxException | JDOMException e) {
            log.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        }
        return results;
    }

    // Builds the count-query URI (a single-result page is enough to read <hitCount>).
    private String buildURI(Integer pageSize, String query) throws URISyntaxException {
        URIBuilder uriBuilder = new URIBuilder(this.url);
        uriBuilder.addParameter("format", "xml");
        uriBuilder.addParameter("resulttype", "core");
        uriBuilder.addParameter("pageSize", String.valueOf(pageSize));
        uriBuilder.addParameter("query", query);
        return uriBuilder.toString();
    }

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }
}

View File

@@ -131,9 +131,9 @@ public class ScieloImportMetadataSourceServiceImpl extends AbstractImportMetadat
@Override @Override
public Integer call() throws Exception { public Integer call() throws Exception {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
URIBuilder uriBuilder = new URIBuilder(url + URLEncoder.encode(query, StandardCharsets.UTF_8)); URIBuilder uriBuilder = new URIBuilder(url + URLEncoder.encode(query, StandardCharsets.UTF_8));
String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params);
new HashMap<String, String>());
Map<Integer, Map<String, List<String>>> records = getRecords(resp); Map<Integer, Map<String, List<String>>> records = getRecords(resp);
return Objects.nonNull(records.size()) ? records.size() : 0; return Objects.nonNull(records.size()) ? records.size() : 0;
} }
@@ -161,9 +161,9 @@ public class ScieloImportMetadataSourceServiceImpl extends AbstractImportMetadat
Pattern risPattern = Pattern.compile(ID_PATTERN); Pattern risPattern = Pattern.compile(ID_PATTERN);
Matcher risMatcher = risPattern.matcher(scieloId); Matcher risMatcher = risPattern.matcher(scieloId);
if (risMatcher.matches()) { if (risMatcher.matches()) {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
URIBuilder uriBuilder = new URIBuilder(url + URLEncoder.encode(scieloId, StandardCharsets.UTF_8)); URIBuilder uriBuilder = new URIBuilder(url + URLEncoder.encode(scieloId, StandardCharsets.UTF_8));
String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params);
new HashMap<String, String>());
Map<Integer, Map<String, List<String>>> records = getRecords(resp); Map<Integer, Map<String, List<String>>> records = getRecords(resp);
if (Objects.nonNull(records) & !records.isEmpty()) { if (Objects.nonNull(records) & !records.isEmpty()) {
results.add(transformSourceRecords(records.get(1))); results.add(transformSourceRecords(records.get(1)));
@@ -207,8 +207,8 @@ public class ScieloImportMetadataSourceServiceImpl extends AbstractImportMetadat
URIBuilder uriBuilder = new URIBuilder(url + URLEncoder.encode(q, StandardCharsets.UTF_8)); URIBuilder uriBuilder = new URIBuilder(url + URLEncoder.encode(q, StandardCharsets.UTF_8));
uriBuilder.addParameter("start", start.toString()); uriBuilder.addParameter("start", start.toString());
uriBuilder.addParameter("count", count.toString()); uriBuilder.addParameter("count", count.toString());
String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
new HashMap<String, String>()); String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params);
Map<Integer, Map<String, List<String>>> records = getRecords(resp); Map<Integer, Map<String, List<String>>> records = getRecords(resp);
for (int record : records.keySet()) { for (int record : records.keySet()) {
results.add(transformSourceRecords(records.get(record))); results.add(transformSourceRecords(records.get(record)));

View File

@@ -0,0 +1,38 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.scopus.service;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
 * An implementation of {@link AbstractMetadataFieldMapping}.
 * Responsible for defining the mapping of the Scopus metadatum fields
 * onto the DSpace metadatum fields.
 *
 * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
 */
@SuppressWarnings("rawtypes")
public class ScopusFieldMapping extends AbstractMetadataFieldMapping {

    /**
     * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it
     * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
     * what metadatafield is generated.
     *
     * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to
     *                         the item.
     */
    @Override
    @Resource(name = "scopusMetadataFieldMap")
    public void setMetadataFieldMap(Map metadataFieldMap) {
        // Delegate to the parent mapping; the map is injected from Spring configuration.
        super.setMetadataFieldMap(metadataFieldMap);
    }
}

View File

@@ -0,0 +1,421 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.scopus.service;
import static org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl.URI_PARAMETERS;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Callable;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.el.MethodNotFoundException;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.DoiCheck;
import org.dspace.importer.external.service.components.QuerySource;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implements a data source for querying Scopus
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science dot com)
*/
public class ScopusImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
implements QuerySource {
private int timeout = 1000;
int itemPerPage = 25;
private String url;
private String apiKey;
private String instKey;
private String viewMode;
@Autowired
private LiveImportClient liveImportClient;
public LiveImportClient getLiveImportClient() {
return liveImportClient;
}
public void setLiveImportClient(LiveImportClient liveImportClient) {
this.liveImportClient = liveImportClient;
}
@Override
public void init() throws Exception {}
/**
* The string that identifies this import implementation. Preferable a URI
*
* @return the identifying uri
*/
@Override
public String getImportSource() {
return "scopus";
}
@Override
public int getRecordsCount(String query) throws MetadataSourceException {
if (isEID(query)) {
return retry(new FindByIdCallable(query)).size();
}
if (DoiCheck.isDoi(query)) {
query = DoiCheck.purgeDoiValue(query);
}
return retry(new SearchNBByQueryCallable(query));
}
@Override
public int getRecordsCount(Query query) throws MetadataSourceException {
if (isEID(query.toString())) {
return retry(new FindByIdCallable(query.toString())).size();
}
if (DoiCheck.isDoi(query.toString())) {
query.addParameter("query", DoiCheck.purgeDoiValue(query.toString()));
}
return retry(new SearchNBByQueryCallable(query));
}
@Override
public Collection<ImportRecord> getRecords(String query, int start,
int count) throws MetadataSourceException {
if (isEID(query)) {
return retry(new FindByIdCallable(query));
}
if (DoiCheck.isDoi(query)) {
query = DoiCheck.purgeDoiValue(query);
}
return retry(new SearchByQueryCallable(query, count, start));
}
@Override
public Collection<ImportRecord> getRecords(Query query)
throws MetadataSourceException {
if (isEID(query.toString())) {
return retry(new FindByIdCallable(query.toString()));
}
if (DoiCheck.isDoi(query.toString())) {
query.addParameter("query", DoiCheck.purgeDoiValue(query.toString()));
}
return retry(new SearchByQueryCallable(query));
}
@Override
public ImportRecord getRecord(Query query) throws MetadataSourceException {
List<ImportRecord> records = null;
if (DoiCheck.isDoi(query.toString())) {
query.addParameter("query", DoiCheck.purgeDoiValue(query.toString()));
}
if (isEID(query.toString())) {
records = retry(new FindByIdCallable(query.toString()));
} else {
records = retry(new SearchByQueryCallable(query));
}
return records == null || records.isEmpty() ? null : records.get(0);
}
@Override
public Collection<ImportRecord> findMatchingRecords(Item item)
throws MetadataSourceException {
throw new MethodNotFoundException("This method is not implemented for Scopus");
}
@Override
public ImportRecord getRecord(String id) throws MetadataSourceException {
List<ImportRecord> records = retry(new FindByIdCallable(id));
return records == null || records.isEmpty() ? null : records.get(0);
}
@Override
public Collection<ImportRecord> findMatchingRecords(Query query)
throws MetadataSourceException {
if (isEID(query.toString())) {
return retry(new FindByIdCallable(query.toString()));
}
if (DoiCheck.isDoi(query.toString())) {
query.addParameter("query", DoiCheck.purgeDoiValue(query.toString()));
}
return retry(new FindByQueryCallable(query));
}
private boolean isEID(String query) {
Pattern pattern = Pattern.compile("2-s2\\.0-\\d+");
Matcher match = pattern.matcher(query);
if (match.matches()) {
return true;
}
return false;
}
/**
* This class implements a callable to get the numbers of result
*/
private class SearchNBByQueryCallable implements Callable<Integer> {
private String query;
private SearchNBByQueryCallable(String queryString) {
this.query = queryString;
}
private SearchNBByQueryCallable(Query query) {
this.query = query.getParameterAsClass("query", String.class);
}
@Override
public Integer call() throws Exception {
if (StringUtils.isNotBlank(apiKey)) {
// Execute the request.
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
Map<String, String> requestParams = getRequestParameters(query, null, null, null);
params.put(URI_PARAMETERS, requestParams);
String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
SAXBuilder saxBuilder = new SAXBuilder();
Document document = saxBuilder.build(new StringReader(response));
Element root = document.getRootElement();
List<Namespace> namespaces = Arrays.asList(
Namespace.getNamespace("opensearch", "http://a9.com/-/spec/opensearch/1.1/"));
XPathExpression<Element> xpath = XPathFactory.instance()
.compile("opensearch:totalResults", Filters.element(), null, namespaces);
Element count = xpath.evaluateFirst(root);
try {
return Integer.parseInt(count.getText());
} catch (NumberFormatException e) {
return null;
}
}
return null;
}
}
/**
* This class is a Callable implementation to get a Scopus entry using EID
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class FindByIdCallable implements Callable<List<ImportRecord>> {
private String eid;
private FindByIdCallable(String eid) {
this.eid = eid;
}
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> results = new ArrayList<>();
String queryString = "EID(" + eid.replace("!", "/") + ")";
if (StringUtils.isNotBlank(apiKey)) {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
Map<String, String> requestParams = getRequestParameters(queryString, viewMode, null, null);
params.put(URI_PARAMETERS, requestParams);
String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
List<Element> elements = splitToRecords(response);
for (Element record : elements) {
results.add(transformSourceRecords(record));
}
}
return results;
}
}
/**
* This class implements a callable to get the items based on query parameters
*/
private class FindByQueryCallable implements Callable<List<ImportRecord>> {
private String title;
private String author;
private Integer year;
private Integer start;
private Integer count;
private FindByQueryCallable(Query query) {
this.title = query.getParameterAsClass("title", String.class);
this.year = query.getParameterAsClass("year", Integer.class);
this.author = query.getParameterAsClass("author", String.class);
this.start = query.getParameterAsClass("start", Integer.class) != null ?
query.getParameterAsClass("start", Integer.class) : 0;
this.count = query.getParameterAsClass("count", Integer.class) != null ?
query.getParameterAsClass("count", Integer.class) : 20;
}
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> results = new ArrayList<>();
String queryString = "";
StringBuffer query = new StringBuffer();
if (StringUtils.isNotBlank(title)) {
query.append("title(").append(title).append("");
}
if (StringUtils.isNotBlank(author)) {
// [FAU]
if (query.length() > 0) {
query.append(" AND ");
}
query.append("AUTH(").append(author).append(")");
}
if (year != -1) {
// [DP]
if (query.length() > 0) {
query.append(" AND ");
}
query.append("PUBYEAR IS ").append(year);
}
queryString = query.toString();
if (apiKey != null && !apiKey.equals("")) {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
Map<String, String> requestParams = getRequestParameters(queryString, viewMode, start, count);
params.put(URI_PARAMETERS, requestParams);
String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
List<Element> elements = splitToRecords(response);
for (Element record : elements) {
results.add(transformSourceRecords(record));
}
}
return results;
}
}
/**
* Find records matching a string query.
*
* @param query A query string to base the search on.
* @param start Offset to start at
* @param count Number of records to retrieve.
* @return A set of records. Fully transformed.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class SearchByQueryCallable implements Callable<List<ImportRecord>> {
private Query query;
private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
query = new Query();
query.addParameter("query", queryString);
query.addParameter("start", start);
query.addParameter("count", maxResult);
}
private SearchByQueryCallable(Query query) {
this.query = query;
}
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> results = new ArrayList<>();
String queryString = query.getParameterAsClass("query", String.class);
Integer start = query.getParameterAsClass("start", Integer.class);
Integer count = query.getParameterAsClass("count", Integer.class);
if (StringUtils.isNotBlank(apiKey)) {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
Map<String, String> requestParams = getRequestParameters(queryString, viewMode, start, count);
params.put(URI_PARAMETERS, requestParams);
String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
List<Element> elements = splitToRecords(response);
for (Element record : elements) {
results.add(transformSourceRecords(record));
}
}
return results;
}
}
private Map<String, String> getRequestParameters(String query, String viewMode, Integer start, Integer count) {
Map<String, String> params = new HashMap<String, String>();
params.put("httpAccept", "application/xml");
params.put("apiKey", apiKey);
params.put("query", query);
if (StringUtils.isNotBlank(instKey)) {
params.put("insttoken", instKey);
}
if (StringUtils.isNotBlank(viewMode)) {
params.put("view", viewMode);
}
params.put("start", (Objects.nonNull(start) ? start + "" : "0"));
params.put("count", (Objects.nonNull(count) ? count + "" : "20"));
return params;
}
private List<Element> splitToRecords(String recordsSrc) {
try {
SAXBuilder saxBuilder = new SAXBuilder();
Document document = saxBuilder.build(new StringReader(recordsSrc));
Element root = document.getRootElement();
List<Element> records = root.getChildren("entry",Namespace.getNamespace("http://www.w3.org/2005/Atom"));
return records;
} catch (JDOMException | IOException e) {
return new ArrayList<Element>();
}
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
public String getViewMode() {
return viewMode;
}
public void setViewMode(String viewMode) {
this.viewMode = viewMode;
}
public String getApiKey() {
return apiKey;
}
public String getInstKey() {
return instKey;
}
public void setApiKey(String apiKey) {
this.apiKey = apiKey;
}
public void setInstKey(String instKey) {
this.instKey = instKey;
}
}

View File

@@ -12,6 +12,7 @@ import java.util.Collection;
import java.util.HashMap; import java.util.HashMap;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.concurrent.Callable; import java.util.concurrent.Callable;
import javax.el.MethodNotFoundException; import javax.el.MethodNotFoundException;
@@ -141,8 +142,8 @@ public class VuFindImportMetadataSourceServiceImpl extends AbstractImportMetadat
uriBuilder.addParameter("limit", count.toString()); uriBuilder.addParameter("limit", count.toString());
uriBuilder.addParameter("prettyPrint", String.valueOf(true)); uriBuilder.addParameter("prettyPrint", String.valueOf(true));
uriBuilder.addParameter("lookfor", query.getParameterAsClass("query", String.class)); uriBuilder.addParameter("lookfor", query.getParameterAsClass("query", String.class));
String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
new HashMap<String, String>()); String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
JsonNode node = convertStringJsonToJsonNode(responseString); JsonNode node = convertStringJsonToJsonNode(responseString);
JsonNode resultCountNode = node.get("resultCount"); JsonNode resultCountNode = node.get("resultCount");
return resultCountNode.intValue(); return resultCountNode.intValue();
@@ -180,8 +181,8 @@ public class VuFindImportMetadataSourceServiceImpl extends AbstractImportMetadat
uriBuilder.addParameter("field[]", field); uriBuilder.addParameter("field[]", field);
} }
} }
String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
new HashMap<String, String>()); String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
return response; return response;
} }
} }
@@ -238,7 +239,8 @@ public class VuFindImportMetadataSourceServiceImpl extends AbstractImportMetadat
uriBuilder.addParameter("field[]", field); uriBuilder.addParameter("field[]", field);
} }
} }
return liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), new HashMap<String, String>()); Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
return liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
} }
} }
@@ -288,7 +290,8 @@ public class VuFindImportMetadataSourceServiceImpl extends AbstractImportMetadat
} }
} }
uriBuilder.addParameter("lookfor", filter); uriBuilder.addParameter("lookfor", filter);
return liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), new HashMap<String, String>()); Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
return liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
} }
} }

View File

@@ -0,0 +1,37 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.wos.service;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
 * {@link AbstractMetadataFieldMapping} implementation for Web of Science.
 * Declares how metadata retrieved from the WOS API is mapped onto DSpace
 * metadata fields; the concrete map is injected from the Spring bean named
 * "wosMetadataFieldMap".
 *
 * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science dot it)
 */
@SuppressWarnings("rawtypes")
public class WOSFieldMapping extends AbstractMetadataFieldMapping {

    /**
     * Injects the WOS-to-DSpace metadata field map. Note that while each key
     * must be unique, it only matters here for postprocessing of the value:
     * the mapped MetadatumContributor has full control over which metadata
     * field is actually generated.
     *
     * @param metadataFieldMap map linking the retrieved WOS metadata to the
     *                         metadata that will be set on the item
     */
    @Override
    @SuppressWarnings("unchecked")
    @Resource(name = "wosMetadataFieldMap")
    public void setMetadataFieldMap(Map metadataFieldMap) {
        super.setMetadataFieldMap(metadataFieldMap);
    }
}

View File

@@ -0,0 +1,329 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.wos.service;
import static org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl.HEADER_PARAMETERS;
import java.io.IOException;
import java.io.StringReader;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.el.MethodNotFoundException;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.DoiCheck;
import org.dspace.importer.external.service.components.QuerySource;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implements a data source for querying Web of Science.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
*/
public class WOSImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
    implements QuerySource {

    private final static Logger log = LogManager.getLogger();

    // An explicit Author Identifier clause, e.g. "AI=(A-1234-2010)".
    private static final String AI_PATTERN = "^AI=(.*)";
    // A bare 15-digit ISI/UT accession number.
    private static final Pattern ISI_PATTERN = Pattern.compile("^\\d{15}$");

    // Timeout (milliseconds) applied to every HTTP call towards the WOS API.
    private int timeout = 1000;

    private String url;
    private String urlSearch;
    private String apiKey;

    @Autowired
    private LiveImportClient liveImportClient;

    @Override
    public void init() throws Exception {}

    /**
     * The string that identifies this import implementation. Preferable a URI
     *
     * @return the identifying uri
     */
    @Override
    public String getImportSource() {
        return "wos";
    }

    @Override
    public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException {
        return retry(new SearchByQueryCallable(query, count, start));
    }

    @Override
    public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
        return retry(new SearchByQueryCallable(query));
    }

    @Override
    public ImportRecord getRecord(Query query) throws MetadataSourceException {
        List<ImportRecord> records = retry(new SearchByQueryCallable(query));
        return records == null || records.isEmpty() ? null : records.get(0);
    }

    @Override
    public ImportRecord getRecord(String id) throws MetadataSourceException {
        List<ImportRecord> records = retry(new FindByIdCallable(id));
        return records == null || records.isEmpty() ? null : records.get(0);
    }

    @Override
    public int getRecordsCount(String query) throws MetadataSourceException {
        return retry(new SearchNBByQueryCallable(query));
    }

    @Override
    public int getRecordsCount(Query query) throws MetadataSourceException {
        throw new MethodNotFoundException("This method is not implemented for WOS");
    }

    @Override
    public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
        throw new MethodNotFoundException("This method is not implemented for WOS");
    }

    @Override
    public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
        throw new MethodNotFoundException("This method is not implemented for WOS");
    }

    /**
     * This class implements a callable to get the numbers of result
     */
    private class SearchNBByQueryCallable implements Callable<Integer> {

        private String query;

        private SearchNBByQueryCallable(String queryString) {
            this.query = queryString;
        }

        private SearchNBByQueryCallable(Query query) {
            this.query = query.getParameterAsClass("query", String.class);
        }

        @Override
        public Integer call() throws Exception {
            if (StringUtils.isBlank(apiKey)) {
                // No API key configured: report zero matches instead of returning null,
                // which would cause an unboxing NPE in getRecordsCount(String).
                return 0;
            }
            String queryString = URLEncoder.encode(checkQuery(query), StandardCharsets.UTF_8);
            String url = urlSearch + queryString + "&count=1&firstRecord=1";
            Map<String, Map<String, String>> params = new HashMap<String, Map<String, String>>();
            params.put(HEADER_PARAMETERS, getRequestParameters());
            String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
            Document document = newSecureSAXBuilder().build(new StringReader(response));
            XPathExpression<Element> xpath = XPathFactory.instance().compile("//*[@name=\"RecordsFound\"]",
                    Filters.element(), null);
            Element tot = xpath.evaluateFirst(document.getRootElement());
            // A missing "RecordsFound" node is treated as a response with no hits.
            return tot == null ? 0 : Integer.valueOf(tot.getValue());
        }
    }

    /**
     * This class is a Callable implementation to get a Web of Science entry using Doi
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
     */
    private class FindByIdCallable implements Callable<List<ImportRecord>> {

        private String doi;

        private FindByIdCallable(String doi) {
            this.doi = URLEncoder.encode(doi, StandardCharsets.UTF_8);
        }

        @Override
        public List<ImportRecord> call() throws Exception {
            List<ImportRecord> results = new ArrayList<>();
            if (StringUtils.isNotBlank(apiKey)) {
                String urlString = url + this.doi + "?databaseId=WOS&lang=en&count=10&firstRecord=1";
                Map<String, Map<String, String>> params = new HashMap<String, Map<String, String>>();
                params.put(HEADER_PARAMETERS, getRequestParameters());
                String response = liveImportClient.executeHttpGetRequest(timeout, urlString, params);
                for (Element record : splitToRecords(response)) {
                    results.add(transformSourceRecords(record));
                }
            }
            return results;
        }
    }

    /**
     * Find records matching a string query.
     *
     * @param query A query string to base the search on.
     * @param start Offset to start at
     * @param count Number of records to retrieve.
     * @return A set of records. Fully transformed.
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
     */
    private class SearchByQueryCallable implements Callable<List<ImportRecord>> {

        private Query query;

        private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
            query = new Query();
            query.addParameter("query", queryString);
            query.addParameter("start", start);
            query.addParameter("count", maxResult);
        }

        private SearchByQueryCallable(Query query) {
            this.query = query;
        }

        @Override
        public List<ImportRecord> call() throws Exception {
            List<ImportRecord> results = new ArrayList<>();
            String queryString = checkQuery(query.getParameterAsClass("query", String.class));
            Integer start = query.getParameterAsClass("start", Integer.class);
            Integer count = query.getParameterAsClass("count", Integer.class);
            // Default paging values guard against queries built without them
            // (the previous code NPE'd on a null "start"). WOS record indexes are 1-based.
            int firstRecord = (start != null ? start : 0) + 1;
            int pageSize = count != null ? count : 10;
            if (StringUtils.isNotBlank(apiKey)) {
                Map<String, Map<String, String>> params = new HashMap<String, Map<String, String>>();
                params.put(HEADER_PARAMETERS, getRequestParameters());
                String url = urlSearch + URLEncoder.encode(queryString, StandardCharsets.UTF_8)
                           + "&count=" + pageSize + "&firstRecord=" + firstRecord;
                String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
                for (Element el : splitToRecords(response)) {
                    results.add(transformSourceRecords(el));
                }
            }
            return results;
        }
    }

    /**
     * Builds the HTTP headers required by the WOS API: an XML Accept header
     * and the configured API key.
     */
    private Map<String, String> getRequestParameters() {
        Map<String, String> params = new HashMap<String, String>();
        params.put("Accept", "application/xml");
        params.put("X-ApiKey", this.apiKey);
        return params;
    }

    /**
     * This method check if the query contain
     * "AI=(...)" Author Identifier or a DOI "DO=(query)"
     * or Accession Number "UT=(query)".
     * Otherwise the value is placed in TS=(query) tag
     * that searches for topic terms in the following fields within a document:
     * Title, Abstract, Author keywords, Keywords Plus
     *
     * @param query
     */
    private String checkQuery(String query) {
        Pattern risPattern = Pattern.compile(AI_PATTERN);
        Matcher risMatcher = risPattern.matcher(query.trim());
        if (risMatcher.matches()) {
            return query;
        }
        if (DoiCheck.isDoi(query)) {
            // FIXME: workaround to be removed once fixed by the community the double post of query param
            if (query.startsWith(",")) {
                query = query.substring(1);
            }
            return "DO=(" + query + ")";
        } else if (isIsi(query)) {
            return "UT=(" + query + ")";
        }
        StringBuilder queryBuilder = new StringBuilder("TS=(");
        queryBuilder.append(query).append(")");
        return queryBuilder.toString();
    }

    /**
     * Returns true if the query looks like an ISI/UT accession number:
     * either a "WOS:" prefixed identifier or a bare 15-digit number.
     */
    private boolean isIsi(String query) {
        if (query.startsWith("WOS:")) {
            return true;
        }
        Matcher matcher = ISI_PATTERN.matcher(query.trim());
        return matcher.matches();
    }

    /**
     * Extracts the individual record elements from a WOS response. The payload
     * nests the records as escaped XML inside the "Records" node, so the
     * response is parsed twice. Returns an empty list on malformed or empty
     * responses instead of propagating parser errors.
     */
    private List<Element> splitToRecords(String recordsSrc) {
        try {
            SAXBuilder saxBuilder = newSecureSAXBuilder();
            Document document = saxBuilder.build(new StringReader(recordsSrc));
            Element root = document.getRootElement();
            List<Element> recordsNodes = XPathFactory.instance()
                .compile("//*[@name=\"Records\"]", Filters.element(), null)
                .evaluate(root);
            if (CollectionUtils.isEmpty(recordsNodes)) {
                // Previously this was an unguarded get(0) that threw an uncaught
                // IndexOutOfBoundsException when the node was absent.
                return new ArrayList<Element>();
            }
            String cData = recordsNodes.get(0).getValue().trim();
            Document intDocument = saxBuilder.build(new StringReader(cData));
            XPathExpression<Element> xPath = XPathFactory.instance().compile("*", Filters.element(), null);
            List<Element> records = xPath.evaluate(intDocument.getRootElement());
            return CollectionUtils.isNotEmpty(records) ? records : new ArrayList<Element>();
        } catch (JDOMException | IOException e) {
            // Keep the stack trace: e.getMessage() alone hides where parsing failed.
            log.error(e.getMessage(), e);
            return new ArrayList<Element>();
        }
    }

    /**
     * Creates a SAXBuilder hardened against XXE: the WOS response is untrusted
     * remote XML, so DOCTYPE declarations and entity expansion are disabled.
     */
    private SAXBuilder newSecureSAXBuilder() {
        SAXBuilder saxBuilder = new SAXBuilder();
        saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        saxBuilder.setExpandEntities(false);
        return saxBuilder;
    }

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public String getUrlSearch() {
        return urlSearch;
    }

    public void setUrlSearch(String urlSearch) {
        this.urlSearch = urlSearch;
    }

    public String getApiKey() {
        return apiKey;
    }

    public void setApiKey(String apiKey) {
        this.apiKey = apiKey;
    }
}

View File

@@ -0,0 +1,211 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.orcid;
import java.util.Date;
import java.util.Objects;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.Lob;
import javax.persistence.ManyToOne;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;

import org.dspace.content.Item;
import org.dspace.core.ReloadableEntity;
import org.hibernate.annotations.Type;
/**
* The ORCID history entity that it contains information relating to an attempt
* to synchronize the DSpace items and information on ORCID. While the entity
* {@link OrcidQueue} contains the data to be synchronized with ORCID, this
* entity instead contains the data synchronized with ORCID, with the result of
* the synchronization. Each record in this table is associated with a profile
* item and the entity synchronized (which can be the profile itself, a
* publication or a project/funding). If the entity is the profile itself then
* the metadata field contains the signature of the information synchronized.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
@Entity
@Table(name = "orcid_history")
public class OrcidHistory implements ReloadableEntity<Integer> {

    @Id
    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "orcid_history_id_seq")
    @SequenceGenerator(name = "orcid_history_id_seq", sequenceName = "orcid_history_id_seq", allocationSize = 1)
    private Integer id;

    /**
     * The profile item.
     */
    @ManyToOne
    @JoinColumn(name = "owner_id")
    protected Item profileItem;

    /**
     * The synchronized item.
     */
    @ManyToOne
    @JoinColumn(name = "entity_id")
    private Item entity;

    /**
     * The identifier of the synchronized resource on ORCID side. For more details
     * see https://info.orcid.org/faq/what-is-a-put-code/
     */
    @Column(name = "put_code")
    private String putCode;

    /**
     * The record type. Could be publication, funding or a profile's section.
     */
    @Column(name = "record_type")
    private String recordType;

    /**
     * A description of the synchronized resource.
     */
    @Column(name = "description")
    private String description;

    /**
     * The signature of the synchronized metadata. This is used when the entity is
     * the owner itself.
     */
    @Lob
    @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType")
    @Column(name = "metadata")
    private String metadata;

    /**
     * The operation performed on ORCID.
     */
    @Enumerated(EnumType.STRING)
    @Column(name = "operation")
    private OrcidOperation operation;

    /**
     * The response message incoming from ORCID.
     */
    @Lob
    @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType")
    @Column(name = "response_message")
    private String responseMessage;

    /**
     * The timestamp of the synchronization attempt.
     */
    @Temporal(TemporalType.TIMESTAMP)
    @Column(name = "timestamp_last_attempt")
    private Date timestamp = new Date();

    /**
     * The HTTP status incoming from ORCID.
     */
    @Column(name = "status")
    private Integer status;

    public Integer getStatus() {
        return status;
    }

    public void setStatus(Integer status) {
        this.status = status;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    @Override
    public Integer getID() {
        return id;
    }

    public Item getProfileItem() {
        return profileItem;
    }

    public void setProfileItem(Item profileItem) {
        this.profileItem = profileItem;
    }

    public Item getEntity() {
        return entity;
    }

    public void setEntity(Item entity) {
        this.entity = entity;
    }

    public String getPutCode() {
        return putCode;
    }

    public void setPutCode(String putCode) {
        this.putCode = putCode;
    }

    public String getResponseMessage() {
        return responseMessage;
    }

    public void setResponseMessage(String responseMessage) {
        this.responseMessage = responseMessage;
    }

    public String getRecordType() {
        return recordType;
    }

    public void setRecordType(String recordType) {
        this.recordType = recordType;
    }

    public String getMetadata() {
        return metadata;
    }

    public void setMetadata(String metadata) {
        this.metadata = metadata;
    }

    public OrcidOperation getOperation() {
        return operation;
    }

    public void setOperation(OrcidOperation operation) {
        this.operation = operation;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public Date getTimestamp() {
        return timestamp;
    }

    public void setTimestamp(Date timestamp) {
        this.timestamp = timestamp;
    }

    /**
     * Identity is based on the database id, consistently with the sibling
     * entity OrcidQueue. NOTE(review): two transient instances (id == null)
     * compare as equal, as they already do for OrcidQueue.
     */
    @Override
    public int hashCode() {
        return Objects.hash(id);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        OrcidHistory other = (OrcidHistory) obj;
        return Objects.equals(id, other.id);
    }

    @Override
    public String toString() {
        return "OrcidHistory [id=" + id + ", profileItem=" + profileItem + ", entity=" + entity
            + ", putCode=" + putCode + ", recordType=" + recordType + ", operation=" + operation
            + ", status=" + status + "]";
    }
}

View File

@@ -0,0 +1,20 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.orcid;
/**
* Enum that models an ORCID synchronization operation.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public enum OrcidOperation {

    /** The resource is added on ORCID for the first time. */
    INSERT,

    /** An already synchronized resource is updated on ORCID. */
    UPDATE,

    /** A previously synchronized resource is removed from ORCID. */
    DELETE
}

View File

@@ -0,0 +1,219 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.orcid;
import static org.apache.commons.lang3.StringUtils.isEmpty;
import static org.apache.commons.lang3.StringUtils.isNotEmpty;
import java.util.Objects;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.Lob;
import javax.persistence.ManyToOne;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import org.dspace.content.Item;
import org.dspace.core.ReloadableEntity;
import org.hibernate.annotations.Type;
/**
* Entity that model a record on the ORCID synchronization queue. Each record in
* this table is associated with an profile item and the entity to be
* synchronized (which can be the profile itself, a publication or a
* project/funding). If the entity is the profile itself then the metadata field
* contains the signature of the information to be synchronized.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
@Entity
@Table(name = "orcid_queue")
public class OrcidQueue implements ReloadableEntity<Integer> {

    @Id
    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "orcid_queue_id_seq")
    @SequenceGenerator(name = "orcid_queue_id_seq", sequenceName = "orcid_queue_id_seq", allocationSize = 1)
    private Integer id;

    /**
     * The profile item.
     */
    @ManyToOne
    @JoinColumn(name = "owner_id")
    protected Item profileItem;

    /**
     * The entity to be synchronized.
     */
    @ManyToOne
    @JoinColumn(name = "entity_id")
    private Item entity;

    /**
     * A description of the resource to be synchronized.
     */
    @Column(name = "description")
    private String description;

    /**
     * The identifier of the resource to be synchronized on ORCID side (in case of
     * update or deletion). For more details see
     * https://info.orcid.org/faq/what-is-a-put-code/
     */
    @Column(name = "put_code")
    private String putCode;

    /**
     * The record type. Could be publication, funding or a profile's section.
     */
    @Column(name = "record_type")
    private String recordType;

    /**
     * The signature of the metadata to be synchronized. This is used when the
     * entity is the owner itself.
     */
    @Lob
    @Column(name = "metadata")
    @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType")
    private String metadata;

    /**
     * The operation to be performed on ORCID.
     */
    @Enumerated(EnumType.STRING)
    @Column(name = "operation")
    private OrcidOperation operation;

    /**
     * Synchronization attempts already made for a particular record.
     */
    @Column(name = "attempts")
    private Integer attempts = 0;

    /**
     * Returns true if this queue record represents an addition on ORCID:
     * there is an entity to push but no put code assigned yet.
     */
    public boolean isInsertAction() {
        return entity != null && isEmpty(putCode);
    }

    /**
     * Returns true if this queue record represents an update on ORCID:
     * there is both an entity to push and an already-assigned put code.
     */
    public boolean isUpdateAction() {
        return entity != null && isNotEmpty(putCode);
    }

    /**
     * Returns true if this queue record represents a deletion on ORCID:
     * no entity remains locally but a put code exists on the remote side.
     */
    public boolean isDeleteAction() {
        return entity == null && isNotEmpty(putCode);
    }

    // NOTE(review): named setID (capital D) while the sibling OrcidHistory uses
    // setId — confirm whether a persistence/bean-naming convention requires this.
    public void setID(Integer id) {
        this.id = id;
    }

    @Override
    public Integer getID() {
        return this.id;
    }

    public Item getProfileItem() {
        return profileItem;
    }

    public void setProfileItem(Item profileItem) {
        this.profileItem = profileItem;
    }

    public Item getEntity() {
        return entity;
    }

    public void setEntity(Item entity) {
        this.entity = entity;
    }

    public String getPutCode() {
        return putCode;
    }

    public void setPutCode(String putCode) {
        this.putCode = putCode;
    }

    /**
     * Identity is based solely on the database id.
     * NOTE(review): two transient instances (id == null) compare as equal.
     */
    @Override
    public int hashCode() {
        return Objects.hash(id);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        OrcidQueue other = (OrcidQueue) obj;
        return Objects.equals(id, other.id);
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getRecordType() {
        return recordType;
    }

    public void setRecordType(String recordType) {
        this.recordType = recordType;
    }

    public String getMetadata() {
        return metadata;
    }

    public void setMetadata(String metadata) {
        this.metadata = metadata;
    }

    public OrcidOperation getOperation() {
        return operation;
    }

    public void setOperation(OrcidOperation operation) {
        this.operation = operation;
    }

    public Integer getAttempts() {
        return attempts;
    }

    public void setAttempts(Integer attempts) {
        this.attempts = attempts;
    }

    // NOTE(review): 'attempts' is not included in the string form — confirm
    // whether that is intentional (e.g. to keep retry noise out of logs).
    @Override
    public String toString() {
        return "OrcidQueue [id=" + id + ", profileItem=" + profileItem + ", entity=" + entity + ", description="
            + description
            + ", putCode=" + putCode + ", recordType=" + recordType + ", metadata=" + metadata + ", operation="
            + operation + "]";
    }
}

Some files were not shown because too many files have changed in this diff Show More