Merge branch 'main' into iiif-item-import

This commit is contained in:
Michael Spalti
2022-01-24 15:42:37 -08:00
152 changed files with 4698 additions and 1267 deletions

View File

@@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
env:
# Give Maven 1GB of memory to work with
# Suppress all Maven "downloading" messages in Travis logs (see https://stackoverflow.com/a/35653426)
# Suppress all Maven "downloading" messages in logs (see https://stackoverflow.com/a/35653426)
# This also slightly speeds builds, as there is less logging
MAVEN_OPTS: "-Xmx1024M -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn"
strategy:
@@ -20,16 +20,18 @@ jobs:
matrix:
include:
# NOTE: Unit Tests include deprecated REST API v6 (as it has unit tests)
# - surefire.rerunFailingTestsCount => try again for flakey tests, and keep track of/report on number of retries
- type: "Unit Tests"
mvnflags: "-DskipUnitTests=false -Pdspace-rest"
mvnflags: "-DskipUnitTests=false -Pdspace-rest -Dsurefire.rerunFailingTestsCount=2"
resultsdir: "**/target/surefire-reports/**"
# NOTE: ITs skip all code validation checks, as they are already done by Unit Test job.
# - enforcer.skip => Skip maven-enforcer-plugin rules
# - checkstyle.skip => Skip all checkstyle checks by maven-checkstyle-plugin
# - license.skip => Skip all license header checks by license-maven-plugin
# - xml.skip => Skip all XML/XSLT validation by xml-maven-plugin
# - failsafe.rerunFailingTestsCount => try again for flakey tests, and keep track of/report on number of retries
- type: "Integration Tests"
mvnflags: "-DskipIntegrationTests=false -Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxml.skip=true"
mvnflags: "-DskipIntegrationTests=false -Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxml.skip=true -Dfailsafe.rerunFailingTestsCount=2"
resultsdir: "**/target/failsafe-reports/**"
# Do NOT exit immediately if one matrix job fails
# This ensures ITs continue running even if Unit Tests fail, or vice versa
@@ -38,13 +40,14 @@ jobs:
steps:
# https://github.com/actions/checkout
- name: Checkout codebase
uses: actions/checkout@v1
uses: actions/checkout@v2
# https://github.com/actions/setup-java
- name: Install JDK 11
uses: actions/setup-java@v1
uses: actions/setup-java@v2
with:
java-version: 11
distribution: 'temurin'
# https://github.com/actions/cache
- name: Cache Maven dependencies
@@ -74,4 +77,4 @@ jobs:
# https://github.com/codecov/codecov-action
- name: Upload coverage to Codecov.io
uses: codecov/codecov-action@v1
uses: codecov/codecov-action@v2

156
.github/workflows/docker.yml vendored Normal file
View File

@@ -0,0 +1,156 @@
# DSpace Docker image build for hub.docker.com
name: Docker images
# Run this Build for all pushes to 'main' or maintenance branches, or tagged releases.
# Also run for PRs to ensure PR doesn't break Docker build process
on:
push:
branches:
- main
- 'dspace-**'
tags:
- 'dspace-**'
pull_request:
jobs:
docker:
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
if: github.repository == 'dspace/dspace'
runs-on: ubuntu-latest
env:
# Define tags to use for Docker images based on Git tags/branches (for docker/metadata-action)
# For a new commit on default branch (main), use the literal tag 'dspace-7_x' on Docker image.
# For a new commit on other branches, use the branch name as the tag for Docker image.
# For a new tag, copy that tag name as the tag for Docker image.
IMAGE_TAGS: |
type=raw,value=dspace-7_x,enable=${{ endsWith(github.ref, github.event.repository.default_branch) }}
type=ref,event=branch,enable=${{ !endsWith(github.ref, github.event.repository.default_branch) }}
type=ref,event=tag
# Define default tag "flavor" for docker/metadata-action per
# https://github.com/docker/metadata-action#flavor-input
# We turn off 'latest' tag by default.
TAGS_FLAVOR: |
latest=false
steps:
# https://github.com/actions/checkout
- name: Checkout codebase
uses: actions/checkout@v2
# https://github.com/docker/setup-buildx-action
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v1
# https://github.com/docker/login-action
- name: Login to DockerHub
# Only login if not a PR, as PRs only trigger a Docker build and not a push
if: github.event_name != 'pull_request'
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
####################################################
# Build/Push the 'dspace/dspace-dependencies' image
####################################################
# https://github.com/docker/metadata-action
# Get Metadata for docker_build_deps step below
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-dependencies' image
id: meta_build_deps
uses: docker/metadata-action@v3
with:
images: dspace/dspace-dependencies
tags: ${{ env.IMAGE_TAGS }}
flavor: ${{ env.TAGS_FLAVOR }}
# https://github.com/docker/build-push-action
- name: Build and push 'dspace-dependencies' image
id: docker_build_deps
uses: docker/build-push-action@v2
with:
context: .
file: ./Dockerfile.dependencies
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
# but we ONLY do an image push to DockerHub if it's NOT a PR
push: ${{ github.event_name != 'pull_request' }}
# Use tags / labels provided by 'docker/metadata-action' above
tags: ${{ steps.meta_build_deps.outputs.tags }}
labels: ${{ steps.meta_build_deps.outputs.labels }}
#######################################
# Build/Push the 'dspace/dspace' image
#######################################
# Get Metadata for docker_build step below
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace' image
id: meta_build
uses: docker/metadata-action@v3
with:
images: dspace/dspace
tags: ${{ env.IMAGE_TAGS }}
flavor: ${{ env.TAGS_FLAVOR }}
- name: Build and push 'dspace' image
id: docker_build
uses: docker/build-push-action@v2
with:
context: .
file: ./Dockerfile
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
# but we ONLY do an image push to DockerHub if it's NOT a PR
push: ${{ github.event_name != 'pull_request' }}
# Use tags / labels provided by 'docker/metadata-action' above
tags: ${{ steps.meta_build.outputs.tags }}
labels: ${{ steps.meta_build.outputs.labels }}
#####################################################
# Build/Push the 'dspace/dspace' image ('-test' tag)
#####################################################
# Get Metadata for docker_build_test step below
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-test' image
id: meta_build_test
uses: docker/metadata-action@v3
with:
images: dspace/dspace
tags: ${{ env.IMAGE_TAGS }}
# As this is a test/development image, its tags are all suffixed with "-test". Otherwise, it uses the same
# tagging logic as the primary 'dspace/dspace' image above.
flavor: ${{ env.TAGS_FLAVOR }}
suffix=-test
- name: Build and push 'dspace-test' image
id: docker_build_test
uses: docker/build-push-action@v2
with:
context: .
file: ./Dockerfile.test
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
# but we ONLY do an image push to DockerHub if it's NOT a PR
push: ${{ github.event_name != 'pull_request' }}
# Use tags / labels provided by 'docker/metadata-action' above
tags: ${{ steps.meta_build_test.outputs.tags }}
labels: ${{ steps.meta_build_test.outputs.labels }}
###########################################
# Build/Push the 'dspace/dspace-cli' image
###########################################
# Get Metadata for docker_build_test step below
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-cli' image
id: meta_build_cli
uses: docker/metadata-action@v3
with:
images: dspace/dspace-cli
tags: ${{ env.IMAGE_TAGS }}
flavor: ${{ env.TAGS_FLAVOR }}
- name: Build and push 'dspace-cli' image
id: docker_build_cli
uses: docker/build-push-action@v2
with:
context: .
file: ./Dockerfile.cli
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
# but we ONLY do an image push to DockerHub if it's NOT a PR
push: ${{ github.event_name != 'pull_request' }}
# Use tags / labels provided by 'docker/metadata-action' above
tags: ${{ steps.meta_build_cli.outputs.tags }}
labels: ${{ steps.meta_build_cli.outputs.labels }}

View File

@@ -2,8 +2,8 @@
# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
#
# This version is JDK11 compatible
# - tomcat:8-jdk11
# - ANT 1.10.7
# - tomcat:9-jdk11
# - ANT 1.10.12
# - maven:3-jdk-11 (see dspace-dependencies)
# - note: default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x
@@ -11,53 +11,52 @@
FROM dspace/dspace-dependencies:dspace-7_x as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-install directory will be written to /install
# The dspace-installer directory will be written to /install
RUN mkdir /install \
&& chown -Rv dspace: /install \
&& chown -Rv dspace: /app
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg
# Build DSpace (note: this build doesn't include the optional, deprecated "dspace-rest" webapp)
# Copy the dspace-install directory to /install. Clean up the build to keep the docker image small
# Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small
RUN mvn package && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
# Step 2 - Run Ant Deploy
FROM tomcat:8-jdk11 as ant_build
FROM tomcat:9-jdk11 as ant_build
ARG TARGET_DIR=dspace-installer
# COPY the /install directory from 'build' container to /dspace-src in this container
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.7
ENV ANT_VERSION 1.10.12
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
# Download and install 'ant'
RUN mkdir $ANT_HOME && \
wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
# Run necessary 'ant' deploy scripts
RUN ant init_installation update_configs update_code update_webapps
# Step 3 - Run tomcat
# Create a new tomcat image that does not retain the build directory contents
FROM tomcat:8-jdk11
FROM tomcat:9-jdk11
# NOTE: DSPACE_INSTALL must align with the "dspace.dir" default configuration.
ENV DSPACE_INSTALL=/dspace
# Copy the /dspace directory from 'ant_build' container to /dspace in this container
COPY --from=ant_build /dspace $DSPACE_INSTALL
# Expose Tomcat port and AJP port
EXPOSE 8080 8009
# Give java extra memory (2GB)
ENV JAVA_OPTS=-Xmx2000m
# Run the "server" webapp off the /server path (e.g. http://localhost:8080/server/)
# Link the DSpace 'server' webapp into Tomcat's webapps directory.
# This ensures that when we start Tomcat, it runs from /server path (e.g. http://localhost:8080/server/)
RUN ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/server
# If you wish to run "server" webapp off the ROOT path, then comment out the above RUN, and uncomment the below RUN.
# You also MUST update the URL in dspace/src/main/docker/local.cfg
# You also MUST update the 'dspace.server.url' configuration to match.
# Please note that server webapp should only run on one path at a time.
#RUN mv /usr/local/tomcat/webapps/ROOT /usr/local/tomcat/webapps/ROOT.bk && \
# ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/ROOT

View File

@@ -3,7 +3,7 @@
#
# This version is JDK11 compatible
# - openjdk:11
# - ANT 1.10.7
# - ANT 1.10.12
# - maven:3-jdk-11 (see dspace-dependencies)
# - note: default tag for branch: dspace/dspace-cli: dspace/dspace-cli:dspace-7_x
@@ -11,19 +11,14 @@
FROM dspace/dspace-dependencies:dspace-7_x as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-install directory will be written to /install
# The dspace-installer directory will be written to /install
RUN mkdir /install \
&& chown -Rv dspace: /install \
&& chown -Rv dspace: /app
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg
# Build DSpace. Copy the dspace-install directory to /install. Clean up the build to keep the docker image small
# Build DSpace. Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small
RUN mvn package && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
@@ -31,23 +26,25 @@ RUN mvn package && \
# Step 2 - Run Ant Deploy
FROM openjdk:11 as ant_build
ARG TARGET_DIR=dspace-installer
# COPY the /install directory from 'build' container to /dspace-src in this container
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.7
ENV ANT_VERSION 1.10.12
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
# Download and install 'ant'
RUN mkdir $ANT_HOME && \
wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
# Run necessary 'ant' deploy scripts
RUN ant init_installation update_configs update_code
# Step 3 - Run jdk
# Create a new tomcat image that does not retain the build directory contents
FROM openjdk:11
# NOTE: DSPACE_INSTALL must align with the "dspace.dir" default configuration.
ENV DSPACE_INSTALL=/dspace
# Copy the /dspace directory from 'ant_build' container to /dspace in this container
COPY --from=ant_build /dspace $DSPACE_INSTALL
# Give java extra memory (1GB)
ENV JAVA_OPTS=-Xmx1000m

View File

@@ -8,23 +8,20 @@
FROM maven:3-jdk-11 as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# Create the 'dspace' user account & home directory
RUN useradd dspace \
&& mkdir /home/dspace \
&& chown -Rv dspace: /home/dspace
RUN chown -Rv dspace: /app
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg
# Trigger the installation of all maven dependencies
RUN mvn package
# Clear the contents of the /app directory (including all maven builds), so no artifacts remain.
# This ensures when dspace:dspace is built, it will just the Maven local cache (.m2) for dependencies
# This ensures when dspace:dspace is built, it will use the Maven local cache (~/.m2) for dependencies
USER root
RUN rm -rf /app/*

View File

@@ -2,8 +2,8 @@
# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
#
# This version is JDK11 compatible
# - tomcat:8-jdk11
# - ANT 1.10.7
# - tomcat:9-jdk11
# - ANT 1.10.12
# - maven:3-jdk-11 (see dspace-dependencies)
# - note: default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x-test
#
@@ -13,65 +13,63 @@
FROM dspace/dspace-dependencies:dspace-7_x as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-install directory will be written to /install
# The dspace-installer directory will be written to /install
RUN mkdir /install \
&& chown -Rv dspace: /install \
&& chown -Rv dspace: /app
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg
# Build DSpace (including the optional, deprecated "dspace-rest" webapp)
# Copy the dspace-install directory to /install. Clean up the build to keep the docker image small
# Build DSpace (INCLUDING the optional, deprecated "dspace-rest" webapp)
# Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small
RUN mvn package -Pdspace-rest && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
# Step 2 - Run Ant Deploy
FROM tomcat:8-jdk11 as ant_build
FROM tomcat:9-jdk11 as ant_build
ARG TARGET_DIR=dspace-installer
# COPY the /install directory from 'build' container to /dspace-src in this container
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.7
ENV ANT_VERSION 1.10.12
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
# Download and install 'ant'
RUN mkdir $ANT_HOME && \
wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
# Run necessary 'ant' deploy scripts
RUN ant init_installation update_configs update_code update_webapps
# Step 3 - Run tomcat
# Create a new tomcat image that does not retain the build directory contents
FROM tomcat:8-jdk11
FROM tomcat:9-jdk11
ENV DSPACE_INSTALL=/dspace
ENV TOMCAT_INSTALL=/usr/local/tomcat
# Copy the /dspace directory from 'ant_build' container to /dspace in this container
COPY --from=ant_build /dspace $DSPACE_INSTALL
# Enable the AJP connector in Tomcat's server.xml
# NOTE: secretRequired="false" should only be used when AJP is NOT accessible from an external network. But, secretRequired="true" isn't supported by mod_proxy_ajp until Apache 2.5
RUN sed -i '/Service name="Catalina".*/a \\n <Connector protocol="AJP/1.3" port="8009" address="0.0.0.0" redirectPort="8443" URIEncoding="UTF-8" secretRequired="false" />' $TOMCAT_INSTALL/conf/server.xml
# Expose Tomcat port and AJP port
EXPOSE 8080 8009
# Give java extra memory (2GB)
ENV JAVA_OPTS=-Xmx2000m
# Run the "server" webapp off the /server path (e.g. http://localhost:8080/server/)
# and the v6.x (deprecated) REST API off the "/rest" path
# Link the DSpace 'server' webapp into Tomcat's webapps directory.
# This ensures that when we start Tomcat, it runs from /server path (e.g. http://localhost:8080/server/)
# Also link the v6.x (deprecated) REST API off the "/rest" path
RUN ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/server && \
ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest
# If you wish to run "server" webapp off the ROOT path, then comment out the above RUN, and uncomment the below RUN.
# You also MUST update the URL in dspace/src/main/docker/local.cfg
# You also MUST update the 'dspace.server.url' configuration to match.
# Please note that server webapp should only run on one path at a time.
#RUN mv /usr/local/tomcat/webapps/ROOT /usr/local/tomcat/webapps/ROOT.bk && \
# ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/ROOT && \
# ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest
# Overwrite the v6.x (deprecated) REST API's web.xml, so that we can run it on HTTP (defaults to requiring HTTPS)
# WARNING: THIS IS OBVIOUSLY INSECURE. NEVER DO THIS IN PRODUCTION.
COPY dspace/src/main/docker/test/rest_web.xml $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml
RUN sed -i -e "s|\${dspace.dir}|$DSPACE_INSTALL|" $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml

20
LICENSE
View File

@@ -1,4 +1,4 @@
DSpace source code BSD License:
BSD 3-Clause License
Copyright (c) 2002-2021, LYRASIS. All rights reserved.
@@ -13,13 +13,12 @@ notice, this list of conditions and the following disclaimer.
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name DuraSpace nor the name of the DSpace Foundation
nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written
permission.
- Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
@@ -29,11 +28,4 @@ OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
DSpace uses third-party libraries which may be distributed under
different licenses to the above. Information about these licenses
is detailed in the LICENSES_THIRD_PARTY file at the root of the source
tree. You must agree to the terms of these licenses, in addition to
the above DSpace source code license, in order to use this software.
DAMAGE.

10
NOTICE
View File

@@ -1,3 +1,13 @@
Licenses of Third-Party Libraries
=================================
DSpace uses third-party libraries which may be distributed under
different licenses than specified in our LICENSE file. Information
about these licenses is detailed in the LICENSES_THIRD_PARTY file at
the root of the source tree. You must agree to the terms of these
licenses, in addition to the DSpace source code license, in order to
use this software.
Licensing Notices
=================

View File

@@ -136,3 +136,6 @@ run automatically by [GitHub Actions](https://github.com/DSpace/DSpace/actions?q
DSpace source code is freely available under a standard [BSD 3-Clause license](https://opensource.org/licenses/BSD-3-Clause).
The full license is available in the [LICENSE](LICENSE) file or online at http://www.dspace.org/license/
DSpace uses third-party libraries which may be distributed under different licenses. Those licenses are listed
in the [LICENSES_THIRD_PARTY](LICENSES_THIRD_PARTY) file.

View File

@@ -7,10 +7,23 @@ services:
build:
context: .
dockerfile: Dockerfile.cli
#environment:
environment:
# Below syntax may look odd, but it is how to override dspace.cfg settings via env variables.
# See https://github.com/DSpace/DSpace/blob/main/dspace/config/config-definition.xml
# __P__ => "." (e.g. dspace__P__dir => dspace.dir)
# __D__ => "-" (e.g. google__D__metadata => google-metadata)
# dspace.dir: Must match with Dockerfile's DSPACE_INSTALL directory.
dspace__P__dir: /dspace
# db.url: Ensure we are using the 'dspacedb' image for our database
db__P__url: 'jdbc:postgresql://dspacedb:5432/dspace'
# solr.server: Ensure we are using the 'dspacesolr' image for Solr
solr__P__server: http://dspacesolr:8983/solr
volumes:
- ./dspace/src/main/docker-compose/local.cfg:/dspace/config/local.cfg
# Keep DSpace assetstore directory between reboots
- assetstore:/dspace/assetstore
# Mount local [src]/dspace/config/ to container. This syncs your local configs with container
# NOTE: Environment variables specified above will OVERRIDE any configs in local.cfg or dspace.cfg
- ./dspace/config:/dspace/config
entrypoint: /dspace/bin/dspace
command: help
networks:

View File

@@ -4,12 +4,30 @@ networks:
ipam:
config:
# Define a custom subnet for our DSpace network, so that we can easily trust requests from host to container.
# If you customize this value, be sure to customize the 'proxies.trusted.ipranges' in your local.cfg.
# If you customize this value, be sure to customize the 'proxies.trusted.ipranges' env variable below.
- subnet: 172.23.0.0/16
services:
# DSpace (backend) webapp container
dspace:
container_name: dspace
environment:
# Below syntax may look odd, but it is how to override dspace.cfg settings via env variables.
# See https://github.com/DSpace/DSpace/blob/main/dspace/config/config-definition.xml
# __P__ => "." (e.g. dspace__P__dir => dspace.dir)
# __D__ => "-" (e.g. google__D__metadata => google-metadata)
# dspace.dir: Must match with Dockerfile's DSPACE_INSTALL directory.
dspace__P__dir: /dspace
# Uncomment to set a non-default value for dspace.server.url or dspace.ui.url
# dspace__P__server__P__url: http://localhost:8080/server
# dspace__P__ui__P__url: http://localhost:4000
dspace__P__name: 'DSpace Started with Docker Compose'
# db.url: Ensure we are using the 'dspacedb' image for our database
db__P__url: 'jdbc:postgresql://dspacedb:5432/dspace'
# solr.server: Ensure we are using the 'dspacesolr' image for Solr
solr__P__server: http://dspacesolr:8983/solr
# proxies.trusted.ipranges: This setting is required for a REST API running in Docker to trust requests
# from the host machine. This IP range MUST correspond to the 'dspacenet' subnet defined above.
proxies__P__trusted__P__ipranges: '172.23.0'
image: "${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-dspace-7_x-test}"
build:
context: .
@@ -26,8 +44,11 @@ services:
stdin_open: true
tty: true
volumes:
# Keep DSpace assetstore directory between reboots
- assetstore:/dspace/assetstore
- ./dspace/src/main/docker-compose/local.cfg:/dspace/config/local.cfg
# Mount local [src]/dspace/config/ to container. This syncs your local configs with container
# NOTE: Environment variables specified above will OVERRIDE any configs in local.cfg or dspace.cfg
- ./dspace/config:/dspace/config
# Ensure that the database is ready BEFORE starting tomcat
# 1. While a TCP connection to dspacedb port 5432 is not available, continue to sleep
# 2. Then, run database migration to init database tables
@@ -59,7 +80,7 @@ services:
dspacesolr:
container_name: dspacesolr
# Uses official Solr image at https://hub.docker.com/_/solr/
image: solr:8.8
image: solr:8.11-slim
networks:
dspacenet:
ports:

View File

@@ -481,8 +481,8 @@
<artifactId>commons-validator</artifactId>
</dependency>
<dependency>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
<groupId>com.sun.mail</groupId>
<artifactId>javax.mail</artifactId>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
@@ -903,14 +903,12 @@
<dependency>
<groupId>org.apache.velocity</groupId>
<artifactId>velocity-engine-core</artifactId>
<version>2.0</version>
<type>jar</type>
</dependency>
<dependency>
<groupId>org.xmlunit</groupId>
<artifactId>xmlunit-core</artifactId>
<version>2.6.3</version>
<scope>test</scope>
</dependency>
@@ -934,6 +932,81 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mock-server</groupId>
<artifactId>mockserver-junit-rule</artifactId>
<version>5.11.2</version>
<scope>test</scope>
</dependency>
</dependencies>
<dependencyManagement>
<dependencies>
<!-- for mockserver -->
<!-- Solve dependency convergence issues related to
'mockserver-junit-rule' by selecting the versions we want to use. -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
<version>1.9</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-buffer</artifactId>
<version>4.1.68.Final</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-transport</artifactId>
<version>4.1.68.Final</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-common</artifactId>
<version>4.1.68.Final</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-handler</artifactId>
<version>4.1.68.Final</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-codec</artifactId>
<version>4.1.68.Final</version>
</dependency>
<dependency>
<groupId>org.apache.velocity</groupId>
<artifactId>velocity-engine-core</artifactId>
<version>2.2</version>
</dependency>
<dependency>
<groupId>org.xmlunit</groupId>
<artifactId>xmlunit-core</artifactId>
<version>2.8.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.github.java-json-tools</groupId>
<artifactId>json-schema-validator</artifactId>
<version>2.2.14</version>
</dependency>
<dependency>
<groupId>jakarta.xml.bind</groupId>
<artifactId>jakarta.xml.bind-api</artifactId>
<version>2.3.3</version>
</dependency>
<dependency>
<groupId>javax.validation</groupId>
<artifactId>validation-api</artifactId>
<version>2.0.1.Final</version>
</dependency>
<dependency>
<groupId>io.swagger</groupId>
<artifactId>swagger-core</artifactId>
<version>1.6.2</version>
</dependency>
</dependencies>
</dependencyManagement>
</project>

View File

@@ -14,13 +14,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.MissingArgumentException;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.ArrayUtils;
import org.dspace.app.mediafilter.factory.MediaFilterServiceFactory;
import org.dspace.app.mediafilter.service.MediaFilterService;
@@ -33,7 +27,9 @@ import org.dspace.core.Context;
import org.dspace.core.SelfNamedPlugin;
import org.dspace.core.factory.CoreServiceFactory;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.utils.DSpace;
/**
* MediaFilterManager is the class that invokes the media/format filters over the
@@ -44,7 +40,7 @@ import org.dspace.services.factory.DSpaceServicesFactory;
* scope to a community, collection or item; and -m [max] limits processing to a
* maximum number of items.
*/
public class MediaFilterCLITool {
public class MediaFilterScript extends DSpaceRunnable<MediaFilterScriptConfiguration> {
//key (in dspace.cfg) which lists all enabled filters by name
private static final String MEDIA_FILTER_PLUGINS_KEY = "filter.plugins";
@@ -55,127 +51,78 @@ public class MediaFilterCLITool {
//suffix (in dspace.cfg) for input formats supported by each filter
private static final String INPUT_FORMATS_SUFFIX = "inputFormats";
/**
* Default constructor
*/
private MediaFilterCLITool() { }
private boolean help;
private boolean isVerbose = false;
private boolean isQuiet = false;
private boolean isForce = false; // default to not forced
private String identifier = null; // object scope limiter
private int max2Process = Integer.MAX_VALUE;
private String[] filterNames;
private String[] skipIds = null;
private Map<String, List<String>> filterFormats = new HashMap<>();
public MediaFilterScriptConfiguration getScriptConfiguration() {
return new DSpace().getServiceManager()
.getServiceByName("filter-media", MediaFilterScriptConfiguration.class);
}
public void setup() throws ParseException {
public static void main(String[] argv) throws Exception {
// set headless for non-gui workstations
System.setProperty("java.awt.headless", "true");
// create an options object and populate it
CommandLineParser parser = new DefaultParser();
int status = 0;
help = commandLine.hasOption('h');
Options options = new Options();
options.addOption("v", "verbose", false,
"print all extracted text and other details to STDOUT");
options.addOption("q", "quiet", false,
"do not print anything except in the event of errors.");
options.addOption("f", "force", false,
"force all bitstreams to be processed");
options.addOption("i", "identifier", true,
"ONLY process bitstreams belonging to identifier");
options.addOption("m", "maximum", true,
"process no more than maximum items");
options.addOption("h", "help", false, "help");
//create a "plugin" option (to specify specific MediaFilter plugins to run)
Option pluginOption = Option.builder("p")
.longOpt("plugins")
.hasArg()
.hasArgs()
.valueSeparator(',')
.desc(
"ONLY run the specified Media Filter plugin(s)\n" +
"listed from '" + MEDIA_FILTER_PLUGINS_KEY + "' in dspace.cfg.\n" +
"Separate multiple with a comma (,)\n" +
"(e.g. MediaFilterManager -p \n\"Word Text Extractor\",\"PDF Text Extractor\")")
.build();
options.addOption(pluginOption);
//create a "skip" option (to specify communities/collections/items to skip)
Option skipOption = Option.builder("s")
.longOpt("skip")
.hasArg()
.hasArgs()
.valueSeparator(',')
.desc(
"SKIP the bitstreams belonging to identifier\n" +
"Separate multiple identifiers with a comma (,)\n" +
"(e.g. MediaFilterManager -s \n 123456789/34,123456789/323)")
.build();
options.addOption(skipOption);
boolean isVerbose = false;
boolean isQuiet = false;
boolean isForce = false; // default to not forced
String identifier = null; // object scope limiter
int max2Process = Integer.MAX_VALUE;
Map<String, List<String>> filterFormats = new HashMap<>();
CommandLine line = null;
try {
line = parser.parse(options, argv);
} catch (MissingArgumentException e) {
System.out.println("ERROR: " + e.getMessage());
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("MediaFilterManager\n", options);
System.exit(1);
}
if (line.hasOption('h')) {
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("MediaFilterManager\n", options);
System.exit(0);
}
if (line.hasOption('v')) {
if (commandLine.hasOption('v')) {
isVerbose = true;
}
isQuiet = line.hasOption('q');
isQuiet = commandLine.hasOption('q');
if (line.hasOption('f')) {
if (commandLine.hasOption('f')) {
isForce = true;
}
if (line.hasOption('i')) {
identifier = line.getOptionValue('i');
if (commandLine.hasOption('i')) {
identifier = commandLine.getOptionValue('i');
}
if (line.hasOption('m')) {
max2Process = Integer.parseInt(line.getOptionValue('m'));
if (commandLine.hasOption('m')) {
max2Process = Integer.parseInt(commandLine.getOptionValue('m'));
if (max2Process <= 1) {
System.out.println("Invalid maximum value '" +
line.getOptionValue('m') + "' - ignoring");
handler.logWarning("Invalid maximum value '" +
commandLine.getOptionValue('m') + "' - ignoring");
max2Process = Integer.MAX_VALUE;
}
}
String filterNames[] = null;
if (line.hasOption('p')) {
if (commandLine.hasOption('p')) {
//specified which media filter plugins we are using
filterNames = line.getOptionValues('p');
if (filterNames == null || filterNames.length == 0) { //display error, since no plugins specified
System.err.println("\nERROR: -p (-plugin) option requires at least one plugin to be specified.\n" +
"(e.g. MediaFilterManager -p \"Word Text Extractor\",\"PDF Text Extractor\")\n");
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("MediaFilterManager\n", options);
System.exit(1);
}
filterNames = commandLine.getOptionValues('p');
} else {
//retrieve list of all enabled media filter plugins!
filterNames = DSpaceServicesFactory.getInstance().getConfigurationService()
.getArrayProperty(MEDIA_FILTER_PLUGINS_KEY);
}
//save to a global skip list
if (commandLine.hasOption('s')) {
//specified which identifiers to skip when processing
skipIds = commandLine.getOptionValues('s');
}
}
public void internalRun() throws Exception {
if (help) {
printHelp();
return;
}
MediaFilterService mediaFilterService = MediaFilterServiceFactory.getInstance().getMediaFilterService();
mediaFilterService.setLogHandler(handler);
mediaFilterService.setForce(isForce);
mediaFilterService.setQuiet(isQuiet);
mediaFilterService.setVerbose(isVerbose);
@@ -184,16 +131,17 @@ public class MediaFilterCLITool {
//initialize an array of our enabled filters
List<FormatFilter> filterList = new ArrayList<>();
//set up each filter
for (int i = 0; i < filterNames.length; i++) {
//get filter of this name & add to list of filters
FormatFilter filter = (FormatFilter) CoreServiceFactory.getInstance().getPluginService()
.getNamedPlugin(FormatFilter.class, filterNames[i]);
if (filter == null) {
System.err.println(
"\nERROR: Unknown MediaFilter specified (either from command-line or in dspace.cfg): '" +
filterNames[i] + "'");
System.exit(1);
handler.handleException("ERROR: Unknown MediaFilter specified (either from command-line or in " +
"dspace.cfg): '" + filterNames[i] + "'");
handler.logError("ERROR: Unknown MediaFilter specified (either from command-line or in " +
"dspace.cfg): '" + filterNames[i] + "'");
} else {
filterList.add(filter);
@@ -218,10 +166,10 @@ public class MediaFilterCLITool {
//For other MediaFilters, format of key is:
// filter.<class-name>.inputFormats
String[] formats =
DSpaceServicesFactory.getInstance().getConfigurationService().getArrayProperty(
FILTER_PREFIX + "." + filterClassName +
(pluginName != null ? "." + pluginName : "") +
"." + INPUT_FORMATS_SUFFIX);
DSpaceServicesFactory.getInstance().getConfigurationService().getArrayProperty(
FILTER_PREFIX + "." + filterClassName +
(pluginName != null ? "." + pluginName : "") +
"." + INPUT_FORMATS_SUFFIX);
//add to internal map of filters to supported formats
if (ArrayUtils.isNotEmpty(formats)) {
@@ -230,8 +178,8 @@ public class MediaFilterCLITool {
//For other MediaFilters, map key is just:
// <class-name>
filterFormats.put(filterClassName +
(pluginName != null ? MediaFilterService.FILTER_PLUGIN_SEPARATOR +
pluginName : ""),
(pluginName != null ? MediaFilterService.FILTER_PLUGIN_SEPARATOR +
pluginName : ""),
Arrays.asList(formats));
}
} //end if filter!=null
@@ -239,11 +187,11 @@ public class MediaFilterCLITool {
//If verbose, print out loaded mediafilter info
if (isVerbose) {
System.out.println("The following MediaFilters are enabled: ");
handler.logInfo("The following MediaFilters are enabled: ");
Iterator<String> i = filterFormats.keySet().iterator();
while (i.hasNext()) {
String filterName = i.next();
System.out.println("Full Filter Name: " + filterName);
handler.logInfo("Full Filter Name: " + filterName);
String pluginName = null;
if (filterName.contains(MediaFilterService.FILTER_PLUGIN_SEPARATOR)) {
String[] fields = filterName.split(MediaFilterService.FILTER_PLUGIN_SEPARATOR);
@@ -251,8 +199,7 @@ public class MediaFilterCLITool {
pluginName = fields[1];
}
System.out.println(filterName +
(pluginName != null ? " (Plugin: " + pluginName + ")" : ""));
handler.logInfo(filterName + (pluginName != null ? " (Plugin: " + pluginName + ")" : ""));
}
}
@@ -262,20 +209,8 @@ public class MediaFilterCLITool {
//Retrieve list of identifiers to skip (if any)
String skipIds[] = null;
if (line.hasOption('s')) {
//specified which identifiers to skip when processing
skipIds = line.getOptionValues('s');
if (skipIds == null || skipIds.length == 0) { //display error, since no identifiers specified to skip
System.err.println("\nERROR: -s (-skip) option requires at least one identifier to SKIP.\n" +
"Make sure to separate multiple identifiers with a comma!\n" +
"(e.g. MediaFilterManager -s 123456789/34,123456789/323)\n");
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("MediaFilterManager\n", options);
System.exit(0);
}
if (skipIds != null && skipIds.length > 0) {
//save to a global skip list
mediaFilterService.setSkipList(Arrays.asList(skipIds));
}
@@ -296,7 +231,7 @@ public class MediaFilterCLITool {
DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(c, identifier);
if (dso == null) {
throw new IllegalArgumentException("Cannot resolve "
+ identifier + " to a DSpace object");
+ identifier + " to a DSpace object");
}
switch (dso.getType()) {
@@ -317,12 +252,11 @@ public class MediaFilterCLITool {
c.complete();
c = null;
} catch (Exception e) {
status = 1;
handler.handleException(e);
} finally {
if (c != null) {
c.abort();
}
}
System.exit(status);
}
}

View File

@@ -0,0 +1,91 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.sql.SQLException;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Script configuration for the "filter-media" script: declares which runnable
 * class it describes, who is allowed to execute it, and the command-line
 * options it accepts.
 */
public class MediaFilterScriptConfiguration<T extends MediaFilterScript> extends ScriptConfiguration<T> {
// Used to restrict execution of this script to repository administrators.
@Autowired
private AuthorizeService authorizeService;
// The DSpaceRunnable implementation this configuration describes.
private Class<T> dspaceRunnableClass;
// Configuration key (dspace.cfg) listing the enabled media filter plugins.
private static final String MEDIA_FILTER_PLUGINS_KEY = "filter.plugins";
/**
 * @return the script class this configuration belongs to
 */
@Override
public Class<T> getDspaceRunnableClass() {
return dspaceRunnableClass;
}
/**
 * @param dspaceRunnableClass the script class this configuration belongs to
 */
@Override
public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
this.dspaceRunnableClass = dspaceRunnableClass;
}
/**
 * Only administrators may run the filter-media script.
 *
 * @param context the DSpace context of the caller
 * @return true if the current user is an administrator
 */
@Override
public boolean isAllowedToExecute(final Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
// Wrap checked SQLException: callers of this contract method do not
// expect a database exception.
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}
/**
 * Build the command-line options accepted by the filter-media script.
 * NOTE: option letters and descriptions mirror the legacy MediaFilterManager
 * CLI, so callers' existing invocations keep working.
 *
 * @return the populated Options object
 */
@Override
public Options getOptions() {
Options options = new Options();
// Simple boolean flags (setType(boolean.class) marks them as flags for
// the scripts framework).
options.addOption("v", "verbose", false, "print all extracted text and other details to STDOUT");
options.getOption("v").setType(boolean.class);
options.addOption("q", "quiet", false, "do not print anything except in the event of errors.");
options.getOption("q").setType(boolean.class);
options.addOption("f", "force", false, "force all bitstreams to be processed");
options.getOption("f").setType(boolean.class);
// Options taking a value.
options.addOption("i", "identifier", true, "ONLY process bitstreams belonging to identifier");
options.addOption("m", "maximum", true, "process no more than maximum items");
options.addOption("h", "help", false, "help");
options.getOption("h").setType(boolean.class);
// -p/--plugins: comma-separated list of media filter plugin names to run.
Option pluginOption = Option.builder("p")
.longOpt("plugins")
.hasArg()
.hasArgs()
.valueSeparator(',')
.desc(
"ONLY run the specified Media Filter plugin(s)\n" +
"listed from '" + MEDIA_FILTER_PLUGINS_KEY + "' in dspace.cfg.\n" +
"Separate multiple with a comma (,)\n" +
"(e.g. MediaFilterManager -p \n\"Word Text Extractor\",\"PDF Text" +
" Extractor\")")
.build();
options.addOption(pluginOption);
// -s/--skip: comma-separated list of handles to skip during processing.
Option skipOption = Option.builder("s")
.longOpt("skip")
.hasArg()
.hasArgs()
.valueSeparator(',')
.desc(
"SKIP the bitstreams belonging to identifier\n" +
"Separate multiple identifiers with a comma (,)\n" +
"(e.g. MediaFilterManager -s \n 123456789/34,123456789/323)")
.build();
options.addOption(skipOption);
return options;
}
}

View File

@@ -34,6 +34,7 @@ import org.dspace.core.Context;
import org.dspace.core.SelfNamedPlugin;
import org.dspace.eperson.Group;
import org.dspace.eperson.service.GroupService;
import org.dspace.scripts.handler.DSpaceRunnableHandler;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
@@ -67,6 +68,8 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
@Autowired(required = true)
protected ConfigurationService configurationService;
protected DSpaceRunnableHandler handler;
protected int max2Process = Integer.MAX_VALUE; // maximum number items to process
protected int processed = 0; // number items processed
@@ -225,16 +228,16 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
int assetstore = myBitstream.getStoreNumber();
// Print out helpful information to identify the bitstream that caused the error.
System.out.println("ERROR filtering, skipping bitstream:\n");
System.out.println("\tItem Handle: " + handle);
StringBuilder sb = new StringBuilder("ERROR filtering, skipping bitstream:\n");
sb.append("\tItem Handle: ").append(handle);
for (Bundle bundle : bundles) {
System.out.println("\tBundle Name: " + bundle.getName());
sb.append("\tBundle Name: ").append(bundle.getName());
}
System.out.println("\tFile Size: " + size);
System.out.println("\tChecksum: " + checksum);
System.out.println("\tAsset Store: " + assetstore);
System.out.println(e);
e.printStackTrace();
sb.append("\tFile Size: ").append(size);
sb.append("\tChecksum: ").append(checksum);
sb.append("\tAsset Store: ").append(assetstore);
logError(sb.toString());
logError(e.getMessage(), e);
}
} else if (filterClass instanceof SelfRegisterInputFormats) {
// Filter implements self registration, so check to see if it should be applied
@@ -287,7 +290,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
filtered = true;
}
} catch (Exception e) {
System.out.println("ERROR filtering, skipping bitstream #"
logError("ERROR filtering, skipping bitstream #"
+ myBitstream.getID() + " " + e);
e.printStackTrace();
}
@@ -332,7 +335,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
// if exists and overwrite = false, exit
if (!overWrite && (existingBitstream != null)) {
if (!isQuiet) {
System.out.println("SKIPPED: bitstream " + source.getID()
logInfo("SKIPPED: bitstream " + source.getID()
+ " (item: " + item.getHandle() + ") because '" + newName + "' already exists");
}
@@ -340,11 +343,11 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
}
if (isVerbose) {
System.out.println("PROCESSING: bitstream " + source.getID()
logInfo("PROCESSING: bitstream " + source.getID()
+ " (item: " + item.getHandle() + ")");
}
System.out.println("File: " + newName);
logInfo("File: " + newName);
// start filtering of the bitstream, using try with resource to close all InputStreams properly
try (
@@ -356,7 +359,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
) {
if (destStream == null) {
if (!isQuiet) {
System.out.println("SKIPPED: bitstream " + source.getID()
logInfo("SKIPPED: bitstream " + source.getID()
+ " (item: " + item.getHandle() + ") because filtering was unsuccessful");
}
return false;
@@ -402,7 +405,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
formatFilter.postProcessBitstream(context, item, b);
} catch (OutOfMemoryError oome) {
System.out.println("!!! OutOfMemoryError !!!");
logError("!!! OutOfMemoryError !!!");
}
// fixme - set date?
@@ -412,7 +415,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
}
if (!isQuiet) {
System.out.println("FILTERED: bitstream " + source.getID()
logInfo("FILTERED: bitstream " + source.getID()
+ " (item: " + item.getHandle() + ") and created '" + newName + "'");
}
@@ -428,7 +431,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
public boolean inSkipList(String identifier) {
if (skipList != null && skipList.contains(identifier)) {
if (!isQuiet) {
System.out.println("SKIP-LIST: skipped bitstreams within identifier " + identifier);
logInfo("SKIP-LIST: skipped bitstreams within identifier " + identifier);
}
return true;
} else {
@@ -436,6 +439,28 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
}
}
/**
 * Route an informational message to the script handler when one is set,
 * otherwise fall back to standard output (the pre-handler behaviour).
 *
 * @param message the text to log
 */
private void logInfo(String message) {
    if (handler == null) {
        System.out.println(message);
        return;
    }
    handler.logInfo(message);
}
/**
 * Route an error message to the script handler when one is set.
 *
 * @param message the text to log
 */
private void logError(String message) {
if (handler != null) {
handler.logError(message);
} else {
// NOTE(review): falls back to stdout (not stderr), matching the previous
// System.out-based reporting in this class — confirm this is intentional.
System.out.println(message);
}
}
/**
 * Route an error message plus its causing exception to the script handler
 * when one is set, otherwise fall back to standard output.
 *
 * @param message the text to log
 * @param e       the exception that triggered this error
 */
private void logError(String message, Exception e) {
    if (handler != null) {
        handler.logError(message, e);
    } else {
        System.out.println(message);
        // Fix: the no-handler branch silently dropped the exception; print
        // its stack trace so the failure cause is not lost (this matches the
        // e.printStackTrace() behaviour of the code this replaced).
        e.printStackTrace();
    }
}
@Override
public void setVerbose(boolean isVerbose) {
this.isVerbose = isVerbose;
@@ -470,4 +495,9 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
public void setFilterFormats(Map<String, List<String>> filterFormats) {
this.filterFormats = filterFormats;
}
/**
 * Set the log handler used by the DSpace scripts and processes framework.
 * When no handler is set, this service's log* helpers fall back to stdout.
 */
@Override
public void setLogHandler(DSpaceRunnableHandler handler) {
this.handler = handler;
}
}

View File

@@ -16,6 +16,7 @@ import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.scripts.handler.DSpaceRunnableHandler;
/**
* MediaFilterManager is the class that invokes the media/format filters over the
@@ -124,4 +125,10 @@ public interface MediaFilterService {
public void setSkipList(List<String> skipList);
public void setFilterFormats(Map<String, List<String>> filterFormats);
/**
 * Set the log handler used in the DSpace scripts and processes framework.
 *
 * @param handler the handler that should receive this service's log output;
 *                implementations may fall back to standard output when none
 *                is set
 */
public void setLogHandler(DSpaceRunnableHandler handler);
}

View File

@@ -216,4 +216,12 @@ public interface AuthenticationMethod {
* @return The authentication method name
*/
public String getName();
/**
* Get whether the authentication method is being used.
* @param context The DSpace context
* @param request The current request
* @return whether the authentication method is being used.
*/
public boolean isUsed(Context context, HttpServletRequest request);
}

View File

@@ -193,4 +193,18 @@ public class AuthenticationServiceImpl implements AuthenticationService {
public Iterator<AuthenticationMethod> authenticationMethodIterator() {
return getAuthenticationMethodStack().iterator();
}
/**
 * Walk the configured authentication method stack and return the name of the
 * first method that reports itself as used for this context and request.
 *
 * @param context a valid DSpace context
 * @param request the request that started this operation, or null
 * @return the name of the authentication method in use, or null if none match
 */
@Override
public String getAuthenticationMethod(final Context context, final HttpServletRequest request) {
    for (Iterator<AuthenticationMethod> it = authenticationMethodIterator(); it.hasNext(); ) {
        AuthenticationMethod candidate = it.next();
        if (candidate.isUsed(context, request)) {
            return candidate.getName();
        }
    }
    return null;
}
}

View File

@@ -273,4 +273,9 @@ public class IPAuthentication implements AuthenticationMethod {
public String getName() {
return "ip";
}
/**
 * IP-based authentication never reports itself as the method in use.
 * NOTE(review): presumably because it is implicit — unlike the other methods,
 * no request attribute is set on login here — confirm this is intentional.
 */
@Override
public boolean isUsed(final Context context, final HttpServletRequest request) {
return false;
}
}

View File

@@ -83,6 +83,9 @@ public class LDAPAuthentication
protected ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
private static final String LDAP_AUTHENTICATED = "ldap.authenticated";
/**
* Let a real auth method return true if it wants.
*
@@ -261,6 +264,7 @@ public class LDAPAuthentication
if (ldap.ldapAuthenticate(dn, password, context)) {
context.setCurrentUser(eperson);
request.setAttribute(LDAP_AUTHENTICATED, true);
// assign user to groups based on ldap dn
assignGroups(dn, ldap.ldapGroup, context);
@@ -311,6 +315,8 @@ public class LDAPAuthentication
context.dispatchEvents();
context.restoreAuthSystemState();
context.setCurrentUser(eperson);
request.setAttribute(LDAP_AUTHENTICATED, true);
// assign user to groups based on ldap dn
assignGroups(dn, ldap.ldapGroup, context);
@@ -341,6 +347,8 @@ public class LDAPAuthentication
ePersonService.update(context, eperson);
context.dispatchEvents();
context.setCurrentUser(eperson);
request.setAttribute(LDAP_AUTHENTICATED, true);
// assign user to groups based on ldap dn
assignGroups(dn, ldap.ldapGroup, context);
@@ -734,4 +742,14 @@ public class LDAPAuthentication
}
}
}
/**
 * Report whether this request was authenticated via LDAP: true when a user is
 * logged in and the request carries the marker attribute set on a successful
 * LDAP login.
 *
 * @param context the DSpace context
 * @param request the current request, or null
 * @return true if LDAP authentication was used for this request
 */
@Override
public boolean isUsed(final Context context, final HttpServletRequest request) {
    // Idiom: collapse if/return-true/return-false into a single expression.
    return request != null
            && context.getCurrentUser() != null
            && request.getAttribute(LDAP_AUTHENTICATED) != null;
}
}

View File

@@ -51,6 +51,9 @@ public class PasswordAuthentication
*/
private static final Logger log = LogManager.getLogger();
private static final String PASSWORD_AUTHENTICATED = "password.authenticated";
/**
* Look to see if this email address is allowed to register.
@@ -216,6 +219,9 @@ public class PasswordAuthentication
.checkPassword(context, eperson, password)) {
// login is ok if password matches:
context.setCurrentUser(eperson);
if (request != null) {
request.setAttribute(PASSWORD_AUTHENTICATED, true);
}
log.info(LogHelper.getHeader(context, "authenticate", "type=PasswordAuthentication"));
return SUCCESS;
} else {
@@ -247,4 +253,15 @@ public class PasswordAuthentication
public String getName() {
return "password";
}
/**
 * Report whether this request was authenticated via password login: true when
 * a user is logged in and the request carries the marker attribute set on a
 * successful password authentication.
 *
 * @param context the DSpace context
 * @param request the current request, or null
 * @return true if password authentication was used for this request
 */
@Override
public boolean isUsed(final Context context, final HttpServletRequest request) {
    // Idiom: collapse if/return-true/return-false into a single expression.
    return request != null
            && context.getCurrentUser() != null
            && request.getAttribute(PASSWORD_AUTHENTICATED) != null;
}
}

View File

@@ -235,7 +235,7 @@ public class ShibAuthentication implements AuthenticationMethod {
// Step 4: Log the user in.
context.setCurrentUser(eperson);
request.getSession().setAttribute("shib.authenticated", true);
request.setAttribute("shib.authenticated", true);
AuthenticateServiceFactory.getInstance().getAuthenticationService().initEPerson(context, request, eperson);
log.info(eperson.getEmail() + " has been authenticated via shibboleth.");
@@ -403,7 +403,7 @@ public class ShibAuthentication implements AuthenticationMethod {
// Cache the special groups, so we don't have to recalculate them again
// for this session.
request.getSession().setAttribute("shib.specialgroup", groupIds);
request.setAttribute("shib.specialgroup", groupIds);
return new ArrayList<>(groups);
} catch (Throwable t) {
@@ -515,7 +515,7 @@ public class ShibAuthentication implements AuthenticationMethod {
}
// Determine the server return URL, where shib will send the user after authenticating.
// We need it to go back to DSpace's ShibbolethRestController so we will extract the user's information,
// We need it to trigger DSpace's ShibbolethLoginFilter so we will extract the user's information,
// locally authenticate them & then redirect back to the UI.
String returnURL = configurationService.getProperty("dspace.server.url") + "/api/authn/shibboleth"
+ ((redirectUrl != null) ? "?redirectUrl=" + redirectUrl : "");
@@ -1283,5 +1283,14 @@ public class ShibAuthentication implements AuthenticationMethod {
}
/**
 * Report whether this request was authenticated via Shibboleth: true when a
 * user is logged in and the request carries the "shib.authenticated" marker
 * attribute set on a successful Shibboleth login.
 *
 * @param context the DSpace context
 * @param request the current request, or null
 * @return true if Shibboleth authentication was used for this request
 */
@Override
public boolean isUsed(final Context context, final HttpServletRequest request) {
    // Idiom: collapse if/return-true/return-false into a single expression.
    return request != null
            && context.getCurrentUser() != null
            && request.getAttribute("shib.authenticated") != null;
}
}

View File

@@ -128,6 +128,8 @@ public class X509Authentication implements AuthenticationMethod {
protected ConfigurationService configurationService =
DSpaceServicesFactory.getInstance().getConfigurationService();
private static final String X509_AUTHENTICATED = "x509.authenticated";
/**
* Initialization: Set caPublicKey and/or keystore. This loads the
@@ -544,6 +546,7 @@ public class X509Authentication implements AuthenticationMethod {
context.dispatchEvents();
context.restoreAuthSystemState();
context.setCurrentUser(eperson);
request.setAttribute(X509_AUTHENTICATED, true);
setSpecialGroupsFlag(request, email);
return SUCCESS;
} else {
@@ -563,6 +566,7 @@ public class X509Authentication implements AuthenticationMethod {
log.info(LogHelper.getHeader(context, "login",
"type=x509certificate"));
context.setCurrentUser(eperson);
request.setAttribute(X509_AUTHENTICATED, true);
setSpecialGroupsFlag(request, email);
return SUCCESS;
}
@@ -594,4 +598,14 @@ public class X509Authentication implements AuthenticationMethod {
public String getName() {
return "x509";
}
/**
 * Report whether this request was authenticated via an X.509 client
 * certificate: true when a user is logged in and the request carries the
 * marker attribute set on a successful certificate login.
 *
 * @param context the DSpace context
 * @param request the current request, or null
 * @return true if X.509 authentication was used for this request
 */
@Override
public boolean isUsed(final Context context, final HttpServletRequest request) {
    // Idiom: collapse if/return-true/return-false into a single expression.
    return request != null
            && context.getCurrentUser() != null
            && request.getAttribute(X509_AUTHENTICATED) != null;
}
}

View File

@@ -168,4 +168,13 @@ public interface AuthenticationService {
*/
public Iterator<AuthenticationMethod> authenticationMethodIterator();
/**
* Retrieves the currently used authentication method name based on the context and the request
*
* @param context A valid DSpace context.
* @param request The request that started this operation, or null if not applicable.
* @return the currently used authentication method name
*/
public String getAuthenticationMethod(Context context, HttpServletRequest request);
}

View File

@@ -11,6 +11,8 @@ import java.io.IOException;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.List;
import javax.inject.Inject;
import javax.inject.Named;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -22,6 +24,7 @@ import org.apache.solr.client.solrj.response.FacetField;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrInputDocument;
import org.dspace.authority.indexer.AuthorityIndexingService;
import org.dspace.service.impl.HttpConnectionPoolService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
@@ -35,6 +38,9 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho
private static final Logger log = LogManager.getLogger(AuthoritySolrServiceImpl.class);
@Inject @Named("solrHttpConnectionPoolService")
private HttpConnectionPoolService httpConnectionPoolService;
protected AuthoritySolrServiceImpl() {
}
@@ -54,7 +60,9 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho
log.debug("Solr authority URL: " + solrService);
HttpSolrClient solrServer = new HttpSolrClient.Builder(solrService).build();
HttpSolrClient solrServer = new HttpSolrClient.Builder(solrService)
.withHttpClient(httpConnectionPoolService.getClient())
.build();
solrServer.setBaseURL(solrService);
SolrQuery solrQuery = new SolrQuery().setQuery("*:*");

View File

@@ -1010,7 +1010,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
if (StringUtils.isNotBlank(q)) {
StringBuilder buildQuery = new StringBuilder();
String escapedQuery = ClientUtils.escapeQueryChars(q);
buildQuery.append(escapedQuery).append(" OR ").append(escapedQuery).append("*");
buildQuery.append("(").append(escapedQuery).append(" OR ").append(escapedQuery).append("*").append(")");
discoverQuery.setQuery(buildQuery.toString());
}
DiscoverResult resp = searchService.search(context, discoverQuery);

View File

@@ -95,20 +95,6 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
}
}
if (log.isDebugEnabled()) {
StringBuilder dbgMsg = new StringBuilder();
for (String id : identifiers) {
if (dbgMsg.capacity() == 0) {
dbgMsg.append("This DSO's Identifiers are: ");
} else {
dbgMsg.append(", ");
}
dbgMsg.append(id);
}
dbgMsg.append(".");
log.debug(dbgMsg.toString());
}
return identifiers;
}
@@ -625,7 +611,9 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
public int compare(MetadataValue o1, MetadataValue o2) {
int compare = o1.getPlace() - o2.getPlace();
if (compare == 0) {
if (o1 instanceof RelationshipMetadataValue) {
if (o1 instanceof RelationshipMetadataValue && o2 instanceof RelationshipMetadataValue) {
return compare;
} else if (o1 instanceof RelationshipMetadataValue) {
return 1;
} else if (o2 instanceof RelationshipMetadataValue) {
return -1;

View File

@@ -55,6 +55,8 @@ public class MetadataValueServiceImpl implements MetadataValueService {
//An update here isn't needed; this is persisted upon the merge of the owning object
// metadataValueDAO.save(context, metadataValue);
metadataValue = metadataValueDAO.create(context, metadataValue);
log.info(LogHelper.getHeader(context, "add_metadatavalue",
"metadata_value_id=" + metadataValue.getID()));
return metadataValue;
}

View File

@@ -129,7 +129,7 @@ public class QDCCrosswalk extends SelfNamedPlugin
protected ItemService itemService = ContentServiceFactory.getInstance().getItemService();
protected static final ConfigurationService configurationService
protected final ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
private final CrosswalkMetadataValidator metadataValidator = new CrosswalkMetadataValidator();
@@ -141,9 +141,17 @@ public class QDCCrosswalk extends SelfNamedPlugin
private static String aliases[] = null;
static {
initStatic();
}
/**
* Call this method again in tests to repeat initialization if necessary.
*/
public static void initStatic() {
List<String> aliasList = new ArrayList<>();
String propname = CONFIG_PREFIX + ".properties.";
List<String> configKeys = configurationService.getPropertyKeys(propname);
List<String> configKeys =
DSpaceServicesFactory.getInstance().getConfigurationService().getPropertyKeys(propname);
for (String key : configKeys) {
aliasList.add(key.substring(propname.length()));
}

View File

@@ -98,6 +98,11 @@ public class Context implements AutoCloseable {
*/
private List<UUID> specialGroupsPreviousState;
/**
* The currently used authentication method
*/
private String authenticationMethod;
/**
* Content events
*/
@@ -111,7 +116,7 @@ public class Context implements AutoCloseable {
/**
* Context mode
*/
private Mode mode = Mode.READ_WRITE;
private Mode mode;
/**
* Cache that is only used the context is in READ_ONLY mode
@@ -129,7 +134,6 @@ public class Context implements AutoCloseable {
}
protected Context(EventService eventService, DBConnection dbConnection) {
this.mode = Mode.READ_WRITE;
this.eventService = eventService;
this.dbConnection = dbConnection;
init();
@@ -141,7 +145,6 @@ public class Context implements AutoCloseable {
* No user is authenticated.
*/
public Context() {
this.mode = Mode.READ_WRITE;
init();
}
@@ -184,7 +187,11 @@ public class Context implements AutoCloseable {
authStateChangeHistory = new ConcurrentLinkedDeque<>();
authStateClassCallHistory = new ConcurrentLinkedDeque<>();
setMode(this.mode);
if (this.mode != null) {
setMode(this.mode);
}
}
/**
@@ -773,7 +780,7 @@ public class Context implements AutoCloseable {
* @return The current mode
*/
public Mode getCurrentMode() {
return mode;
return mode != null ? mode : Mode.READ_WRITE;
}
/**
@@ -890,4 +897,11 @@ public class Context implements AutoCloseable {
currentUser = reloadEntity(currentUser);
}
/**
 * @return the name of the authentication method used for this context, or
 *         null if none has been recorded
 */
public String getAuthenticationMethod() {
return authenticationMethod;
}
/**
 * Record the name of the authentication method used for this context.
 *
 * @param authenticationMethod the authentication method name
 */
public void setAuthenticationMethod(final String authenticationMethod) {
this.authenticationMethod = authenticationMethod;
}
}

View File

@@ -77,7 +77,7 @@ public final class Utils {
private static final VMID vmid = new VMID();
// for parseISO8601Date
private static final SimpleDateFormat parseFmt[] = {
private static final SimpleDateFormat[] parseFmt = {
// first try at parsing, has milliseconds (note General time zone)
new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ss.SSSz"),
@@ -87,7 +87,9 @@ public final class Utils {
// finally, try without any timezone (defaults to current TZ)
new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ss.SSS"),
new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ss")
new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ss"),
new SimpleDateFormat("yyyy'-'MM'-'dd")
};
// for formatISO8601Date
@@ -159,11 +161,11 @@ public final class Utils {
StringBuilder result = new StringBuilder();
// This is far from the most efficient way to do things...
for (int i = 0; i < data.length; i++) {
int low = (int) (data[i] & 0x0F);
int high = (int) (data[i] & 0xF0);
for (byte datum : data) {
int low = datum & 0x0F;
int high = datum & 0xF0;
result.append(Integer.toHexString(high).substring(0, 1));
result.append(Integer.toHexString(high).charAt(0));
result.append(Integer.toHexString(low));
}
@@ -199,13 +201,7 @@ public final class Utils {
byte[] junk = new byte[16];
random.nextBytes(junk);
String input = new StringBuilder()
.append(vmid)
.append(new java.util.Date())
.append(Arrays.toString(junk))
.append(counter++)
.toString();
String input = String.valueOf(vmid) + new Date() + Arrays.toString(junk) + counter++;
return getMD5Bytes(input.getBytes(StandardCharsets.UTF_8));
}
@@ -294,7 +290,7 @@ public final class Utils {
}
String units = m.group(2);
long multiplier = MS_IN_SECOND;
long multiplier;
if ("s".equals(units)) {
multiplier = MS_IN_SECOND;
@@ -334,16 +330,16 @@ public final class Utils {
char tzSign = s.charAt(s.length() - 6);
if (s.endsWith("Z")) {
s = s.substring(0, s.length() - 1) + "GMT+00:00";
} else if (tzSign == '-' || tzSign == '+') {
} else if ((tzSign == '-' || tzSign == '+') && s.length() > 10) {
// check for trailing timezone
s = s.substring(0, s.length() - 6) + "GMT" + s.substring(s.length() - 6);
}
// try to parse without milliseconds
ParseException lastError = null;
for (int i = 0; i < parseFmt.length; ++i) {
for (SimpleDateFormat simpleDateFormat : parseFmt) {
try {
return parseFmt[i].parse(s);
return simpleDateFormat.parse(s);
} catch (ParseException e) {
lastError = e;
}
@@ -376,7 +372,7 @@ public final class Utils {
}
public static <E> java.util.Collection<E> emptyIfNull(java.util.Collection<E> collection) {
return collection == null ? Collections.<E>emptyList() : collection;
return collection == null ? Collections.emptyList() : collection;
}
/**
@@ -457,7 +453,7 @@ public final class Utils {
if (hostname != null) {
return hostname.startsWith("www.") ? hostname.substring(4) : hostname;
}
return hostname;
return null;
} catch (URISyntaxException e) {
return null;
}

View File

@@ -61,13 +61,14 @@ public class IndexClient extends DSpaceRunnable<IndexDiscoveryScriptConfiguratio
indexer.unIndexContent(context, commandLine.getOptionValue("r"));
} else if (indexClientOptions == IndexClientOptions.CLEAN) {
handler.logInfo("Cleaning Index");
indexer.cleanIndex(false);
} else if (indexClientOptions == IndexClientOptions.FORCECLEAN) {
handler.logInfo("Cleaning Index");
indexer.cleanIndex(true);
indexer.cleanIndex();
} else if (indexClientOptions == IndexClientOptions.DELETE) {
handler.logInfo("Deleting Index");
indexer.deleteIndex();
} else if (indexClientOptions == IndexClientOptions.BUILD ||
indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) {
handler.logInfo("(Re)building index from scratch.");
indexer.deleteIndex();
indexer.createIndex(context);
if (indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) {
checkRebuildSpellCheck(commandLine, indexer);
@@ -125,16 +126,14 @@ public class IndexClient extends DSpaceRunnable<IndexDiscoveryScriptConfiguratio
handler.logInfo("Indexed " + count + " object" + (count > 1 ? "s" : "") + " in " + seconds + " seconds");
} else if (indexClientOptions == IndexClientOptions.UPDATE ||
indexClientOptions == IndexClientOptions.UPDATEANDSPELLCHECK) {
handler.logInfo("Updating and Cleaning Index");
indexer.cleanIndex(false);
handler.logInfo("Updating Index");
indexer.updateIndex(context, false);
if (indexClientOptions == IndexClientOptions.UPDATEANDSPELLCHECK) {
checkRebuildSpellCheck(commandLine, indexer);
}
} else if (indexClientOptions == IndexClientOptions.FORCEUPDATE ||
indexClientOptions == IndexClientOptions.FORCEUPDATEANDSPELLCHECK) {
handler.logInfo("Updating and Cleaning Index");
indexer.cleanIndex(true);
handler.logInfo("Updating Index");
indexer.updateIndex(context, true);
if (indexClientOptions == IndexClientOptions.FORCEUPDATEANDSPELLCHECK) {
checkRebuildSpellCheck(commandLine, indexer);

View File

@@ -17,7 +17,7 @@ import org.apache.commons.cli.Options;
public enum IndexClientOptions {
REMOVE,
CLEAN,
FORCECLEAN,
DELETE,
BUILD,
BUILDANDSPELLCHECK,
OPTIMIZE,
@@ -41,11 +41,9 @@ public enum IndexClientOptions {
} else if (commandLine.hasOption("r")) {
return IndexClientOptions.REMOVE;
} else if (commandLine.hasOption("c")) {
if (commandLine.hasOption("f")) {
return IndexClientOptions.FORCECLEAN;
} else {
return IndexClientOptions.CLEAN;
}
return IndexClientOptions.CLEAN;
} else if (commandLine.hasOption("d")) {
return IndexClientOptions.DELETE;
} else if (commandLine.hasOption("b")) {
if (commandLine.hasOption("s")) {
return IndexClientOptions.BUILDANDSPELLCHECK;
@@ -83,6 +81,9 @@ public enum IndexClientOptions {
options.addOption("c", "clean", false,
"clean existing index removing any documents that no longer exist in the db");
options.getOption("c").setType(boolean.class);
options.addOption("d", "delete", false,
"delete all records from existing index");
options.getOption("d").setType(boolean.class);
options.addOption("b", "build", false, "(re)build index, wiping out current one if it exists");
options.getOption("b").setType(boolean.class);
options.addOption("s", "spellchecker", false, "Rebuild the spellchecker, can be combined with -b and -f.");

View File

@@ -53,8 +53,9 @@ public interface IndexingService {
void updateIndex(Context context, boolean force, String type);
void cleanIndex(boolean force) throws IOException,
SQLException, SearchServiceException;
void cleanIndex() throws IOException, SQLException, SearchServiceException;
void deleteIndex();
void commit() throws SearchServiceException;

View File

@@ -8,6 +8,7 @@
package org.dspace.discovery;
import java.io.IOException;
import javax.inject.Named;
import org.apache.commons.validator.routines.UrlValidator;
import org.apache.logging.log4j.LogManager;
@@ -18,13 +19,14 @@ import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.dspace.discovery.indexobject.IndexableItem;
import org.dspace.service.impl.HttpConnectionPoolService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.storage.rdbms.DatabaseUtils;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Bean containing the SolrClient for the search core
* Bean containing the SolrClient for the search core.
* @author Kevin Van de Velde (kevin at atmire dot com)
*/
public class SolrSearchCore {
@@ -34,6 +36,8 @@ public class SolrSearchCore {
protected IndexingService indexingService;
@Autowired
protected ConfigurationService configurationService;
@Autowired @Named("solrHttpConnectionPoolService")
protected HttpConnectionPoolService httpConnectionPoolService;
/**
* SolrServer for processing indexing events.
@@ -79,7 +83,9 @@ public class SolrSearchCore {
.getBooleanProperty("discovery.solr.url.validation.enabled", true)) {
try {
log.debug("Solr URL: " + solrService);
HttpSolrClient solrServer = new HttpSolrClient.Builder(solrService).build();
HttpSolrClient solrServer = new HttpSolrClient.Builder(solrService)
.withHttpClient(httpConnectionPoolService.getClient())
.build();
solrServer.setBaseURL(solrService);
solrServer.setUseMultiPartPost(true);

View File

@@ -333,17 +333,31 @@ public class SolrServiceImpl implements SearchService, IndexingService {
}
}
/**
* Removes all documents from the Lucene index
*/
public void deleteIndex() {
try {
final List<IndexFactory> indexableObjectServices = indexObjectServiceFactory.
getIndexFactories();
for (IndexFactory indexableObjectService : indexableObjectServices) {
indexableObjectService.deleteAll();
}
} catch (IOException | SolrServerException e) {
log.error("Error cleaning discovery index: " + e.getMessage(), e);
}
}
/**
* Iterates over all documents in the Lucene index and verifies they are in
* database, if not, they are removed.
*
* @param force whether or not to force a clean index
* @throws IOException IO exception
* @throws SQLException sql exception
* @throws SearchServiceException occurs when something went wrong with querying the solr server
*/
@Override
public void cleanIndex(boolean force) throws IOException, SQLException, SearchServiceException {
public void cleanIndex() throws IOException, SQLException, SearchServiceException {
Context context = new Context();
context.turnOffAuthorisationSystem();
@@ -351,56 +365,48 @@ public class SolrServiceImpl implements SearchService, IndexingService {
if (solrSearchCore.getSolr() == null) {
return;
}
if (force) {
final List<IndexFactory> indexableObjectServices = indexObjectServiceFactory.
getIndexFactories();
for (IndexFactory indexableObjectService : indexableObjectServices) {
indexableObjectService.deleteAll();
}
} else {
// First, we'll just get a count of the total results
SolrQuery countQuery = new SolrQuery("*:*");
countQuery.setRows(0); // don't actually request any data
// Get the total amount of results
QueryResponse totalResponse = solrSearchCore.getSolr().query(countQuery,
solrSearchCore.REQUEST_METHOD);
long total = totalResponse.getResults().getNumFound();
// First, we'll just get a count of the total results
SolrQuery countQuery = new SolrQuery("*:*");
countQuery.setRows(0); // don't actually request any data
// Get the total amount of results
QueryResponse totalResponse = solrSearchCore.getSolr().query(countQuery,
solrSearchCore.REQUEST_METHOD);
long total = totalResponse.getResults().getNumFound();
int start = 0;
int batch = 100;
int start = 0;
int batch = 100;
// Now get actual Solr Documents in batches
SolrQuery query = new SolrQuery();
query.setFields(SearchUtils.RESOURCE_UNIQUE_ID, SearchUtils.RESOURCE_ID_FIELD,
SearchUtils.RESOURCE_TYPE_FIELD);
query.addSort(SearchUtils.RESOURCE_UNIQUE_ID, SolrQuery.ORDER.asc);
query.setQuery("*:*");
query.setRows(batch);
// Keep looping until we hit the total number of Solr docs
while (start < total) {
query.setStart(start);
QueryResponse rsp = solrSearchCore.getSolr().query(query, solrSearchCore.REQUEST_METHOD);
SolrDocumentList docs = rsp.getResults();
// Now get actual Solr Documents in batches
SolrQuery query = new SolrQuery();
query.setFields(SearchUtils.RESOURCE_UNIQUE_ID, SearchUtils.RESOURCE_ID_FIELD,
SearchUtils.RESOURCE_TYPE_FIELD);
query.addSort(SearchUtils.RESOURCE_UNIQUE_ID, SolrQuery.ORDER.asc);
query.setQuery("*:*");
query.setRows(batch);
// Keep looping until we hit the total number of Solr docs
while (start < total) {
query.setStart(start);
QueryResponse rsp = solrSearchCore.getSolr().query(query, solrSearchCore.REQUEST_METHOD);
SolrDocumentList docs = rsp.getResults();
for (SolrDocument doc : docs) {
String uniqueID = (String) doc.getFieldValue(SearchUtils.RESOURCE_UNIQUE_ID);
for (SolrDocument doc : docs) {
String uniqueID = (String) doc.getFieldValue(SearchUtils.RESOURCE_UNIQUE_ID);
IndexableObject o = findIndexableObject(context, doc);
IndexableObject o = findIndexableObject(context, doc);
if (o == null) {
log.info("Deleting: " + uniqueID);
/*
* Use IndexWriter to delete, its easier to manage
* write.lock
*/
unIndexContent(context, uniqueID);
} else {
log.debug("Keeping: " + o.getUniqueIndexID());
}
if (o == null) {
log.info("Deleting: " + uniqueID);
/*
* Use IndexWriter to delete, its easier to manage
* write.lock
*/
unIndexContent(context, uniqueID);
} else {
log.debug("Keeping: " + o.getUniqueIndexID());
}
start += batch;
}
start += batch;
}
} catch (IOException | SQLException | SolrServerException e) {
log.error("Error cleaning discovery index: " + e.getMessage(), e);

View File

@@ -14,13 +14,16 @@ import java.util.List;
import org.apache.solr.common.SolrInputDocument;
import org.dspace.content.InProgressSubmission;
import org.dspace.content.Item;
import org.dspace.content.WorkspaceItem;
import org.dspace.core.Context;
import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.indexobject.factory.CollectionIndexFactory;
import org.dspace.discovery.indexobject.factory.InprogressSubmissionIndexFactory;
import org.dspace.discovery.indexobject.factory.ItemIndexFactory;
import org.dspace.eperson.EPerson;
import org.dspace.util.SolrUtils;
import org.dspace.workflow.WorkflowItem;
import org.springframework.beans.factory.annotation.Autowired;
/**
@@ -68,7 +71,15 @@ public abstract class InprogressSubmissionIndexFactoryImpl
locations.add("l" + inProgressSubmission.getCollection().getID());
// Add item metadata
indexableItemService.addDiscoveryFields(doc, context, item, SearchUtils.getAllDiscoveryConfigurations(item));
List<DiscoveryConfiguration> discoveryConfigurations;
if (inProgressSubmission instanceof WorkflowItem) {
discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations((WorkflowItem) inProgressSubmission);
} else if (inProgressSubmission instanceof WorkspaceItem) {
discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations((WorkspaceItem) inProgressSubmission);
} else {
discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item);
}
indexableItemService.addDiscoveryFields(doc, context, item, discoveryConfigurations);
indexableCollectionService.storeCommunityCollectionLocations(doc, locations);
}
}

View File

@@ -40,7 +40,9 @@ import org.springframework.beans.factory.annotation.Autowired;
* Time: 10:05
*
* Notify Google Analytics of... well anything we want really.
* @deprecated Use org.dspace.google.GoogleAsyncEventListener instead
*/
@Deprecated
public class GoogleRecorderEventListener extends AbstractUsageEventListener {
private String analyticsKey;

View File

@@ -15,6 +15,7 @@ import java.util.regex.Pattern;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.DSpaceObject;
import org.dspace.content.service.SiteService;
@@ -25,10 +26,9 @@ import org.dspace.handle.service.HandleService;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Interface to the <a href="http://www.handle.net" target=_new>CNRI Handle
* System </a>.
* Interface to the <a href="https://www.handle.net" target=_new>CNRI Handle
* System</a>.
*
* <p>
* Currently, this class simply maps handles to local facilities; handles which
@@ -37,13 +37,12 @@ import org.springframework.beans.factory.annotation.Autowired;
* </p>
*
* @author Peter Breton
* @version $Revision$
*/
public class HandleServiceImpl implements HandleService {
/**
* log4j category
* log category
*/
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(HandleServiceImpl.class);
private static final Logger log = LogManager.getLogger();
/**
* Prefix registered to no one
@@ -84,9 +83,7 @@ public class HandleServiceImpl implements HandleService {
String url = configurationService.getProperty("dspace.ui.url")
+ "/handle/" + handle;
if (log.isDebugEnabled()) {
log.debug("Resolved " + handle + " to " + url);
}
log.debug("Resolved {} to {}", handle, url);
return url;
}
@@ -96,7 +93,7 @@ public class HandleServiceImpl implements HandleService {
throws SQLException {
String dspaceUrl = configurationService.getProperty("dspace.ui.url")
+ "/handle/";
String handleResolver = configurationService.getProperty("handle.canonical.prefix");
String handleResolver = getCanonicalPrefix();
String handle = null;
@@ -126,10 +123,8 @@ public class HandleServiceImpl implements HandleService {
// Let the admin define a new prefix, if not then we'll use the
// CNRI default. This allows the admin to use "hdl:" if they want to or
// use a locally branded prefix handle.myuni.edu.
String handlePrefix = configurationService.getProperty("handle.canonical.prefix");
if (StringUtils.isBlank(handlePrefix)) {
handlePrefix = "http://hdl.handle.net/";
}
String handlePrefix = configurationService.getProperty("handle.canonical.prefix",
"https://hdl.handle.net/");
return handlePrefix;
}
@@ -151,10 +146,10 @@ public class HandleServiceImpl implements HandleService {
handle.setResourceTypeId(dso.getType());
handleDAO.save(context, handle);
if (log.isDebugEnabled()) {
log.debug("Created new handle for "
+ Constants.typeText[dso.getType()] + " (ID=" + dso.getID() + ") " + handleId);
}
log.debug("Created new handle for {} (ID={}) {}",
() -> Constants.typeText[dso.getType()],
() -> dso.getID(),
() -> handleId);
return handleId;
}
@@ -205,10 +200,10 @@ public class HandleServiceImpl implements HandleService {
dso.addHandle(handle);
handleDAO.save(context, handle);
if (log.isDebugEnabled()) {
log.debug("Created new handle for "
+ Constants.typeText[dso.getType()] + " (ID=" + dso.getID() + ") " + suppliedHandle);
}
log.debug("Created new handle for {} (ID={}) {}",
() -> Constants.typeText[dso.getType()],
() -> dso.getID(),
() -> suppliedHandle);
return suppliedHandle;
}
@@ -230,15 +225,15 @@ public class HandleServiceImpl implements HandleService {
handleDAO.save(context, handle);
if (log.isDebugEnabled()) {
log.debug("Unbound Handle " + handle.getHandle() + " from object " + Constants.typeText[dso
.getType()] + " id=" + dso.getID());
}
log.debug("Unbound Handle {} from object {} id={}",
() -> handle.getHandle(),
() -> Constants.typeText[dso.getType()],
() -> dso.getID());
}
} else {
log.trace(
"Cannot find Handle entry to unbind for object " + Constants.typeText[dso.getType()] + " id=" + dso
.getID() + ". Handle could have been unbinded before.");
"Cannot find Handle entry to unbind for object {} id={}. Handle could have been unbound before.",
Constants.typeText[dso.getType()], dso.getID());
}
}
@@ -284,7 +279,7 @@ public class HandleServiceImpl implements HandleService {
public List<String> getHandlesForPrefix(Context context, String prefix)
throws SQLException {
List<Handle> handles = handleDAO.findByPrefix(context, prefix);
List<String> handleStrings = new ArrayList<String>(handles.size());
List<String> handleStrings = new ArrayList<>(handles.size());
for (Handle handle : handles) {
handleStrings.add(handle.getHandle());
}
@@ -296,7 +291,7 @@ public class HandleServiceImpl implements HandleService {
String prefix = configurationService.getProperty("handle.prefix");
if (StringUtils.isBlank(prefix)) {
prefix = EXAMPLE_PREFIX; // XXX no good way to exit cleanly
log.error("handle.prefix is not configured; using " + prefix);
log.error("handle.prefix is not configured; using {}", prefix);
}
return prefix;
}

View File

@@ -14,8 +14,8 @@ import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
/**
* Interface to the <a href="http://www.handle.net" target=_new>CNRI Handle
* System </a>.
* Interface to the <a href="https://www.handle.net" target=_new>CNRI Handle
* System</a>.
*
* <p>
* Currently, this class simply maps handles to local facilities; handles which
@@ -24,7 +24,6 @@ import org.dspace.core.Context;
* </p>
*
* @author Peter Breton
* @version $Revision$
*/
public interface HandleService {
@@ -42,7 +41,6 @@ public interface HandleService {
public String resolveToURL(Context context, String handle)
throws SQLException;
/**
* Try to detect a handle in a URL.
*
@@ -56,18 +54,18 @@ public interface HandleService {
throws SQLException;
/**
* Provides handle canonical prefix using http://hdl.handle.net if not
* Provides handle canonical prefix using https://hdl.handle.net if not
* overridden by the configuration property handle.canonical.prefix.
*
* No attempt is made to verify that handle is in fact valid.
*
* @param handle The handle
*
* @return The canonical form
*/
public String getCanonicalPrefix();
/**
* Transforms handle into a URI using http://hdl.handle.net if not
* Transforms handle into a URI using https://hdl.handle.net if not
* overridden by the configuration property handle.canonical.prefix.
*
* No attempt is made to verify that handle is in fact valid.

View File

@@ -701,7 +701,7 @@ public class OAIHarvester {
if (values.size() > 0 && acceptedHandleServers != null) {
for (MetadataValue value : values) {
// 0 1 2 3 4
// http://hdl.handle.net/1234/12
// https://hdl.handle.net/1234/12
String[] urlPieces = value.getValue().split("/");
if (urlPieces.length != 5) {
continue;

View File

@@ -150,9 +150,8 @@ public class IdentifierServiceImpl implements IdentifierService {
+ "Identifier for " + contentServiceFactory.getDSpaceObjectService(dso)
.getTypeText(dso) + ", "
+ dso.getID().toString() + ".");
log.debug(ex.getMessage(), ex);
} catch (IdentifierException e) {
log.error(e.getMessage(), e);
log.error(e);
}
}
}
@@ -162,6 +161,8 @@ public class IdentifierServiceImpl implements IdentifierService {
@Override
public List<String> lookup(Context context, DSpaceObject dso) {
List<String> identifiers = new ArrayList<>();
// Attempt to lookup DSO's identifiers using every available provider
// TODO: We may want to eventually limit providers based on DSO type, as not every provider supports every DSO
for (IdentifierProvider service : providers) {
try {
String result = service.lookup(context, dso);
@@ -177,13 +178,14 @@ public class IdentifierServiceImpl implements IdentifierService {
identifiers.add(result);
}
} catch (IdentifierNotFoundException ex) {
log.info(service.getClass().getName() + " doesn't find an "
// This IdentifierNotFoundException is NOT logged by default, as some providers do not apply to
// every DSO (e.g. DOIs usually don't apply to EPerson objects). So it is expected some may fail lookup.
log.debug(service.getClass().getName() + " doesn't find an "
+ "Identifier for " + contentServiceFactory.getDSpaceObjectService(dso)
.getTypeText(dso) + ", "
+ dso.getID().toString() + ".");
log.debug(ex.getMessage(), ex);
} catch (IdentifierException ex) {
log.error(ex.getMessage(), ex);
log.error(ex);
}
}
@@ -228,7 +230,6 @@ public class IdentifierServiceImpl implements IdentifierService {
log.info(service.getClass().getName() + " cannot resolve "
+ "Identifier " + identifier + ": identifier not "
+ "found.");
log.debug(ex.getMessage(), ex);
} catch (IdentifierException ex) {
log.error(ex.getMessage(), ex);
}

View File

@@ -0,0 +1,24 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.iiif;
import org.dspace.content.Bitstream;
/**
* @author Michael Spalti mspalti@willamette.edu
*/
public interface IIIFApiQueryService {
    /**
     * Looks up the pixel dimensions of an image bitstream by querying the
     * configured IIIF image server, for formats whose dimensions cannot be
     * read locally (e.g. JPEG 2000).
     *
     * @param bitstream the image bitstream to query the IIIF server about
     * @return a two-element array holding the canvas dimensions
     *         (as populated from the server's info.json response; the
     *         implementation fills width then height — confirm against
     *         {@code IIIFApiQueryServiceImpl}), or {@code null} if the
     *         dimensions could not be determined
     */
    int[] getImageDimensions(Bitstream bitstream);
}

View File

@@ -0,0 +1,82 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.iiif;
import static org.dspace.iiif.canvasdimension.Util.checkDimensions;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Bitstream;
import org.dspace.iiif.util.IIIFSharedUtils;
/**
* Queries the configured IIIF server for image dimensions. Used for
* formats that cannot be easily read using ImageIO (jpeg 2000).
*
* @author Michael Spalti mspalti@willamette.edu
*/
public class IIIFApiQueryServiceImpl implements IIIFApiQueryService {

    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(IIIFApiQueryServiceImpl.class);

    @Override
    public int[] getImageDimensions(Bitstream bitstream) {
        return getIiifImageDimensions(bitstream);
    }

    /**
     * Retrieves image dimensions from the image server (IIIF Image API v.2.1.1)
     * by fetching and parsing the bitstream's info.json document.
     *
     * @param bitstream the bitstream DSO whose info.json path is derived via
     *                  {@link IIIFSharedUtils#getInfoJsonPath}
     * @return a two-element array {width, height} (validated by
     *         {@code checkDimensions}), or {@code null} if the request fails
     *         or the response lacks "width"/"height" fields
     */
    private int[] getIiifImageDimensions(Bitstream bitstream) {
        int[] arr = new int[2];
        String path = IIIFSharedUtils.getInfoJsonPath(bitstream);
        HttpURLConnection con = null;
        try {
            URL url = new URL(path);
            con = (HttpURLConnection) url.openConnection();
            con.setRequestMethod("GET");
            // try-with-resources guarantees the reader (and its underlying
            // stream) is closed even if parsing throws.
            try (BufferedReader in = new BufferedReader(
                new InputStreamReader(con.getInputStream()))) {
                StringBuilder response = new StringBuilder();
                String inputLine;
                while ((inputLine = in.readLine()) != null) {
                    response.append(inputLine);
                }
                JsonNode parent = new ObjectMapper().readTree(response.toString());
                // return dimensions if found.
                if (parent.has("width") && parent.has("height")) {
                    arr[0] = parent.get("width").asInt();
                    arr[1] = parent.get("height").asInt();
                    return checkDimensions(arr);
                }
            }
        } catch (IOException e) {
            // Best-effort lookup: log and fall through to a null return so
            // callers can decide how to handle missing dimensions.
            log.error(e.getMessage(), e);
        } finally {
            // Release the connection; the original code leaked it.
            if (con != null) {
                con.disconnect();
            }
        }
        return null;
    }
}

View File

@@ -0,0 +1,228 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.iiif.canvasdimension;
import java.util.Arrays;
import java.util.UUID;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.MissingArgumentException;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.dspace.app.util.factory.UtilServiceFactory;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.iiif.canvasdimension.factory.IIIFCanvasDimensionServiceFactory;
import org.dspace.iiif.canvasdimension.service.IIIFCanvasDimensionService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
* Sets IIIF canvas metadata on bitstreams based on image size.
*
* @author Michael Spalti mspalti@willamette.edu
*/
public class CanvasDimensionCLI {

    private static final EPersonService epersonService = EPersonServiceFactory.getInstance().getEPersonService();
    private static final ConfigurationService configurationService = DSpaceServicesFactory.getInstance()
        .getConfigurationService();

    // Utility class: not instantiable.
    private CanvasDimensionCLI() {}

    /**
     * Command-line entry point. Resolves a Community, Collection, or Item by
     * handle or UUID, then delegates to {@code IIIFCanvasDimensionService} to
     * set IIIF canvas dimension metadata on the object's image bitstreams.
     *
     * Recognized options: -i identifier (required), -e eperson (required),
     * -f force, -q quiet, -m maximum, -s skip list, -h help.
     *
     * Exits with status 1 on usage errors or an unresolvable eperson;
     * exits 0 after printing help.
     *
     * @param argv command-line arguments
     * @throws Exception on database or processing failures not handled below
     */
    public static void main(String[] argv) throws Exception {

        // Warn (but do not abort) when IIIF is disabled in configuration.
        boolean iiifEnabled = configurationService.getBooleanProperty("iiif.enabled");
        if (!iiifEnabled) {
            System.out.println("WARNING: IIIF is not enabled on this DSpace server.");
        }
        // default to not updating existing dimensions
        boolean force = false;
        // default to printing messages
        boolean isQuiet = false;
        // default to no limit
        int max2Process = Integer.MAX_VALUE;

        String identifier = null;
        String eperson = null;

        Context context = new Context();

        IIIFCanvasDimensionService canvasProcessor = IIIFCanvasDimensionServiceFactory.getInstance()
            .getIiifCanvasDimensionService();

        CommandLineParser parser = new DefaultParser();
        Options options = new Options();

        options.addOption("i", "identifier", true,
            "process IIIF canvas dimensions for images belonging to this identifier");
        options.addOption("e", "eperson", true,
            "email of eperson setting the canvas dimensions");
        options.addOption("f", "force", false,
            "force update of all IIIF canvas height and width dimensions");
        options.addOption("q", "quiet", false,
            "do not print anything except in the event of errors");
        options.addOption("m", "maximum", true,
            "process no more than maximum items");
        options.addOption("h", "help", false,
            "display help");

        // -s takes one or more comma-separated identifiers to skip.
        Option skipOption = Option.builder("s")
            .longOpt("skip")
            .hasArg()
            .hasArgs()
            .valueSeparator(',')
            .desc(
                "SKIP the bitstreams belonging to identifier\n" +
                    "Separate multiple identifiers with a comma (,)\n" +
                    "(e.g. -s \n 123456789/34,123456789/323)")
            .build();

        options.addOption(skipOption);

        CommandLine line = null;

        try {
            line = parser.parse(options, argv);
        } catch (MissingArgumentException e) {
            // NOTE(review): only MissingArgumentException is caught here;
            // other ParseExceptions propagate out of main — confirm intended.
            System.out.println("ERROR: " + e.getMessage());
            HelpFormatter help = new HelpFormatter();
            help.printHelp("CanvasDimension processor\n", options);
            System.exit(1);
        }

        if (line.hasOption('h')) {
            HelpFormatter help = new HelpFormatter();
            help.printHelp("CanvasDimension processor\n", options);
            System.out
                .println("\nUUID example: iiif-canvas-dimensions -e user@email.org " +
                    "-i 1086306d-8a51-43c3-98b9-c3b00f49105f");
            System.out
                .println("\nHandle example: iiif-canvas-dimensions -e user@email.org " +
                    "-i 123456789/12");
            System.exit(0);
        }
        if (line.hasOption('f')) {
            force = true;
        }
        if (line.hasOption('q')) {
            isQuiet = true;
        }
        if (line.hasOption('e')) {
            eperson = line.getOptionValue('e');
        }
        if (line.hasOption('i')) {
            identifier = line.getOptionValue('i');
        } else {
            // The identifier is mandatory; print usage and bail out.
            HelpFormatter help = new HelpFormatter();
            help.printHelp("CanvasDimension processor\n", options);
            System.out.println("An identifier for a Community, Collection, or Item must be provided.");
            System.exit(1);
        }
        if (line.hasOption('m')) {
            max2Process = Integer.parseInt(line.getOptionValue('m'));
            // NOTE(review): "<= 1" also rejects a maximum of exactly 1 —
            // confirm whether "< 1" was intended.
            if (max2Process <= 1) {
                System.out.println("Invalid maximum value '" +
                    line.getOptionValue('m') + "' - ignoring");
                max2Process = Integer.MAX_VALUE;
            }
        }
        String[] skipIds;

        if (line.hasOption('s')) {
            //specified which identifiers to skip when processing
            skipIds = line.getOptionValues('s');

            if (skipIds == null || skipIds.length == 0) { //display error, since no identifiers specified to skip
                System.err.println("\nERROR: -s (-skip) option requires at least one identifier to SKIP.\n" +
                    "Make sure to separate multiple identifiers with a comma!\n" +
                    "(e.g. -s 123456789/34,123456789/323)\n");
                HelpFormatter myhelp = new HelpFormatter();
                myhelp.printHelp("Canvas Dimensions\n", options);
                System.exit(1);
            }
            canvasProcessor.setSkipList(Arrays.asList(skipIds));
        }

        // A "/" means the identifier is a handle; otherwise treat it as a UUID.
        DSpaceObject dso = null;
        if (identifier.indexOf('/') != -1) {
            dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, identifier);
        } else {
            dso = UtilServiceFactory.getInstance().getDSpaceObjectUtils()
                .findDSpaceObject(context, UUID.fromString(identifier));
        }
        if (dso == null) {
            throw new IllegalArgumentException("Cannot resolve "
                + identifier + " to a DSpace object.");
        }
        EPerson user;
        if (eperson == null) {
            System.out.println("You must provide an eperson using the \"-e\" flag.");
            System.exit(1);
        }
        // Resolve the eperson by email ("@" present) or by UUID.
        if (eperson.indexOf('@') != -1) {
            // @ sign, must be an email
            user = epersonService.findByEmail(context, eperson);
        } else {
            user = epersonService.find(context, UUID.fromString(eperson));
        }
        if (user == null) {
            System.out.println("Error, eperson cannot be found: " + eperson);
            System.exit(1);
        }
        context.setCurrentUser(user);

        canvasProcessor.setForceProcessing(force);
        canvasProcessor.setMax2Process(max2Process);
        canvasProcessor.setIsQuiet(isQuiet);

        int processed = 0;
        // Dispatch on the resolved object's type; communities and collections
        // are processed recursively by the service.
        switch (dso.getType()) {
            case Constants.COMMUNITY:
                processed = canvasProcessor.processCommunity(context, (Community) dso);
                break;
            case Constants.COLLECTION:
                processed = canvasProcessor.processCollection(context, (Collection) dso);
                break;
            case Constants.ITEM:
                canvasProcessor.processItem(context, (Item) dso);
                processed = 1;
                break;
            default:
                System.out.println("Unsupported object type.");
                break;
        }
        // commit changes
        if (processed >= 1) {
            context.commit();
        }
        // Always print summary to standard out.
        System.out.println(processed + " IIIF items were processed.");
    }
}

View File

@@ -0,0 +1,241 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.iiif.canvasdimension;
import static org.dspace.iiif.util.IIIFSharedUtils.METADATA_IIIF_HEIGHT;
import static org.dspace.iiif.util.IIIFSharedUtils.METADATA_IIIF_IMAGE;
import static org.dspace.iiif.util.IIIFSharedUtils.METADATA_IIIF_SCHEMA;
import static org.dspace.iiif.util.IIIFSharedUtils.METADATA_IIIF_WIDTH;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.DSpaceObjectService;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.iiif.IIIFApiQueryService;
import org.dspace.iiif.canvasdimension.service.IIIFCanvasDimensionService;
import org.dspace.iiif.util.IIIFSharedUtils;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * This service sets IIIF canvas dimensions for bitstreams. Processes communities,
 * collections, and individual items.
 *
 * @author Michael Spalti mspalti@willamette.edu
 */
public class IIIFCanvasDimensionServiceImpl implements IIIFCanvasDimensionService {

    @Autowired
    ItemService itemService;
    @Autowired
    CommunityService communityService;
    @Autowired
    BitstreamService bitstreamService;
    @Autowired
    DSpaceObjectService<Bitstream> dSpaceObjectService;
    @Autowired
    IIIFApiQueryService iiifApiQuery;

    // When true, existing canvas metadata is recomputed and replaced.
    private boolean forceProcessing = false;
    // When true, per-object progress messages are suppressed.
    private boolean isQuiet = false;
    // Handles of DSOs to skip entirely; null means skip nothing.
    private List<String> skipList = null;
    // Upper bound on the number of items to process.
    private int max2Process = Integer.MAX_VALUE;
    // Running count of items whose canvas metadata was updated.
    private int processed = 0;

    // used to check for an existing canvas dimension on a bitstream
    private static final String IIIF_WIDTH_METADATA = "iiif.image.width";

    @Override
    public void setForceProcessing(boolean force) {
        forceProcessing = force;
    }

    @Override
    public void setIsQuiet(boolean quiet) {
        isQuiet = quiet;
    }

    @Override
    public void setMax2Process(int max2Process) {
        this.max2Process = max2Process;
    }

    @Override
    public void setSkipList(List<String> skipList) {
        this.skipList = skipList;
    }

    @Override
    public int processCommunity(Context context, Community community) throws Exception {
        if (!inSkipList(community.getHandle())) {
            for (Community subcommunity : community.getSubcommunities()) {
                // Stop descending once the processing cap has been reached.
                if (processed >= max2Process) {
                    break;
                }
                processCommunity(context, subcommunity);
            }
            for (Collection collection : community.getCollections()) {
                if (processed >= max2Process) {
                    break;
                }
                processCollection(context, collection);
            }
        }
        return processed;
    }

    @Override
    public int processCollection(Context context, Collection collection) throws Exception {
        if (!inSkipList(collection.getHandle())) {
            Iterator<Item> itemIterator = itemService.findAllByCollection(context, collection);
            while (itemIterator.hasNext() && processed < max2Process) {
                processItem(context, itemIterator.next());
            }
        }
        return processed;
    }

    @Override
    public void processItem(Context context, Item item) throws Exception {
        // Only items flagged as IIIF-enabled (and not in the skip list) are updated.
        if (!inSkipList(item.getHandle()) && IIIFSharedUtils.isIIIFItem(item)) {
            if (processItemBundles(context, item)) {
                ++processed;
            }
        }
    }

    /**
     * Processes all IIIF bundles for an item, updating canvas metadata on each
     * image bitstream found.
     *
     * @param context the DSpace context
     * @param item the item whose IIIF bundles are processed
     * @return true if at least one bitstream was updated
     * @throws Exception if bitstream retrieval or metadata update fails
     */
    private boolean processItemBundles(Context context, Item item) throws Exception {
        List<Bundle> bundles = IIIFSharedUtils.getIIIFBundles(item);
        boolean done = false;
        for (Bundle bundle : bundles) {
            List<Bitstream> bitstreams = bundle.getBitstreams();
            for (Bitstream bit : bitstreams) {
                done |= processBitstream(context, bit);
            }
        }
        if (done && !isQuiet) {
            System.out.println("Updated canvas metadata for item: " + item.getID());
        }
        return done;
    }

    /**
     * Gets image height and width for the bitstream and stores them as canvas
     * metadata. Dimensions are first read from the bitstream content via ImageIO;
     * when that fails (e.g. jp2 images), they are requested from the IIIF image
     * server. If width metadata already exists, the bitstream is re-processed
     * only when forceProcessing is true.
     *
     * @param context the DSpace context
     * @param bitstream the bitstream to examine
     * @return true if canvas metadata was set on the bitstream
     * @throws SQLException on database error
     * @throws AuthorizeException if the bitstream may not be read or updated
     * @throws IOException if the bitstream content cannot be retrieved
     */
    private boolean processBitstream(Context context, Bitstream bitstream) throws SQLException, AuthorizeException,
        IOException {
        boolean updated = false;
        boolean isImage = bitstream.getFormat(context).getMIMEType().contains("image/");
        if (isImage) {
            Optional<MetadataValue> op = bitstream.getMetadata().stream()
                                                  .filter(m -> m.getMetadataField().toString('.')
                                                                .contentEquals(IIIF_WIDTH_METADATA)).findFirst();
            if (op.isEmpty() || forceProcessing) {
                if (forceProcessing && !isQuiet) {
                    System.out.println("Force processing for bitstream: " + bitstream.getID());
                }
                int[] dims;
                // try-with-resources guarantees the stream is closed on all paths.
                try (InputStream stream = bitstreamService.retrieve(context, bitstream)) {
                    try {
                        dims = ImageDimensionReader.getImageDimensions(stream);
                        if (dims == null) {
                            // If image dimensions are not available try the iiif image server.
                            dims = iiifApiQuery.getImageDimensions(bitstream);
                        }
                    } catch (IOException e) {
                        // If an exception was raised by ImageIO, try the iiif image server.
                        dims = iiifApiQuery.getImageDimensions(bitstream);
                    }
                }
                if (dims != null) {
                    updated = setBitstreamMetadata(context, bitstream, dims);
                    // update the bitstream
                    bitstreamService.update(context, bitstream);
                }
            }
        }
        return updated;
    }

    /**
     * Sets bitstream metadata for "iiif.image.width" and "iiif.image.height",
     * replacing any existing values.
     *
     * @param context the DSpace context
     * @param bitstream the bitstream to update
     * @param dims two-element array: width at index 0, height at index 1
     * @return always true (metadata was written)
     * @throws SQLException on database error
     */
    private boolean setBitstreamMetadata(Context context, Bitstream bitstream, int[] dims) throws SQLException {
        dSpaceObjectService.clearMetadata(context, bitstream, METADATA_IIIF_SCHEMA,
            METADATA_IIIF_IMAGE, METADATA_IIIF_WIDTH, Item.ANY);
        dSpaceObjectService.setMetadataSingleValue(context, bitstream, METADATA_IIIF_SCHEMA,
            METADATA_IIIF_IMAGE, METADATA_IIIF_WIDTH, null, String.valueOf(dims[0]));
        dSpaceObjectService.clearMetadata(context, bitstream, METADATA_IIIF_SCHEMA,
            METADATA_IIIF_IMAGE, METADATA_IIIF_HEIGHT, Item.ANY);
        dSpaceObjectService.setMetadataSingleValue(context, bitstream, METADATA_IIIF_SCHEMA,
            METADATA_IIIF_IMAGE, METADATA_IIIF_HEIGHT, null, String.valueOf(dims[1]));
        if (!isQuiet) {
            System.out.println("Added IIIF canvas metadata to bitstream: " + bitstream.getID());
        }
        return true;
    }

    /**
     * Tests whether the identifier is in the skip list.
     *
     * @param identifier a DSO handle
     * @return true if the identifier should be skipped
     */
    private boolean inSkipList(String identifier) {
        if (skipList != null && skipList.contains(identifier)) {
            if (!isQuiet) {
                System.out.println("SKIP-LIST: skipped bitstreams within identifier " + identifier);
            }
            return true;
        } else {
            return false;
        }
    }
}

View File

@@ -0,0 +1,47 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.iiif.canvasdimension;
import static org.dspace.iiif.canvasdimension.Util.checkDimensions;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.io.InputStream;
import javax.imageio.ImageIO;
/**
 * Reads and returns height and width dimensions for image bitstreams.
 *
 * @author Michael Spalti mspalti@willamette.edu
 */
public class ImageDimensionReader {

    // Static utility; never instantiated.
    private ImageDimensionReader() {}

    /**
     * Uses ImageIO to read height and width dimensions from an image stream.
     *
     * @param image input stream for a dspace image bitstream
     * @return two-element array of width and height (adjusted by
     *         {@code checkDimensions}), or null if the image format cannot
     *         be read or yields non-positive dimensions
     * @throws IOException if reading the stream fails
     */
    public static int[] getImageDimensions(InputStream image) throws IOException {
        BufferedImage decoded = ImageIO.read(image);
        if (decoded == null) {
            // No registered ImageIO reader understands this format.
            return null;
        }
        int width = decoded.getWidth(null);
        int height = decoded.getHeight(null);
        if (width <= 0 || height <= 0) {
            return null;
        }
        return checkDimensions(new int[] {width, height});
    }
}

View File

@@ -0,0 +1,34 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.iiif.canvasdimension;
/**
 * Utilities for IIIF canvas dimension processing.
 *
 * @author Michael Spalti mspalti@willamette.edu
 */
public class Util {

    // Static utility; never instantiated.
    private Util() {}

    /**
     * Applies the IIIF Presentation API 2.1.1 recommendation: if the image is
     * less than 1200 pixels on either edge, then the canvas dimensions SHOULD
     * be double those of the image. The array is modified in place and the
     * same reference is returned.
     *
     * @param dims two-element array of width and height in pixels
     * @return the same array, with both values doubled when either edge is
     *         below the 1200px threshold
     */
    public static int[] checkDimensions(int[] dims) {
        boolean belowThreshold = dims[0] < 1200 || dims[1] < 1200;
        if (belowThreshold) {
            dims[0] *= 2;
            dims[1] *= 2;
        }
        return dims;
    }
}

View File

@@ -0,0 +1,27 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.iiif.canvasdimension.factory;
import org.dspace.iiif.canvasdimension.service.IIIFCanvasDimensionService;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
 * Factory for the image dimension service.
 *
 * @author Michael Spalti mspalti@willamette.edu
 */
public abstract class IIIFCanvasDimensionServiceFactory {

    /**
     * Looks up the factory bean named "iiifCanvasDimensionServiceFactory"
     * from the DSpace service manager.
     *
     * @return the configured factory instance
     */
    public static IIIFCanvasDimensionServiceFactory getInstance() {
        return DSpaceServicesFactory.getInstance().getServiceManager()
                .getServiceByName("iiifCanvasDimensionServiceFactory",
                        IIIFCanvasDimensionServiceFactory.class);
    }

    /**
     * @return the canvas dimension service implementation
     */
    public abstract IIIFCanvasDimensionService getIiifCanvasDimensionService();
}

View File

@@ -0,0 +1,27 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.iiif.canvasdimension.factory;
import org.dspace.iiif.canvasdimension.service.IIIFCanvasDimensionService;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Factory for the image dimension service.
 *
 * @author Michael Spalti mspalti@willamette.edu
 */
public class IIIFCanvasDimensionServiceFactoryImpl extends IIIFCanvasDimensionServiceFactory {

    // Injected by Spring from the service configuration.
    @Autowired()
    private IIIFCanvasDimensionService iiifCanvasDimensionService;

    /**
     * @return the autowired canvas dimension service
     */
    @Override
    public IIIFCanvasDimensionService getIiifCanvasDimensionService() {
        return iiifCanvasDimensionService;
    }
}

View File

@@ -0,0 +1,69 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.iiif.canvasdimension.service;
import java.util.List;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.core.Context;
/**
 * Service for setting IIIF canvas dimension metadata on communities,
 * collections, and items.
 */
public interface IIIFCanvasDimensionService {

    /**
     * Set IIIF canvas dimensions on all IIIF items in a community and its
     * sub-communities.
     *
     * @param context the DSpace context
     * @param community the community to process recursively
     * @return the running count of items processed so far
     * @throws Exception if processing fails
     */
    int processCommunity(Context context, Community community) throws Exception;

    /**
     * Set IIIF canvas dimensions on all IIIF items in a collection.
     *
     * @param context the DSpace context
     * @param collection the collection to process
     * @return the running count of items processed so far
     * @throws Exception if processing fails
     */
    int processCollection(Context context, Collection collection) throws Exception;

    /**
     * Set IIIF canvas dimensions for an item.
     *
     * @param context the DSpace context
     * @param item the item to process
     * @throws Exception if processing fails
     */
    void processItem(Context context, Item item) throws Exception;

    /**
     * Set the force processing property. If true, existing canvas
     * metadata will be replaced.
     *
     * @param force whether to replace existing canvas metadata
     */
    void setForceProcessing(boolean force);

    /**
     * Set whether to output messages during processing.
     *
     * @param quiet true to suppress progress messages
     */
    void setIsQuiet(boolean quiet);

    /**
     * Set the maximum number of items to process.
     *
     * @param max2Process the processing cap
     */
    void setMax2Process(int max2Process);

    /**
     * Set dso identifiers (handles) to skip.
     *
     * @param skipList handles of DSOs to exclude from processing
     */
    void setSkipList(List<String> skipList);
}

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.iiif;
package org.dspace.iiif.consumer;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.iiif;
package org.dspace.iiif.consumer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.CacheManager;

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.iiif;
package org.dspace.iiif.consumer;
import java.util.HashSet;
import java.util.Set;
@@ -74,7 +74,8 @@ public class IIIFCacheEventConsumer implements Consumer {
clearAll = true;
}
if ((et == Event.ADD || et == Event.MODIFY_METADATA ) && subject != null) {
if ((et == Event.ADD || et == Event.MODIFY_METADATA ) && subject != null
&& ((Bitstream) subject).getBundles().size() > 0) {
// set subject to be the parent Item.
Bundle bundle = ((Bitstream) subject).getBundles().get(0);
subject = bundle.getItems().get(0);

View File

@@ -0,0 +1,113 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.iiif.util;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.license.CreativeCommonsServiceImpl;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
 * Shared utilities for IIIF processing.
 *
 * @author Michael Spalti mspalti@willamette.edu
 * @author Andrea Bollini (andrea.bollini at 4science.it)
 */
public class IIIFSharedUtils {

    // metadata field (dotted form) used to enable the iiif features on the item
    public static final String METADATA_IIIF_ENABLED = "dspace.iiif.enabled";
    // The DSpace bundle for other content related to item.
    protected static final String OTHER_CONTENT_BUNDLE = "OtherContent";
    // Configuration property holding the IIIF image server url.
    protected static final String IMAGE_SERVER_PATH = "iiif.image.server";
    // IIIF metadata definitions (schema/element/qualifier parts)
    public static final String METADATA_IIIF_SCHEMA  = "iiif";
    public static final String METADATA_IIIF_IMAGE = "image";
    public static final String METADATA_IIIF_HEIGHT = "height";
    public static final String METADATA_IIIF_WIDTH = "width";

    protected static final ConfigurationService configurationService
            = DSpaceServicesFactory.getInstance().getConfigurationService();

    // Static utility; never instantiated.
    private IIIFSharedUtils() {}

    /**
     * Returns true when the item itself carries the "dspace.iiif.enabled"
     * metadata with a value of "true" or "yes" (case-insensitive).
     */
    public static boolean isIIIFItem(Item item) {
        return item.getMetadata().stream().filter(m -> m.getMetadataField().toString('.')
                                                        .contentEquals(METADATA_IIIF_ENABLED))
                   .anyMatch(m -> m.getValue().equalsIgnoreCase("true") ||
                       m.getValue().equalsIgnoreCase("yes"));
    }

    /**
     * This method returns the bundles holding IIIF resources if any.
     * If there is no IIIF content available an empty bundle list is returned.
     * @param item the DSpace item
     *
     * @return list of DSpace bundles with IIIF content
     */
    public static List<Bundle> getIIIFBundles(Item item) {
        boolean iiif = isIIIFEnabled(item);
        List<Bundle> bundles = new ArrayList<>();
        if (iiif) {
            bundles = item.getBundles().stream().filter(IIIFSharedUtils::isIIIFBundle).collect(Collectors.toList());
        }
        return bundles;
    }

    /**
     * This method verifies whether the IIIF feature is enabled on the item or
     * its parent collection (enabled on either is sufficient).
     *
     * NOTE(review): item.getOwningCollection() may be null for in-progress
     * (workspace/workflow) items — confirm callers only pass archived items.
     *
     * @param item the dspace item
     * @return true if the item supports IIIF
     */
    public static boolean isIIIFEnabled(Item item) {
        return item.getOwningCollection().getMetadata().stream()
                   .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_ENABLED))
                   .anyMatch(m -> m.getValue().equalsIgnoreCase("true") ||
                       m.getValue().equalsIgnoreCase("yes"))
            || item.getMetadata().stream()
                   .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_ENABLED))
                   .anyMatch(m -> m.getValue().equalsIgnoreCase("true") ||
                       m.getValue().equalsIgnoreCase("yes"));
    }

    /**
     * Utility method to check if a bundle can contain bitstreams to use as IIIF
     * resources. A bundle qualifies when its name is not one of the reserved
     * system bundles and it does not carry an explicit "false"/"no" value for
     * the IIIF-enabled metadata.
     *
     * @param b the DSpace bundle to check
     * @return true if the bundle can contain bitstreams to use as IIIF resources
     */
    public static boolean isIIIFBundle(Bundle b) {
        return !StringUtils.equalsAnyIgnoreCase(b.getName(), Constants.LICENSE_BUNDLE_NAME,
            Constants.METADATA_BUNDLE_NAME, CreativeCommonsServiceImpl.CC_BUNDLE_NAME, "THUMBNAIL",
            "BRANDED_PREVIEW", "TEXT", OTHER_CONTENT_BUNDLE)
            && b.getMetadata().stream()
                .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_ENABLED))
                .noneMatch(m -> m.getValue().equalsIgnoreCase("false") || m.getValue().equalsIgnoreCase("no"));
    }

    /**
     * Returns the url for retrieving info.json metadata from the image server.
     *
     * NOTE(review): assumes the configured "iiif.image.server" value ends with
     * a trailing slash; otherwise the bitstream ID is fused to the host path —
     * verify the configuration convention.
     *
     * @param bitstream the bitstream whose info.json url is wanted
     * @return the image server info.json url for the bitstream
     */
    public static String getInfoJsonPath(Bitstream bitstream) {
        String iiifImageServer = configurationService.getProperty(IMAGE_SERVER_PATH);
        return iiifImageServer + bitstream.getID() + "/info.json";
    }
}

View File

@@ -0,0 +1,196 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.service.impl;
import java.util.concurrent.TimeUnit;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import javax.inject.Named;
import javax.inject.Singleton;
import org.apache.http.HeaderElement;
import org.apache.http.HeaderElementIterator;
import org.apache.http.HttpResponse;
import org.apache.http.conn.ConnectionKeepAliveStrategy;
import org.apache.http.conn.HttpClientConnectionManager;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.message.BasicHeaderElementIterator;
import org.apache.http.protocol.HTTP;
import org.apache.http.protocol.HttpContext;
import org.dspace.services.ConfigurationService;
/**
 * Factory for HTTP clients sharing a pool of connections.
 *
 * <p>You may create multiple pools. Each is identified by a configuration
 * "prefix" (passed to the constructor) which is used to create names of
 * properties which will configure the pool. The properties are:
 *
 * <dl>
 *   <dt>PREFIX.client.keepAlive</dt>
 *   <dd>Default keep-alive time for open connections, in milliseconds</dd>
 *   <dt>PREFIX.client.maxTotalConnections</dt>
 *   <dd>maximum open connections</dd>
 *   <dt>PREFIX.client.maxPerRoute</dt>
 *   <dd>maximum open connections per service instance</dd>
 *   <dt>PREFIX.client.timeToLive</dt>
 *   <dd>maximum lifetime of a pooled connection, in seconds</dd>
 * </dl>
 *
 * @author Mark H. Wood <mwood@iupui.edu>
 */
@Named
@Singleton
public class HttpConnectionPoolService {
    @Inject
    ConfigurationService configurationService;

    /** Configuration properties will begin with this string. */
    private final String configPrefix;

    /** Maximum number of concurrent pooled connections. */
    private static final int DEFAULT_MAX_TOTAL_CONNECTIONS = 20;

    /** Maximum number of concurrent pooled connections per route. */
    private static final int DEFAULT_MAX_PER_ROUTE = 15;

    /** Keep connections open at least this long, if the response did not
     * specify:  milliseconds
     */
    private static final int DEFAULT_KEEPALIVE = 5 * 1000;

    /** Pooled connection maximum lifetime:  seconds */
    private static final int DEFAULT_TTL = 10 * 60;

    /** Clean up stale connections this often:  milliseconds */
    private static final int CHECK_INTERVAL = 1000;

    /** Connection idle if unused for this long:  seconds */
    private static final int IDLE_INTERVAL = 30;

    private PoolingHttpClientConnectionManager connManager;

    private final ConnectionKeepAliveStrategy keepAliveStrategy
            = new KeepAliveStrategy();

    /**
     * Construct a pool for a given set of configuration properties.
     *
     * @param configPrefix Configuration property names will begin with this.
     */
    public HttpConnectionPoolService(String configPrefix) {
        this.configPrefix = configPrefix;
    }

    /**
     * Initialize the connection manager from configuration and start the
     * daemon thread that evicts expired and idle connections.
     */
    @PostConstruct
    protected void init() {
        connManager = new PoolingHttpClientConnectionManager(
                configurationService.getIntProperty(configPrefix + ".client.timeToLive", DEFAULT_TTL),
                TimeUnit.SECONDS);
        connManager.setMaxTotal(configurationService.getIntProperty(
                configPrefix + ".client.maxTotalConnections", DEFAULT_MAX_TOTAL_CONNECTIONS));
        connManager.setDefaultMaxPerRoute(
                configurationService.getIntProperty(configPrefix + ".client.maxPerRoute",
                        DEFAULT_MAX_PER_ROUTE));

        // Daemon so the monitor never prevents JVM shutdown.
        Thread connectionMonitor = new IdleConnectionMonitorThread(connManager);
        connectionMonitor.setDaemon(true);
        connectionMonitor.start();
    }

    /**
     * Create an HTTP client which uses a pooled connection.
     *
     * @return the client.
     */
    public CloseableHttpClient getClient() {
        CloseableHttpClient httpClient = HttpClientBuilder.create()
                .setKeepAliveStrategy(keepAliveStrategy)
                .setConnectionManager(connManager)
                .build();
        return httpClient;
    }

    /**
     * A connection keep-alive strategy that obeys the Keep-Alive header and
     * applies a default if none is given.
     *
     * Swiped from https://www.baeldung.com/httpclient-connection-management
     */
    public class KeepAliveStrategy
            implements ConnectionKeepAliveStrategy {
        /**
         * Determine how long a connection may be kept alive.
         *
         * @param response the server's response, possibly carrying a
         *                 {@code Keep-Alive: timeout=N} header element
         * @param context the HTTP context (unused)
         * @return keep-alive duration in milliseconds: the server-specified
         *         timeout when present and well-formed, otherwise the
         *         configured default
         */
        @Override
        public long getKeepAliveDuration(HttpResponse response,
                HttpContext context) {
            HeaderElementIterator it = new BasicHeaderElementIterator(
                    response.headerIterator(HTTP.CONN_KEEP_ALIVE));
            while (it.hasNext()) {
                HeaderElement he = it.nextElement();
                String name = he.getName();
                String value = he.getValue();
                if (value != null && "timeout".equalsIgnoreCase(name)) {
                    try {
                        return Long.parseLong(value) * 1000;
                    } catch (NumberFormatException e) {
                        // Server sent a malformed timeout value; ignore it and
                        // fall through to the configured default below.
                        break;
                    }
                }
            }
            // If server did not request keep-alive, use configured value.
            return configurationService.getIntProperty(configPrefix + ".client.keepAlive",
                    DEFAULT_KEEPALIVE);
        }
    }

    /**
     * Clean up stale connections.
     *
     * Swiped from https://www.baeldung.com/httpclient-connection-management
     */
    public class IdleConnectionMonitorThread
            extends Thread {
        private final HttpClientConnectionManager connMgr;
        private volatile boolean shutdown;

        /**
         * Constructor.
         *
         * @param connMgr the manager to be monitored.
         */
        public IdleConnectionMonitorThread(
                PoolingHttpClientConnectionManager connMgr) {
            super();
            this.connMgr = connMgr;
        }

        @Override
        public void run() {
            try {
                while (!shutdown) {
                    synchronized (this) {
                        // Wake periodically (or on shutdown's notifyAll) to evict
                        // expired and long-idle connections from the pool.
                        wait(CHECK_INTERVAL);
                        connMgr.closeExpiredConnections();
                        connMgr.closeIdleConnections(IDLE_INTERVAL, TimeUnit.SECONDS);
                    }
                }
            } catch (InterruptedException ex) {
                shutdown();
            }
        }

        /**
         * Cause a controlled exit from the thread.
         */
        public void shutdown() {
            shutdown = true;
            synchronized (this) {
                notifyAll();
            }
        }
    }
}

View File

@@ -9,9 +9,12 @@ package org.dspace.statistics;
import static org.apache.logging.log4j.LogManager.getLogger;
import javax.inject.Named;
import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.dspace.service.impl.HttpConnectionPoolService;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
@@ -20,13 +23,16 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
public class SolrStatisticsCore {
private static Logger log = getLogger(SolrStatisticsCore.class);
private static final Logger log = getLogger();
protected SolrClient solr = null;
@Autowired
private ConfigurationService configurationService;
@Autowired @Named("solrHttpConnectionPoolService")
private HttpConnectionPoolService httpConnectionPoolService;
/**
* Returns the {@link SolrClient} for the Statistics core.
* Initializes it if needed.
@@ -50,7 +56,9 @@ public class SolrStatisticsCore {
log.info("usage-statistics.dbfile: {}", configurationService.getProperty("usage-statistics.dbfile"));
try {
solr = new HttpSolrClient.Builder(solrService).build();
solr = new HttpSolrClient.Builder(solrService)
.withHttpClient(httpConnectionPoolService.getClient())
.build();
} catch (Exception e) {
log.error("Error accessing Solr server configured in 'solr-statistics.server'", e);
}

View File

@@ -9,13 +9,16 @@ package org.dspace.statistics.export.service;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.sql.SQLException;
import java.util.Date;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.core.Context;
@@ -57,21 +60,23 @@ public class OpenUrlServiceImpl implements OpenUrlService {
}
/**
* Returns the response code from accessing the url
* Returns the response code from accessing the url. Returns a http status 408 when the external service doesn't
* reply in 10 seconds
*
* @param urlStr
* @return response code from the url
* @throws IOException
*/
protected int getResponseCodeFromUrl(final String urlStr) throws IOException {
URLConnection conn;
URL url = new URL(urlStr);
conn = url.openConnection();
HttpGet httpGet = new HttpGet(urlStr);
RequestConfig requestConfig = getRequestConfigBuilder().setConnectTimeout(10 * 1000).build();
HttpClient httpClient = HttpClientBuilder.create().setDefaultRequestConfig(requestConfig).build();
HttpResponse httpResponse = httpClient.execute(httpGet);
return httpResponse.getStatusLine().getStatusCode();
}
HttpURLConnection httpURLConnection = (HttpURLConnection) conn;
int responseCode = httpURLConnection.getResponseCode();
httpURLConnection.disconnect();
return responseCode;
protected RequestConfig.Builder getRequestConfigBuilder() {
return RequestConfig.custom();
}
/**

View File

@@ -1327,7 +1327,7 @@ public class DatabaseUtils {
// Reindex Discovery completely
// Force clean all content
this.indexer.cleanIndex(true);
this.indexer.deleteIndex();
// Recreate the entire index (overwriting existing one)
this.indexer.createIndex(context);
// Rebuild spell checker (which is based on index)

View File

@@ -681,7 +681,7 @@ public class XmlWorkflowServiceImpl implements XmlWorkflowService {
}
} catch (MessagingException e) {
log.warn(LogHelper.getHeader(context, "notifyOfArchive",
"cannot email user" + " item_id=" + item.getID()));
"cannot email user" + " item_id=" + item.getID()), e);
}
}
@@ -715,7 +715,7 @@ public class XmlWorkflowServiceImpl implements XmlWorkflowService {
} catch (MessagingException e) {
log.warn(LogHelper.getHeader(c, "notifyOfCuration",
"cannot email users of workflow_item_id " + wi.getID()
+ ": " + e.getMessage()));
+ ": " + e.getMessage()), e);
}
}
@@ -1031,12 +1031,13 @@ public class XmlWorkflowServiceImpl implements XmlWorkflowService {
itemService.update(context, myitem);
// convert into personal workspace
WorkspaceItem wsi = returnToWorkspace(context, wi);
// remove policy for controller
removeUserItemPolicies(context, myitem, e);
revokeReviewerPolicies(context, myitem);
// convert into personal workspace
WorkspaceItem wsi = returnToWorkspace(context, wi);
// notify that it's been rejected
notifyOfReject(context, wi, e, rejection_message);
log.info(LogHelper.getHeader(context, "reject_workflow", "workflow_item_id="
@@ -1195,7 +1196,7 @@ public class XmlWorkflowServiceImpl implements XmlWorkflowService {
log.warn(LogHelper.getHeader(c, "notify_of_reject",
"cannot email user" + " eperson_id" + e.getID()
+ " eperson_email" + e.getEmail()
+ " workflow_item_id" + wi.getID()));
+ " workflow_item_id" + wi.getID()), ex);
}
}

View File

@@ -0,0 +1,28 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
----------------------------------------------------
-- Make sure the metadatavalue.place column starts at 0 instead of 1
----------------------------------------------------
-- Collect, per (object, field) pair, the smallest existing place value.
-- NOTE: the trailing comma after the last column was removed; H2 rejects it.
CREATE LOCAL TEMPORARY TABLE mdv_minplace (
  dspace_object_id UUID NOT NULL,
  metadata_field_id INT NOT NULL,
  minplace INT NOT NULL
);

INSERT INTO mdv_minplace
SELECT dspace_object_id, metadata_field_id, MIN(place) AS minplace
  FROM metadatavalue
 GROUP BY dspace_object_id, metadata_field_id;

-- Shift each (object, field) group down so its smallest place becomes 0.
-- Groups already starting at 0 subtract 0, so the update is a no-op for them.
UPDATE metadatavalue AS mdv
SET place = mdv.place - (
  SELECT minplace FROM mdv_minplace AS mp
  WHERE mp.dspace_object_id = mdv.dspace_object_id
    AND mp.metadata_field_id = mdv.metadata_field_id
);

View File

@@ -47,9 +47,9 @@ CREATE TABLE relationship_type
CREATE TABLE relationship
(
id INTEGER NOT NULL PRIMARY KEY,
left_id uuid NOT NULL REFERENCES item(uuid),
left_id raw(16) NOT NULL REFERENCES item(uuid),
type_id INTEGER NOT NULL REFERENCES relationship_type(id),
right_id uuid NOT NULL REFERENCES item(uuid),
right_id raw(16) NOT NULL REFERENCES item(uuid),
left_place INTEGER,
right_place INTEGER,
CONSTRAINT u_constraint UNIQUE (left_id, type_id, right_id)
@@ -57,9 +57,9 @@ CREATE TABLE relationship
);
CREATE INDEX entity_type_label_idx ON entity_type(label);
CREATE INDEX relationship_type_by_left_type_idx ON relationship_type(left_type);
CREATE INDEX relationship_type_by_right_type_idx ON relationship_type(right_type);
CREATE INDEX relationship_type_by_left_label_idx ON relationship_type(left_label);
CREATE INDEX relationship_type_by_right_label_idx ON relationship_type(right_label);
CREATE INDEX rl_ty_by_left_type_idx ON relationship_type(left_type);
CREATE INDEX rl_ty_by_right_type_idx ON relationship_type(right_type);
CREATE INDEX rl_ty_by_left_label_idx ON relationship_type(left_label);
CREATE INDEX rl_ty_by_right_label_idx ON relationship_type(right_label);
CREATE INDEX relationship_by_left_id_idx ON relationship(left_id);
CREATE INDEX relationship_by_right_id_idx ON relationship(right_id);
CREATE INDEX relationship_by_right_id_idx ON relationship(right_id);

View File

@@ -11,8 +11,8 @@
-- Rename columns left_label and right_label to leftward_type and rightward_type
-----------------------------------------------------------------------------------
ALTER TABLE relationship ADD leftward_value VARCHAR;
ALTER TABLE relationship ADD rightward_value VARCHAR;
ALTER TABLE relationship ADD leftward_value VARCHAR2(50);
ALTER TABLE relationship ADD rightward_value VARCHAR2(50);
ALTER TABLE relationship_type RENAME left_label TO leftward_type;
ALTER TABLE relationship_type RENAME right_label TO rightward_type;
ALTER TABLE relationship_type RENAME COLUMN left_label TO leftward_type;
ALTER TABLE relationship_type RENAME COLUMN right_label TO rightward_type;

View File

@@ -10,5 +10,5 @@
-- Create columns copy_left and copy_right for RelationshipType
-----------------------------------------------------------------------------------
ALTER TABLE relationship_type ADD copy_to_left BOOLEAN DEFAULT FALSE NOT NULL;
ALTER TABLE relationship_type ADD copy_to_right BOOLEAN DEFAULT FALSE NOT NULL;
ALTER TABLE relationship_type ADD copy_to_left NUMBER(1) DEFAULT 0 NOT NULL;
ALTER TABLE relationship_type ADD copy_to_right NUMBER(1) DEFAULT 0 NOT NULL;

View File

@@ -0,0 +1,24 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
----------------------------------------------------
-- Make sure the metadatavalue.place column starts at 0 instead of 1
----------------------------------------------------
-- Shift each (dspace_object_id, metadata_field_id) group of metadata values
-- down so its smallest place becomes 0. Only groups whose current minimum
-- place is greater than 0 are touched (mp.minplace > 0 in the ON clause).
MERGE INTO metadatavalue mdv
USING (
    SELECT dspace_object_id, metadata_field_id, MIN(place) AS minplace
    FROM metadatavalue
    GROUP BY dspace_object_id, metadata_field_id
) mp
ON (
    mdv.dspace_object_id = mp.dspace_object_id
    AND mdv.metadata_field_id = mp.metadata_field_id
    AND mp.minplace > 0
)
WHEN MATCHED THEN UPDATE
SET mdv.place = mdv.place - mp.minplace;

View File

@@ -0,0 +1,21 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
----------------------------------------------------
-- Make sure the metadatavalue.place column starts at 0 instead of 1
----------------------------------------------------
-- Shift each (dspace_object_id, metadata_field_id) group of metadata values
-- down so its smallest place becomes 0. The join subquery computes each
-- group's current minimum; rows already starting at 0 are excluded by the
-- minplace > 0 predicate.
UPDATE metadatavalue AS mdv
SET place = mdv.place - minplace
FROM (
    SELECT dspace_object_id, metadata_field_id, MIN(place) AS minplace
    FROM metadatavalue
    GROUP BY dspace_object_id, metadata_field_id
) AS mp
WHERE mdv.dspace_object_id = mp.dspace_object_id
    AND mdv.metadata_field_id = mp.metadata_field_id
    AND minplace > 0;

View File

@@ -37,7 +37,7 @@
<property name="configurationService" ref="org.dspace.services.ConfigurationService"/>
</bean>
<bean name="org.dspace.core.DBConnection" class="org.dspace.core.HibernateDBConnection" lazy-init="true"/>
<bean name="org.dspace.core.DBConnection" class="org.dspace.core.HibernateDBConnection" lazy-init="true" scope="prototype"/>
<!-- Register all our Flyway callback classes (which run before/after database migrations) -->
<bean class="org.dspace.storage.rdbms.RegistryUpdater"/>

View File

@@ -20,6 +20,7 @@
<name-map collection-handle="default" submission-name="traditional"/>
<name-map collection-handle="123456789/language-test-1" submission-name="languagetestprocess"/>
<name-map collection-handle="123456789/extraction-test" submission-name="extractiontestprocess"/>
<name-map collection-handle="123456789/qualdrop-test" submission-name="qualdroptest"/>
</submission-map>
@@ -75,6 +76,11 @@
<scope visibilityOutside="read-only">submission</scope>
</step-definition>
<step-definition id="qualdroptest">
<processing-class>org.dspace.app.rest.submit.step.DescribeStep</processing-class>
<type>submission-form</type>
</step-definition>
<!-- Step Upload Item with Embargo Features to enable this step, please
make sure to comment-out the previous step "UploadStep" <step-definition id="upload-with-embargo">
<heading>submit.progressbar.upload</heading> <processing-class>org.dspace.submit.step.UploadWithEmbargoStep</processing-class>
@@ -167,6 +173,10 @@
<step id="cclicense"/>
</submission-process>
<submission-process name="qualdroptest">
<step id="qualdroptest" />
</submission-process>
</submission-definitions>
</item-submission>

View File

@@ -40,6 +40,11 @@
<property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataDeletionCli"/>
</bean>
<bean id="filter-media" class="org.dspace.app.mediafilter.MediaFilterScriptConfiguration">
<property name="description" value="Perform the media filtering to extract full text from documents and to create thumbnails"/>
<property name="dspaceRunnableClass" value="org.dspace.app.mediafilter.MediaFilterScript"/>
</bean>
<bean id="another-mock-script" class="org.dspace.scripts.MockDSpaceRunnableScriptConfiguration" scope="prototype">
<property name="description" value="Mocking a script for testing purposes" />
<property name="dspaceRunnableClass" value="org.dspace.scripts.impl.MockDSpaceRunnableScript"/>

View File

@@ -286,6 +286,22 @@ it, please enter the types and the actual numbers or codes.</hint>
</row>
</form>
<form name="qualdroptest">
<row>
<field>
<dc-schema>dc</dc-schema>
<dc-element>identifier</dc-element>
<dc-qualifier></dc-qualifier>
<repeatable>true</repeatable>
<label>Identifiers</label>
<input-type value-pairs-name="common_identifiers">qualdrop_value</input-type>
<hint>If the item has any identification numbers or codes associated with
it, please enter the types and the actual numbers or codes.</hint>
<required>please give an identifier</required>
</field>
</row>
</form>
<form name="languagetest">
<row>
<field>

View File

@@ -105,7 +105,7 @@ public class AbstractIntegrationTestWithDatabase extends AbstractDSpaceIntegrati
public void setUp() throws Exception {
try {
//Start a new context
context = new Context(Context.Mode.BATCH_EDIT);
context = new Context(Context.Mode.READ_WRITE);
context.turnOffAuthorisationSystem();
//Find our global test EPerson account. If it doesn't exist, create it.

View File

@@ -29,7 +29,7 @@ public class IPMatcherTest {
private static final String IP6_FULL_ADDRESS2 = "2001:18e8:3:171:218:8bff:fe2a:56a3";
private static final String IP6_MASKED_ADDRESS = "2001:18e8:3::/48";
private final static int increment = 6;
private final static int increment = 17;
private static IPMatcher ip6FullMatcher;
private static IPMatcher ip6MaskedMatcher;

View File

@@ -11,8 +11,6 @@ import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
import java.io.File;
import java.io.FileInputStream;
@@ -26,7 +24,6 @@ import java.util.TimeZone;
import org.apache.logging.log4j.Logger;
import org.dspace.AbstractUnitTest;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
@@ -38,7 +35,6 @@ import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.springframework.test.util.ReflectionTestUtils;
/**
* Unit Tests for class InstallItem
@@ -57,12 +53,6 @@ public class InstallItemTest extends AbstractUnitTest {
private Collection collection;
private Community owningCommunity;
/**
* Spy of AuthorizeService to use for tests
* (initialized / setup in @Before method)
*/
private AuthorizeService authorizeServiceSpy;
/**
* log4j category
*/
@@ -84,14 +74,6 @@ public class InstallItemTest extends AbstractUnitTest {
this.owningCommunity = communityService.create(null, context);
this.collection = collectionService.create(context, owningCommunity);
context.restoreAuthSystemState();
// Initialize our spy of the autowired (global) authorizeService bean.
// This allows us to customize the bean's method return values in tests below
authorizeServiceSpy = spy(authorizeService);
// "Wire" our spy to be used by the current loaded workspaceItemService and collectionService
// (To ensure it uses the spy instead of the real service)
ReflectionTestUtils.setField(workspaceItemService, "authorizeService", authorizeServiceSpy);
ReflectionTestUtils.setField(collectionService, "authorizeService", authorizeServiceSpy);
} catch (SQLException | AuthorizeException ex) {
log.error("SQL Error in init", ex);
fail("SQL Error in init: " + ex.getMessage());
@@ -154,23 +136,23 @@ public class InstallItemTest extends AbstractUnitTest {
/**
* Test of installItem method (with an invalid handle), of class InstallItem.
*/
@Test(expected = AuthorizeException.class)
@Test(expected = IllegalStateException.class)
public void testInstallItem_invalidHandle() throws Exception {
// Allow full Admin rights
when(authorizeServiceSpy.isAdmin(context)).thenReturn(true);
// create two items for tests
context.turnOffAuthorisationSystem();
WorkspaceItem is = workspaceItemService.create(context, collection, false);
WorkspaceItem is2 = workspaceItemService.create(context, collection, false);
context.restoreAuthSystemState();
try {
WorkspaceItem is = workspaceItemService.create(context, collection, false);
WorkspaceItem is2 = workspaceItemService.create(context, collection, false);
//Test assigning the same Handle to two different items
String handle = "123456789/56789";
installItemService.installItem(context, is, handle);
//Test assigning the same Handle to two different items
String handle = "123456789/56789";
installItemService.installItem(context, is, handle);
// Assigning the same handle again should throw a RuntimeException
installItemService.installItem(context, is2, handle);
// Assigning the same handle again should throw a RuntimeException
installItemService.installItem(context, is2, handle);
} finally {
context.restoreAuthSystemState();
}
fail("Exception expected");
}

View File

@@ -74,7 +74,7 @@ public class MetadataFieldPerformanceTest extends AbstractUnitTest {
long duration = (endTime - startTime);
double maxDurationPerCall = .3;
double maxDurationPerCall = .4;
double maxDuration = maxDurationPerCall * amount;
//Duration is 1.542 without performance improvements
//Duration is 0.0538 with performance improvements

View File

@@ -76,6 +76,8 @@ public class QDCCrosswalkTest
@Before
public void setUp() {
// make sure that the config properties set in @BeforeClass are picked up
QDCCrosswalk.initStatic();
}
@After

View File

@@ -0,0 +1,566 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.iiif.canvasdimension;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.builder.BitstreamBuilder;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.content.Bitstream;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
 * Integration tests for the {@code iiif-canvas-dimensions} script, which reads
 * image bitstreams of IIIF-enabled items and records canvas size metadata
 * ({@code iiif.image.height} / {@code iiif.image.width}) on each bitstream.
 * <p>
 * Each test builds a community/collection/item fixture, attaches the 300x200
 * test image {@code cat.jpg}, runs the script against an item, collection or
 * community, and verifies the resulting bitstream metadata. The script doubles
 * small images, so 300x200 is expected to become 600x400.
 */
public class CanvasDimensionsIT extends AbstractIntegrationTestWithDatabase {

    // Community/collection/item fixture, rebuilt for each test.
    protected Community child1;
    protected Community child2;
    protected Collection col1;
    protected Collection col2;
    protected Collection col3;
    protected Item iiifItem;
    protected Item iiifItem2;
    protected Item iiifItem3;
    protected Bitstream bitstream;
    protected Bitstream bitstream2;

    // Metadata fields written by the canvas-dimension script.
    private final static String METADATA_IIIF_HEIGHT = "iiif.image.height";
    private final static String METADATA_IIIF_WIDTH = "iiif.image.width";

    // Capture System.out so tests can inspect the script's console report.
    private final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
    private final PrintStream originalOut = System.out;

    @Before
    public void setup() throws IOException {
        // Redirect stdout for output assertions; restored in destroy().
        System.setOut(new PrintStream(outContent));

        context.turnOffAuthorisationSystem();
        parentCommunity = CommunityBuilder.createCommunity(context)
                                          .withName("Parent Community")
                                          .build();
        child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
                                 .withName("Sub Community 1")
                                 .build();
        child2 = CommunityBuilder.createSubCommunity(context, parentCommunity)
                                 .withName("Sub Community 2")
                                 .build();
        col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
        col2 = CollectionBuilder.createCollection(context, child2).withName("Collection 2").build();
        context.restoreAuthSystemState();
    }

    @After
    @Override
    public void destroy() throws Exception {
        System.setOut(originalOut);
        super.destroy();
    }

    @Test
    public void processItemNoForce() throws Exception {
        context.turnOffAuthorisationSystem();
        iiifItem = createIiifItem(col1, "Test Item");
        // Add jpeg image bitstream (300 x 200)
        bitstream = addImageBitstream(iiifItem, "Bitstream2.jpg");
        context.restoreAuthSystemState();

        execCanvasScript(iiifItem.getHandle());

        // The test image is small, so the canvas dimensions are doubled: 300x200 -> 600x400
        assertCanvasDimensions(bitstream, "600", "400");
    }

    @Test
    public void processCollectionNoForce() throws Exception {
        context.turnOffAuthorisationSystem();
        iiifItem = createIiifItem(col1, "Test Item");
        // Add jpeg image bitstream (300 x 200)
        bitstream = addImageBitstream(iiifItem, "Bitstream2.jpg");
        context.restoreAuthSystemState();

        // Address the script at the owning collection.
        execCanvasScript(col1.getID().toString());

        assertCanvasDimensions(bitstream, "600", "400");
    }

    @Test
    public void processSubCommunityNoForce() throws Exception {
        context.turnOffAuthorisationSystem();
        iiifItem = createIiifItem(col1, "Test Item");
        // Add jpeg image bitstream (300 x 200)
        bitstream = addImageBitstream(iiifItem, "Bitstream2.jpg");
        context.restoreAuthSystemState();

        // Address the script at the sub-community containing the collection.
        execCanvasScript(child1.getID().toString());

        assertCanvasDimensions(bitstream, "600", "400");
    }

    @Test
    public void processParentCommunityNoForce() throws Exception {
        context.turnOffAuthorisationSystem();
        iiifItem = createIiifItem(col1, "Test Item");
        // Add jpeg image bitstream (300 x 200)
        bitstream = addImageBitstream(iiifItem, "Bitstream2.jpg");
        context.restoreAuthSystemState();

        // Address the script at the top-level community, by handle.
        execCanvasScript(parentCommunity.getHandle());

        assertCanvasDimensions(bitstream, "600", "400");
    }

    @Test
    public void processParentCommunityMultipleSubsNoForce() throws Exception {
        context.turnOffAuthorisationSystem();
        iiifItem = createIiifItem(col1, "Test Item");
        iiifItem2 = createIiifItem(col2, "Test Item2");
        // Add jpeg image bitstreams (300 x 200)
        bitstream = addImageBitstream(iiifItem, "Bitstream2.jpg");
        bitstream2 = addImageBitstream(iiifItem2, "Bitstream2.jpg");
        context.restoreAuthSystemState();

        execCanvasScript(parentCommunity.getID().toString());

        // Bitstreams under BOTH sub-communities should be updated.
        assertCanvasDimensions(bitstream, "600", "400");
        assertCanvasDimensions(bitstream2, "600", "400");
    }

    @Test
    public void processItemWithForce() throws Exception {
        context.turnOffAuthorisationSystem();
        iiifItem = createIiifItem(col1, "Test Item");
        // Bitstream already carries 100x100 canvas metadata.
        bitstream = addCanvasMetadataBitstream(iiifItem, "Bitstream2.jpg");
        context.restoreAuthSystemState();

        execCanvasScriptForceOption(iiifItem.getID().toString());

        // With -f the pre-existing 100x100 metadata must be overwritten.
        assertCanvasDimensions(bitstream, "600", "400");
    }

    @Test
    public void processCollectionWithForce() throws Exception {
        context.turnOffAuthorisationSystem();
        iiifItem = createIiifItem(col1, "Test Item");
        // Bitstream already carries 100x100 canvas metadata.
        bitstream = addCanvasMetadataBitstream(iiifItem, "Bitstream2.jpg");
        context.restoreAuthSystemState();

        execCanvasScriptForceOption(col1.getID().toString());

        // With -f the pre-existing 100x100 metadata must be overwritten.
        assertCanvasDimensions(bitstream, "600", "400");
    }

    @Test
    public void processItemWithExistingMetadata() throws Exception {
        context.turnOffAuthorisationSystem();
        iiifItem = createIiifItem(col1, "Test Item");
        // Bitstream already carries 100x100 canvas metadata.
        bitstream = addCanvasMetadataBitstream(iiifItem, "Bitstream2.jpg");
        context.restoreAuthSystemState();

        execCanvasScript(iiifItem.getHandle());

        // Without -f the existing canvas metadata must be left unchanged.
        assertCanvasDimensions(bitstream, "100", "100");
    }

    @Test
    public void processParentCommunityWithMaximum() throws Exception {
        context.turnOffAuthorisationSystem();
        iiifItem = createIiifItem(col1, "Test Item 1");
        iiifItem2 = createIiifItem(col1, "Test Item 2");
        // Third item so we can test max2process.
        iiifItem3 = createIiifItem(col1, "Test Item3");
        bitstream = addCanvasMetadataBitstream(iiifItem, "Bitstream2.jpg");
        bitstream2 = addCanvasMetadataBitstream(iiifItem2, "Bitstream2.jpg");
        addCanvasMetadataBitstream(iiifItem3, "Bitstream3.jpg");
        context.restoreAuthSystemState();

        execCanvasScriptWithMaxRecs(parentCommunity.getID().toString());

        // Only 2 of the 3 eligible items may be processed (-m 2).
        // The report is written via System.out, so compare against the platform
        // line separator rather than a hard-coded "\n" (portability).
        assertEquals("2 IIIF items were processed." + System.lineSeparator(), outContent.toString());
    }

    @Test
    public void processParentCommunityWithMultipleSkip() throws Exception {
        context.turnOffAuthorisationSystem();
        col3 = CollectionBuilder.createCollection(context, child1).withName("Collection 3").build();
        iiifItem = createIiifItem(col1, "Test Item");
        iiifItem2 = createIiifItem(col2, "Test Item");
        iiifItem3 = createIiifItem(col3, "Test Item");
        bitstream = addCanvasMetadataBitstream(iiifItem, "Bitstream2.jpg");
        bitstream2 = addCanvasMetadataBitstream(iiifItem2, "Bitstream2.jpg");
        Bitstream bitstream3 = addCanvasMetadataBitstream(iiifItem3, "Bitstream2.jpg");
        context.restoreAuthSystemState();

        // Skip both col2 and col3 (comma-separated handle list).
        execCanvasScriptWithSkipList(parentCommunity.getID().toString(),
                col2.getHandle() + "," + col3.getHandle());

        // Non-skipped bitstream is updated (forced, image doubled to 600x400)...
        assertCanvasDimensions(bitstream, "600", "400");
        // ...while bitstreams inside skipped collections keep their original 100x100.
        assertCanvasDimensions(bitstream2, "100", "100");
        assertCanvasDimensions(bitstream3, "100", "100");
    }

    @Test
    public void processParentCommunityWithSingleSkip() throws Exception {
        context.turnOffAuthorisationSystem();
        iiifItem = createIiifItem(col1, "Test Item");
        iiifItem2 = createIiifItem(col2, "Test Item");
        bitstream = addCanvasMetadataBitstream(iiifItem, "Bitstream2.jpg");
        bitstream2 = addCanvasMetadataBitstream(iiifItem2, "Bitstream2.jpg");
        context.restoreAuthSystemState();

        execCanvasScriptWithSkipList(parentCommunity.getID().toString(), col2.getHandle());

        // Non-skipped bitstream is updated (forced, image doubled to 600x400)...
        assertCanvasDimensions(bitstream, "600", "400");
        // ...while the bitstream inside the skipped collection keeps its 100x100.
        assertCanvasDimensions(bitstream2, "100", "100");
    }

    /**
     * Create an IIIF-enabled test item in the given collection.
     * Callers must have the authorization system turned off.
     */
    private Item createIiifItem(Collection collection, String title) {
        return ItemBuilder.createItem(context, collection)
                          .withTitle(title)
                          .withIssueDate("2017-10-17")
                          .enableIIIF()
                          .build();
    }

    /**
     * Attach the 300x200 test image {@code cat.jpg} to an item as a jpeg
     * bitstream, without any pre-existing canvas metadata.
     */
    private Bitstream addImageBitstream(Item item, String name) throws Exception {
        InputStream input = this.getClass().getResourceAsStream("cat.jpg");
        return BitstreamBuilder
            .createBitstream(context, item, input)
            .withName(name)
            .withMimeType("image/jpeg")
            .build();
    }

    /**
     * Attach the 300x200 test image {@code cat.jpg} to an item as a jpeg
     * bitstream that already carries 100x100 canvas metadata, so tests can
     * distinguish "skipped/unchanged" (100x100) from "processed" (600x400).
     */
    private Bitstream addCanvasMetadataBitstream(Item item, String name) throws Exception {
        InputStream input = this.getClass().getResourceAsStream("cat.jpg");
        return BitstreamBuilder
            .createBitstream(context, item, input)
            .withName(name)
            .withMimeType("image/jpeg")
            .withIIIFCanvasWidth(100)
            .withIIIFCanvasHeight(100)
            .build();
    }

    /** True if the bitstream has the given metadata field with the given value. */
    private boolean hasCanvasValue(Bitstream bs, String field, String expectedValue) {
        return bs.getMetadata().stream()
                 .filter(m -> m.getMetadataField().toString('.').contentEquals(field))
                 .anyMatch(m -> m.getValue().contentEquals(expectedValue));
    }

    /** Assert that the bitstream carries the expected canvas width and height. */
    private void assertCanvasDimensions(Bitstream bs, String expectedWidth, String expectedHeight) {
        assertTrue(hasCanvasValue(bs, METADATA_IIIF_HEIGHT, expectedHeight));
        assertTrue(hasCanvasValue(bs, METADATA_IIIF_WIDTH, expectedWidth));
    }

    /** Run the script against a single item/collection/community id or handle. */
    private void execCanvasScript(String id) throws Exception {
        runDSpaceScript("iiif-canvas-dimensions", "-e", "admin@email.com", "-i", id);
    }

    /** Run the script with -f to overwrite existing canvas metadata. */
    private void execCanvasScriptForceOption(String id) throws Exception {
        runDSpaceScript("iiif-canvas-dimensions", "-e", "admin@email.com", "-i", id, "-f");
    }

    /** Run the script forced and quiet, limited to a maximum of 2 items. */
    private void execCanvasScriptWithMaxRecs(String id) throws Exception {
        // maximum 2
        runDSpaceScript("iiif-canvas-dimensions", "-e", "admin@email.com", "-i", id, "-m", "2", "-f", "-q");
    }

    /** Run the script forced, skipping a comma-separated list of handles. */
    private void execCanvasScriptWithSkipList(String id, String skip) throws Exception {
        runDSpaceScript("iiif-canvas-dimensions", "-e", "admin@email.com", "-i", id, "-s", skip, "-f");
    }
}

View File

@@ -0,0 +1,96 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.service.impl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockserver.model.HttpRequest.request;
import static org.mockserver.model.HttpResponse.response;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.methods.RequestBuilder;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.impl.client.CloseableHttpClient;
import org.dspace.AbstractDSpaceTest;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.eclipse.jetty.http.HttpStatus;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.mockserver.client.MockServerClient;
import org.mockserver.junit.MockServerRule;
/**
*
* @author Mark H. Wood <mwood@iupui.edu>
*/
/**
 * Exercise {@code HttpConnectionPoolService}: a pooled HTTP client must be
 * produced and be able to complete a simple GET round trip against a mock
 * HTTP server.
 *
 * @author Mark H. Wood <mwood@iupui.edu>
 */
public class HttpConnectionPoolServiceTest
        extends AbstractDSpaceTest {
    /** DSpace configuration, shared by all tests in this class. */
    private static ConfigurationService configurationService;

    /** Starts and stops an embedded mock HTTP server around each test. */
    @Rule
    public MockServerRule mockServerRule = new MockServerRule(this);

    /** Injected by {@code MockServerRule}; used to stub the mock server. */
    private MockServerClient mockServerClient;

    @BeforeClass
    public static void initClass() {
        configurationService = DSpaceServicesFactory.getInstance()
                .getConfigurationService();
    }

    /**
     * Test of getClient method, of class HttpConnectionPoolService.
     * @throws java.io.IOException if a connection cannot be closed.
     * @throws java.net.URISyntaxException when an invalid URI is constructed.
     */
    @Test
    public void testGetClient()
            throws IOException, URISyntaxException {
        System.out.println("getClient");

        // Constrain the pool so the test runs with known, small limits.
        configurationService.setProperty("solr.client.maxTotalConnections", 2);
        configurationService.setProperty("solr.client.maxPerRoute", 2);

        HttpConnectionPoolService poolService = new HttpConnectionPoolService("solr");
        poolService.configurationService = configurationService;
        poolService.init();

        // Stub the mock server:  GET /test answers 200 OK.
        final String testPath = "/test";
        mockServerClient
                .when(request().withPath(testPath))
                .respond(response().withStatusCode(HttpStatus.OK_200));

        try (CloseableHttpClient client = poolService.getClient()) {
            assertNotNull("getClient should always return a client", client);

            URI requestUri = new URIBuilder()
                    .setScheme("http")
                    .setHost("localhost")
                    .setPort(mockServerClient.getPort())
                    .setPath(testPath)
                    .build();
            System.out.println(requestUri.toString());

            HttpUriRequest getRequest = RequestBuilder.get(requestUri).build();
            try (CloseableHttpResponse httpResponse = client.execute(getRequest)) {
                assertEquals("Response status should be OK", HttpStatus.OK_200,
                        httpResponse.getStatusLine().getStatusCode());
            }
        }
    }
}

View File

@@ -9,7 +9,6 @@ package org.dspace.statistics;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.net.InetAddress;
import java.util.ArrayList;
@@ -29,6 +28,7 @@ import com.maxmind.geoip2.record.Postal;
import com.maxmind.geoip2.record.RepresentedCountry;
import com.maxmind.geoip2.record.Traits;
import org.dspace.solr.MockSolrServer;
import org.mockito.Mockito;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.stereotype.Service;
@@ -55,7 +55,7 @@ public class MockSolrLoggerServiceImpl
// Mock GeoIP's DatabaseReader
DatabaseReader reader = mock(DatabaseReader.class);
// Ensure that any tests requesting a city() get a mock/fake CityResponse
when(reader.city(any(InetAddress.class))).thenReturn(mockCityResponse());
Mockito.lenient().when(reader.city(any(InetAddress.class))).thenReturn(mockCityResponse());
// Save this mock DatabaseReader to be used by SolrLoggerService
locationService = reader;
}

View File

@@ -25,12 +25,14 @@ import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.http.client.config.RequestConfig;
import org.dspace.core.Context;
import org.dspace.statistics.export.OpenURLTracker;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
@@ -61,8 +63,6 @@ public class OpenUrlServiceImplTest {
openUrlService.processUrl(context, "test-url");
verify(openUrlService, times(0)).logfailed(context, "test-url");
}
/**
@@ -82,7 +82,6 @@ public class OpenUrlServiceImplTest {
verify(openUrlService, times(1)).logfailed(context, "test-url");
}
/**
@@ -131,4 +130,23 @@ public class OpenUrlServiceImplTest {
assertThat(tracker1.getUrl(), is(failedUrl));
}
/**
* Tests whether the timeout gets set to 10 seconds when processing a url
* @throws SQLException
*/
@Test
public void testTimeout() throws SQLException {
Context context = mock(Context.class);
String URL = "http://bla.com";
RequestConfig.Builder requestConfig = mock(RequestConfig.Builder.class);
doReturn(requestConfig).when(openUrlService).getRequestConfigBuilder();
doReturn(requestConfig).when(requestConfig).setConnectTimeout(10 * 1000);
doReturn(RequestConfig.custom().build()).when(requestConfig).build();
openUrlService.processUrl(context, URL);
Mockito.verify(requestConfig).setConnectTimeout(10 * 1000);
}
}

View File

@@ -0,0 +1,110 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.xmlworkflow;
import static org.junit.Assert.assertTrue;
import java.sql.SQLException;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.builder.ClaimedTaskBuilder;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.EPersonBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.discovery.IndexingService;
import org.dspace.eperson.EPerson;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory;
import org.dspace.xmlworkflow.service.XmlWorkflowService;
import org.dspace.xmlworkflow.state.Workflow;
import org.dspace.xmlworkflow.storedcomponents.ClaimedTask;
import org.junit.Test;
import org.springframework.mock.web.MockHttpServletRequest;
/**
 * IT for {@link XmlWorkflowServiceImpl}
 *
 * @author Maria Verdonck (Atmire) on 14/12/21
 */
public class XmlWorkflowServiceIT extends AbstractIntegrationTestWithDatabase {

    protected XmlWorkflowService xmlWorkflowService = XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService();
    protected IndexingService indexer = DSpaceServicesFactory.getInstance().getServiceManager()
                                                             .getServiceByName(IndexingService.class.getName(),
                                                                               IndexingService.class);
    protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();

    /**
     * Test to verify that when a user submits an item into the workflow and that
     * item is then rejected, the submitter gets WRITE access back on the item.
     *
     * @throws Exception passed through from builders and workflow execution
     */
    @Test
    public void workflowUserRejectsItemTheySubmitted_ItemShouldBeEditable() throws Exception {
        context.turnOffAuthorisationSystem();
        EPerson submitter = EPersonBuilder.createEPerson(context).withEmail("submitter@example.org").build();
        context.setCurrentUser(submitter);
        Community community = CommunityBuilder.createCommunity(context)
                                              .withName("Parent Community")
                                              .build();
        Collection colWithWorkflow = CollectionBuilder.createCollection(context, community)
                                                      .withName("Collection WITH workflow")
                                                      .withWorkflowGroup(1, submitter)
                                                      .build();
        Workflow workflow = XmlWorkflowServiceFactory.getInstance().getWorkflowFactory().getWorkflow(colWithWorkflow);
        ClaimedTask taskToReject = ClaimedTaskBuilder.createClaimedTask(context, colWithWorkflow, submitter)
                                                     .withTitle("Test workflow item to reject").build();
        context.restoreAuthSystemState();
        // Submitter is both the original submitter and the reviewer, so they should
        // have WRITE access on the claimed task's item.
        assertTrue(this.containsRPForUser(taskToReject.getWorkflowItem().getItem(), submitter, Constants.WRITE));
        // Reject the claimed task.
        MockHttpServletRequest httpRejectRequest = new MockHttpServletRequest();
        httpRejectRequest.setParameter("submit_reject", "submit_reject");
        httpRejectRequest.setParameter("reason", "test");
        executeWorkflowAction(httpRejectRequest, workflow, taskToReject);
        // After rejection the item is sent back to the submission step; the submitter
        // should still have WRITE access on it.
        assertTrue(this.containsRPForUser(taskToReject.getWorkflowItem().getItem(), submitter, Constants.WRITE));
    }

    /**
     * Checks whether the given user has a resource policy with the given action on the item.
     *
     * @param item   the item whose policies are inspected
     * @param user   the EPerson to look for
     * @param action the {@link Constants} action id (e.g. WRITE)
     * @return true if a matching EPerson policy exists
     * @throws SQLException if the policy lookup fails
     */
    private boolean containsRPForUser(Item item, EPerson user, int action) throws SQLException {
        List<ResourcePolicy> rps = authorizeService.getPolicies(context, item);
        for (ResourcePolicy rp : rps) {
            // A policy may be group-based, in which case getEPerson() is null;
            // guard against an NPE before comparing IDs.
            if (rp.getEPerson() != null && rp.getEPerson().getID().equals(user.getID())
                && rp.getAction() == action) {
                return true;
            }
        }
        return false;
    }

    /**
     * Executes the given workflow action (encoded in the request parameters) on the
     * claimed task as the task's owner, commits, and restores the previous context user.
     *
     * @param httpServletRequest request carrying the action parameters
     * @param workflow           the workflow the task belongs to
     * @param task               the claimed task to act on
     * @throws Exception if the workflow state transition fails
     */
    private void executeWorkflowAction(HttpServletRequest httpServletRequest, Workflow workflow, ClaimedTask task)
        throws Exception {
        final EPerson previousUser = context.getCurrentUser();
        task = context.reloadEntity(task);
        context.setCurrentUser(task.getOwner());
        xmlWorkflowService
            .doState(context, task.getOwner(), httpServletRequest, task.getWorkflowItem().getID(), workflow,
                     workflow.getStep(task.getStepID()).getActionConfig(task.getActionID()));
        context.commit();
        indexer.commit();
        context.setCurrentUser(previousUser);
    }
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 36 KiB

View File

@@ -11,6 +11,7 @@ import java.util.ArrayList;
import java.util.List;
import javax.validation.constraints.NotNull;
import de.digitalcollections.iiif.model.enums.ViewingHint;
import de.digitalcollections.iiif.model.sharedcanvas.Canvas;
import de.digitalcollections.iiif.model.sharedcanvas.Range;
import de.digitalcollections.iiif.model.sharedcanvas.Resource;
@@ -32,6 +33,7 @@ public class RangeGenerator implements IIIFResource {
private String identifier;
private String label;
private final List<ViewingHint> viewingHint = new ArrayList<>();
private final List<Canvas> canvasList = new ArrayList<>();
private final List<Range> rangesList = new ArrayList<>();
private final RangeService rangeService;
@@ -69,6 +71,11 @@ public class RangeGenerator implements IIIFResource {
return this;
}
public RangeGenerator addViewingHint(String hint) {
viewingHint.add(new BehaviorGenerator().setType(hint).generateValue());
return this;
}
/**
* Adds canvas to range canvas list.
* @param canvas list of canvas generators
@@ -99,6 +106,9 @@ public class RangeGenerator implements IIIFResource {
} else {
range = new Range(identifier);
}
if (viewingHint.size() > 0) {
range.setViewingHints(viewingHint);
}
for (Canvas canvas : canvasList) {
range.addCanvas(canvas);
}

View File

@@ -9,7 +9,6 @@ package org.dspace.app.iiif.service;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -29,7 +28,6 @@ import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@@ -125,7 +123,7 @@ public class ManifestService extends AbstractResourceService {
addMetadata(context, item);
addViewingHint(item);
addThumbnail(item, context);
addRanges(context, item, manifestId);
addCanvasAndRange(context, item, manifestId);
manifestGenerator.addSequence(
sequenceService.getSequence(item));
addRendering(item, context);
@@ -133,66 +131,37 @@ public class ManifestService extends AbstractResourceService {
}
/**
* Add the ranges to the manifest structure. Ranges are generated from the
* iiif.toc metadata
* Adds Canvases and Ranges to the manifest. Ranges are generated from bitstream
* or bundle iiif metadata.
*
* @param context the DSpace Context
* @param item the DSpace Item to represent
* @param manifestId the generated manifestId
*/
private void addRanges(Context context, Item item, String manifestId) {
List<Bundle> bundles = utils.getIIIFBundles(item);
RangeGenerator root = new RangeGenerator(rangeService);
root.setLabel(I18nUtil.getMessage("iiif.toc.root-label"));
root.setIdentifier(manifestId + "/range/r0");
private void addCanvasAndRange(Context context, Item item, String manifestId) {
Map<String, RangeGenerator> tocRanges = new LinkedHashMap<String, RangeGenerator>();
// Set the root Range for this manifest.
rangeService.setRootRange(manifestId);
// Get bundles that can contain IIIF manifest data.
List<Bundle> bundles = utils.getIIIFBundles(item);
for (Bundle bnd : bundles) {
String bundleToCPrefix = null;
if (bundles.size() > 1) {
// Check for bundle Range metadata if multiple IIIF bundles exist.
bundleToCPrefix = utils.getBundleIIIFToC(bnd);
}
RangeGenerator lastRange = root;
for (Bitstream b : utils.getIIIFBitstreams(context, bnd)) {
CanvasGenerator canvasId = sequenceService.addCanvas(context, item, bnd, b);
List<String> tocs = utils.getIIIFToCs(b, bundleToCPrefix);
if (tocs.size() > 0) {
for (String toc : tocs) {
RangeGenerator currRange = root;
String[] parts = toc.split(IIIFUtils.TOC_SEPARATOR_REGEX);
String key = "";
for (int pIdx = 0; pIdx < parts.length; pIdx++) {
if (pIdx > 0) {
key += IIIFUtils.TOC_SEPARATOR;
}
key += parts[pIdx];
if (tocRanges.get(key) != null) {
currRange = tocRanges.get(key);
} else {
// create the sub range
RangeGenerator range = new RangeGenerator(rangeService);
range.setLabel(parts[pIdx]);
// add the range reference to the currRange so to get an identifier
currRange.addSubRange(range);
// move the current range
currRange = range;
tocRanges.put(key, range);
}
}
// add the bitstream canvas to the currRange
currRange
.addCanvas(canvasService.getRangeCanvasReference(canvasId.getIdentifier()));
lastRange = currRange;
}
} else {
lastRange.addCanvas(canvasService.getRangeCanvasReference(canvasId.getIdentifier()));
}
for (Bitstream bitstream : utils.getIIIFBitstreams(context, bnd)) {
// Add the Canvas to the manifest Sequence.
CanvasGenerator canvas = sequenceService.addCanvas(context, item, bnd, bitstream);
// Update the Ranges.
rangeService.updateRanges(bitstream, bundleToCPrefix, canvas);
}
}
if (tocRanges.size() > 0) {
manifestGenerator.addRange(root);
// If Ranges were created, add them to manifest Structures element.
Map<String, RangeGenerator> tocRanges = rangeService.getTocRanges();
if (tocRanges != null && tocRanges.size() > 0) {
RangeGenerator rootRange = rangeService.getRootRange();
manifestGenerator.addRange(rootRange);
for (RangeGenerator range : tocRanges.values()) {
manifestGenerator.addRange(range);
}
@@ -322,8 +291,8 @@ public class ManifestService extends AbstractResourceService {
}
/**
* This method looks for a PDF rendering in the Item's ORIGINAL bundle and adds
* it to the Sequence if found.
* This method looks for a PDF in the Item's ORIGINAL bundle and adds
* it as the Rendering resource if found.
*
* @param item DSpace Item
* @param context DSpace context

View File

@@ -7,7 +7,15 @@
*/
package org.dspace.app.iiif.service;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.dspace.app.iiif.model.generator.CanvasGenerator;
import org.dspace.app.iiif.model.generator.RangeGenerator;
import org.dspace.app.iiif.service.utils.IIIFUtils;
import org.dspace.content.Bitstream;
import org.dspace.core.I18nUtil;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@@ -28,14 +36,110 @@ public class RangeService extends AbstractResourceService {
@Autowired
CanvasService canvasService;
private Map<String, RangeGenerator> tocRanges = new LinkedHashMap<String, RangeGenerator>();
private RangeGenerator currentRange;
private RangeGenerator root;
public RangeService(ConfigurationService configurationService) {
setConfiguration(configurationService);
}
/**
* Ranges expect the Sub range object to have only an identifier.
*
* @param range the sub range to reference
* Get the root range generator. This will contain table of contents entries.
* @return the root RangeGenerator for the current manifest
*/
public RangeGenerator getRootRange() {
    return this.root;
}
/**
 * Creates and installs the root Range for a manifest; all sub-ranges are
 * subsequently attached to it.
 *
 * @param manifestId id of the manifest to which ranges will be added
 */
public void setRootRange(String manifestId) {
    RangeGenerator rootRange = new RangeGenerator(this);
    rootRange.setIdentifier(manifestId + "/range/r0");
    rootRange.setLabel(I18nUtil.getMessage("iiif.toc.root-label"));
    rootRange.addViewingHint("top");
    this.root = rootRange;
}
/**
 * Returns the table-of-contents Ranges created so far.
 *
 * @return map of toc ranges, keyed by ToC path
 */
public Map<String, RangeGenerator> getTocRanges() {
    return tocRanges;
}
/**
 * Routes a bitstream's canvas into the Range structure. When the bitstream
 * carries ToC metadata a (sub-)Range is created or extended for it; otherwise
 * the canvas is appended to the Range currently being populated, if any.
 *
 * @param bitstream bitstream DSO
 * @param bundleToCPrefix range prefix from bundle metadata
 * @param canvas the current canvas generator
 */
public void updateRanges(Bitstream bitstream, String bundleToCPrefix, CanvasGenerator canvas) {
    List<String> tocs = utils.getIIIFToCs(bitstream, bundleToCPrefix);
    if (!tocs.isEmpty()) {
        // This bitstream declares ToC entries: add a new Range for it.
        addTocRange(tocs, canvas);
        return;
    }
    // No ToC metadata: if a Range is already open, the canvas belongs to it.
    if (!tocRanges.isEmpty()) {
        currentRange.addCanvas(canvasService.getRangeCanvasReference(canvas.getIdentifier()));
    }
}
/**
 * Adds sub-ranges to the root Range. If the toc metadata includes a separator,
 * hierarchical sub-ranges are created, one level per separated segment.
 * As a side effect, updates {@code currentRange} to the Range that received
 * the canvas, so that following bitstreams without ToC metadata join it.
 * @param tocs ranges from toc metadata
 * @param canvasGenerator generator for the current canvas
 */
private void addTocRange(List<String> tocs , CanvasGenerator canvasGenerator) {
for (String toc : tocs) {
// Make tempRange a reference to root.
RangeGenerator tempRange = root;
String[] parts = toc.split(IIIFUtils.TOC_SEPARATOR_REGEX);
// Accumulates the full ToC path ("a|||b|||c") used as the map key per level.
String key = "";
// Process sub-ranges.
for (int pIdx = 0; pIdx < parts.length; pIdx++) {
if (pIdx > 0) {
key += IIIFUtils.TOC_SEPARATOR;
}
key += parts[pIdx];
if (tocRanges.get(key) != null) {
// Handles the case of a bitstream that crosses two ranges.
tempRange = tocRanges.get(key);
} else {
RangeGenerator range = new RangeGenerator(this);
range.setLabel(parts[pIdx]);
// Add sub-range to the parent Range; note this call also assigns the
// sub-range its identifier, so it must happen before the range is used.
tempRange.addSubRange(range);
// Add new sub-range to the map.
tocRanges.put(key, range);
// Make tempRange a reference to the new sub-range.
tempRange = range;
}
}
// Add a simple canvas reference to the Range.
tempRange
.addCanvas(canvasService.getRangeCanvasReference(canvasGenerator.getIdentifier()));
// Update the current Range.
currentRange = tempRange;
}
}
/**
* Ranges expect the sub-range to have only an identifier.
*
* @param range the sub-range to reference
* @return RangeGenerator able to create the reference
*/
public RangeGenerator getRangeReference(RangeGenerator range) {

View File

@@ -7,6 +7,11 @@
*/
package org.dspace.app.iiif.service.utils;
import static org.dspace.iiif.util.IIIFSharedUtils.METADATA_IIIF_HEIGHT;
import static org.dspace.iiif.util.IIIFSharedUtils.METADATA_IIIF_IMAGE;
import static org.dspace.iiif.util.IIIFSharedUtils.METADATA_IIIF_SCHEMA;
import static org.dspace.iiif.util.IIIFSharedUtils.METADATA_IIIF_WIDTH;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
@@ -27,9 +32,8 @@ import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.service.BitstreamService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.license.CreativeCommonsServiceImpl;
import org.dspace.iiif.util.IIIFSharedUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@@ -59,9 +63,11 @@ public class IIIFUtils {
// metadata used to set the iiif viewing hint
public static final String METADATA_IIIF_VIEWING_HINT = "iiif.viewing.hint";
// metadata used to set the width of the canvas that has not an explicit name
public static final String METADATA_IMAGE_WIDTH = "iiif.image.width";
public static final String METADATA_IMAGE_WIDTH = METADATA_IIIF_SCHEMA + "." + METADATA_IIIF_IMAGE
+ "." + METADATA_IIIF_WIDTH;
// metadata used to set the height of the canvas that has not an explicit name
public static final String METADATA_IMAGE_HEIGTH = "iiif.image.height";
public static final String METADATA_IMAGE_HEIGHT = METADATA_IIIF_SCHEMA + "." + METADATA_IIIF_IMAGE
+ "." + METADATA_IIIF_HEIGHT;
// string used in the metadata toc as separator among the different levels
public static final String TOC_SEPARATOR = "|||";
@@ -76,49 +82,13 @@ public class IIIFUtils {
@Autowired
protected BitstreamService bitstreamService;
/**
* This method returns the bundles holding IIIF resources if any.
* If there is no IIIF content available an empty bundle list is returned.
* @param item the DSpace item
*
* @return list of DSpace bundles with IIIF content
*/
public List<Bundle> getIIIFBundles(Item item) {
boolean iiif = isIIIFEnabled(item);
List<Bundle> bundles = new ArrayList<>();
if (iiif) {
bundles = item.getBundles().stream().filter(b -> isIIIFBundle(b)).collect(Collectors.toList());
}
return bundles;
return IIIFSharedUtils.getIIIFBundles(item);
}
/**
* This method verify if the IIIF feature is enabled on the item
*
* @param item the dspace item
* @return true if the item supports IIIF
*/
public boolean isIIIFEnabled(Item item) {
return item.getMetadata().stream()
.filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_ENABLED))
.anyMatch(m -> m.getValue().equalsIgnoreCase("true") ||
m.getValue().equalsIgnoreCase("yes"));
}
/**
* Utility method to check is a bundle can contain bitstreams to use as IIIF
* resources
*
* @param b the DSpace bundle to check
* @return true if the bundle can contain bitstreams to use as IIIF resources
*/
private boolean isIIIFBundle(Bundle b) {
return !StringUtils.equalsAnyIgnoreCase(b.getName(), Constants.LICENSE_BUNDLE_NAME,
Constants.METADATA_BUNDLE_NAME, CreativeCommonsServiceImpl.CC_BUNDLE_NAME, "THUMBNAIL",
"BRANDED_PREVIEW", "TEXT", OTHER_CONTENT_BUNDLE)
&& b.getMetadata().stream()
.filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_ENABLED))
.noneMatch(m -> m.getValue().equalsIgnoreCase("false") || m.getValue().equalsIgnoreCase("no"));
return IIIFSharedUtils.isIIIFEnabled(item);
}
/**
@@ -131,7 +101,7 @@ public class IIIFUtils {
*/
public List<Bitstream> getIIIFBitstreams(Context context, Item item) {
List<Bitstream> bitstreams = new ArrayList<Bitstream>();
for (Bundle bnd : getIIIFBundles(item)) {
for (Bundle bnd : IIIFSharedUtils.getIIIFBundles(item)) {
bitstreams
.addAll(getIIIFBitstreams(context, bnd));
}
@@ -385,9 +355,9 @@ public class IIIFUtils {
* @return the height in pixel for the canvas associated with the bitstream
*/
public int getCanvasHeight(Bitstream bitstream, Bundle bundle, Item item, int defaultHeight) {
return getSizeFromMetadata(bitstream, METADATA_IMAGE_HEIGTH,
getSizeFromMetadata(bundle, METADATA_IMAGE_HEIGTH,
getSizeFromMetadata(item, METADATA_IMAGE_HEIGTH, defaultHeight)));
return getSizeFromMetadata(bitstream, METADATA_IMAGE_HEIGHT,
getSizeFromMetadata(bundle, METADATA_IMAGE_HEIGHT,
getSizeFromMetadata(item, METADATA_IMAGE_HEIGHT, defaultHeight)));
}
/**

View File

@@ -12,6 +12,8 @@ import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.dspace.service.impl.HttpConnectionPoolService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.xoai.services.api.config.ConfigurationService;
import org.dspace.xoai.services.api.solr.SolrServerResolver;
import org.springframework.beans.factory.annotation.Autowired;
@@ -27,8 +29,15 @@ public class DSpaceSolrServerResolver implements SolrServerResolver {
public SolrClient getServer() throws SolrServerException {
if (server == null) {
String serverUrl = configurationService.getProperty("oai.solr.url");
HttpConnectionPoolService httpConnectionPoolService
= DSpaceServicesFactory.getInstance()
.getServiceManager()
.getServiceByName("solrHttpConnectionPoolService",
HttpConnectionPoolService.class);
try {
server = new HttpSolrClient.Builder(serverUrl).build();
server = new HttpSolrClient.Builder(serverUrl)
.withHttpClient(httpConnectionPoolService.getClient())
.build();
log.debug("OAI Solr Server Initialized");
} catch (Exception e) {
log.error("Could not initialize OAI Solr Server at " + serverUrl , e);

View File

@@ -13,6 +13,7 @@ import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.dspace.service.impl.HttpConnectionPoolService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
@@ -33,9 +34,16 @@ public class DSpaceSolrServer {
if (_server == null) {
ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
HttpConnectionPoolService httpConnectionPoolService
= DSpaceServicesFactory.getInstance()
.getServiceManager()
.getServiceByName("solrHttpConnectionPoolService",
HttpConnectionPoolService.class);
String serverUrl = configurationService.getProperty("oai.solr.url");
try {
_server = new HttpSolrClient.Builder(serverUrl).build();
_server = new HttpSolrClient.Builder(serverUrl)
.withHttpClient(httpConnectionPoolService.getClient())
.build();
log.debug("OAI Solr Server Initialized");
} catch (Exception e) {
log.error("Could not initialize OAI Solr Server at " + serverUrl , e);

View File

@@ -118,6 +118,7 @@ public class AuthenticationRestController implements InitializingBean {
response.setHeader("WWW-Authenticate", authenticateHeaderValue);
}
authenticationStatusRest.setAuthenticationMethod(context.getAuthenticationMethod());
authenticationStatusRest.setProjection(projection);
AuthenticationStatusResource authenticationStatusResource = converter.toResource(authenticationStatusRest);

View File

@@ -17,6 +17,7 @@ import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.converter.ConverterService;
import org.dspace.app.rest.exception.UnprocessableEntityException;
import org.dspace.app.rest.link.HalLinkService;
import org.dspace.app.rest.model.FacetConfigurationRest;
import org.dspace.app.rest.model.FacetResultsRest;
@@ -52,6 +53,8 @@ public class DiscoveryRestController implements InitializingBean {
private static final Logger log = LogManager.getLogger();
private static final String SOLR_PARSE_ERROR_CLASS = "org.apache.solr.search.SyntaxError";
@Autowired
protected Utils utils;
@@ -149,13 +152,22 @@ public class DiscoveryRestController implements InitializingBean {
}
//Get the Search results in JSON format
SearchResultsRest searchResultsRest = discoveryRestRepository
.getSearchObjects(query, dsoTypes, dsoScope, configuration, searchFilters, page, utils.obtainProjection());
try {
SearchResultsRest searchResultsRest = discoveryRestRepository.getSearchObjects(query, dsoTypes, dsoScope,
configuration, searchFilters, page, utils.obtainProjection());
//Convert the Search JSON results to paginated HAL resources
SearchResultsResource searchResultsResource = new SearchResultsResource(searchResultsRest, utils, page);
halLinkService.addLinks(searchResultsResource, page);
return searchResultsResource;
//Convert the Search JSON results to paginated HAL resources
SearchResultsResource searchResultsResource = new SearchResultsResource(searchResultsRest, utils, page);
halLinkService.addLinks(searchResultsResource, page);
return searchResultsResource;
} catch (IllegalArgumentException e) {
boolean isParsingException = e.getMessage().contains(SOLR_PARSE_ERROR_CLASS);
if (isParsingException) {
throw new UnprocessableEntityException(e.getMessage());
} else {
throw e;
}
}
}
@RequestMapping(method = RequestMethod.GET, value = "/facets")
@@ -198,13 +210,27 @@ public class DiscoveryRestController implements InitializingBean {
+ ", page: " + Objects.toString(page));
}
FacetResultsRest facetResultsRest = discoveryRestRepository
.getFacetObjects(facetName, prefix, query, dsoTypes, dsoScope, configuration, searchFilters, page);
try {
FacetResultsRest facetResultsRest = discoveryRestRepository
.getFacetObjects(facetName, prefix, query, dsoTypes, dsoScope, configuration, searchFilters, page);
FacetResultsResource facetResultsResource = converter.toResource(facetResultsRest);
FacetResultsResource facetResultsResource = converter.toResource(facetResultsRest);
halLinkService.addLinks(facetResultsResource, page);
return facetResultsResource;
halLinkService.addLinks(facetResultsResource, page);
return facetResultsResource;
} catch (Exception e) {
boolean isParsingException = e.getMessage().contains(SOLR_PARSE_ERROR_CLASS);
/*
* We unfortunately have to do a string comparison to locate the source of the error, as Solr only sends
* back a generic exception, and the org.apache.solr.search.SyntaxError is only available as plain text
* in the error message.
*/
if (isParsingException) {
throw new UnprocessableEntityException(e.getMessage());
} else {
throw e;
}
}
}
}

View File

@@ -1,106 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.rest.model.AuthnRest;
import org.dspace.authenticate.ShibAuthentication;
import org.dspace.core.Utils;
import org.dspace.services.ConfigurationService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.hateoas.Link;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
/**
* Rest controller that handles redirect *after* shibboleth authentication succeeded.
* <P>
* Shibboleth authentication does NOT occur in this Controller, but occurs before this class is called.
* The general Shibboleth login process is as follows:
* 1. When Shibboleth plugin is enabled, client/UI receives Shibboleth's absolute URL in WWW-Authenticate header.
* See {@link org.dspace.authenticate.ShibAuthentication} loginPageURL() method.
* 2. Client sends the user to that URL when they select Shibboleth authentication.
* 3. User logs in using Shibboleth
* 4. If successful, they are redirected by Shibboleth to this Controller (the path of this controller is passed
* to Shibboleth as a URL param in step 1)
* 5. NOTE: Prior to hitting this Controller, {@link org.dspace.app.rest.security.ShibbolethAuthenticationFilter}
* briefly intercepts the request in order to check for a valid Shibboleth login (see
* ShibAuthentication.authenticate()) and store that user info in a JWT.
* 6. This Controller then gets the request & looks for a "redirectUrl" param (also a part of the original URL from
* step 1), and redirects the user to that location (after verifying it's a trusted URL). Usually this is a
* redirect back to the Client/UI page where the User started.
*
* @author Andrea Bollini (andrea dot bollini at 4science dot it)
* @author Giuseppe Digilio (giuseppe dot digilio at 4science dot it)
* @see ShibAuthentication
* @see org.dspace.app.rest.security.ShibbolethAuthenticationFilter
*/
@RequestMapping(value = "/api/" + AuthnRest.CATEGORY + "/shibboleth")
@RestController
public class ShibbolethRestController implements InitializingBean {
private static final Logger log = LoggerFactory.getLogger(ShibbolethRestController.class);
@Autowired
ConfigurationService configurationService;
@Autowired
DiscoverableEndpointsService discoverableEndpointsService;
/**
 * Registers this endpoint so it is advertised under /api/authn.
 */
@Override
public void afterPropertiesSet() {
discoverableEndpointsService
.register(this, Arrays.asList(new Link("/api/" + AuthnRest.CATEGORY, "shibboleth")));
}
// LGTM.com thinks this method has an unvalidated URL redirect (https://lgtm.com/rules/4840088/) in `redirectUrl`,
// even though we are clearly validating the hostname of `redirectUrl` and test it in ShibbolethRestControllerIT
@SuppressWarnings("lgtm[java/unvalidated-url-redirection]")
/**
 * Redirects the user after a successful Shibboleth login (see class javadoc for
 * the full flow). The target hostname must match the server or a configured
 * CORS origin; otherwise a 400 is returned.
 *
 * @param response    servlet response used to send the redirect or error
 * @param redirectUrl optional target URL; defaults to the configured UI URL
 * @throws IOException if writing the redirect/error to the response fails
 */
@RequestMapping(method = RequestMethod.GET)
public void shibboleth(HttpServletResponse response,
@RequestParam(name = "redirectUrl", required = false) String redirectUrl) throws IOException {
// NOTE: By the time we get here, we already know that Shibboleth is enabled & authentication succeeded,
// as both of those are verified by ShibbolethAuthenticationFilter which runs before this controller
// If redirectUrl unspecified, default to the configured UI
if (StringUtils.isEmpty(redirectUrl)) {
redirectUrl = configurationService.getProperty("dspace.ui.url");
}
// Validate that the redirectURL matches either the server or UI hostname. It *cannot* be an arbitrary URL.
String redirectHostName = Utils.getHostName(redirectUrl);
String serverHostName = Utils.getHostName(configurationService.getProperty("dspace.server.url"));
// Allowed hosts = this server plus every configured CORS origin (which includes the UI).
ArrayList<String> allowedHostNames = new ArrayList<String>();
allowedHostNames.add(serverHostName);
String[] allowedUrls = configurationService.getArrayProperty("rest.cors.allowed-origins");
for (String url : allowedUrls) {
allowedHostNames.add(Utils.getHostName(url));
}
// Case-insensitive hostname comparison; reject anything outside the allow-list.
if (StringUtils.equalsAnyIgnoreCase(redirectHostName, allowedHostNames.toArray(new String[0]))) {
log.debug("Shibboleth redirecting to " + redirectUrl);
response.sendRedirect(redirectUrl);
} else {
log.error("Invalid Shibboleth redirectURL=" + redirectUrl +
". URL doesn't match hostname of server or UI!");
response.sendError(HttpServletResponse.SC_BAD_REQUEST,
"Invalid redirectURL! Must match server or ui hostname.");
}
}
}

View File

@@ -20,7 +20,6 @@ import org.dspace.app.rest.model.hateoas.ViewEventResource;
import org.dspace.app.rest.repository.SearchEventRestRepository;
import org.dspace.app.rest.repository.StatisticsRestRepository;
import org.dspace.app.rest.repository.ViewEventRestRepository;
import org.dspace.app.rest.utils.Utils;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.rest.webmvc.ControllerUtils;
@@ -39,9 +38,6 @@ import org.springframework.web.bind.annotation.RestController;
@RequestMapping("/api/" + RestAddressableModel.STATISTICS)
public class StatisticsRestController implements InitializingBean {
@Autowired
private Utils utils;
@Autowired
private DiscoverableEndpointsService discoverableEndpointsService;

View File

@@ -0,0 +1,45 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.authorization.impl;
import java.sql.SQLException;
import org.apache.commons.lang.StringUtils;
import org.dspace.app.rest.authorization.AuthorizationFeature;
import org.dspace.app.rest.authorization.AuthorizationFeatureDocumentation;
import org.dspace.app.rest.model.BaseObjectRest;
import org.dspace.app.rest.model.EPersonRest;
import org.dspace.core.Context;
import org.springframework.stereotype.Component;
/**
 * The canChangePassword authorization feature: grants access when there is a
 * logged-in user whose session was authenticated via the "password" method
 * (i.e. a password change is applicable to them).
 */
@Component
@AuthorizationFeatureDocumentation(name = CanChangePasswordFeature.NAME,
    description = "It can be used to verify if the user can change his password")
public class CanChangePasswordFeature implements AuthorizationFeature {

    public static final String NAME = "canChangePassword";

    @Override
    public boolean isAuthorized(Context context, BaseObjectRest object) throws SQLException {
        // Authorized only for a logged-in user who authenticated with a password.
        return context.getCurrentUser() != null
            && StringUtils.equals(context.getAuthenticationMethod(), "password");
    }

    @Override
    public String[] getSupportedTypes() {
        // This feature applies to EPerson resources only.
        return new String[] {EPersonRest.CATEGORY + "." + EPersonRest.NAME};
    }
}

View File

@@ -31,9 +31,9 @@ import org.springframework.stereotype.Component;
* is the object's admin. Otherwise, authorization is granted if the current user can view the object.
*/
@Component
@AuthorizationFeatureDocumentation(name = ViewUsageStatisticsFeature.NAME,
@AuthorizationFeatureDocumentation(name = CanViewUsageStatisticsFeature.NAME,
description = "It can be used to verify if statistics can be viewed")
public class ViewUsageStatisticsFeature implements AuthorizationFeature {
public class CanViewUsageStatisticsFeature implements AuthorizationFeature {
public final static String NAME = "canViewUsageStatistics";
@@ -47,6 +47,7 @@ public class ViewUsageStatisticsFeature implements AuthorizationFeature {
private Utils utils;
@Override
@SuppressWarnings("rawtypes")
public boolean isAuthorized(Context context, BaseObjectRest object) throws SQLException {
if (object instanceof SiteRest
|| object instanceof CommunityRest

View File

@@ -31,6 +31,7 @@ public class BrowseIndexConverter implements DSpaceConverter<BrowseIndex, Browse
BrowseIndexRest bir = new BrowseIndexRest();
bir.setProjection(projection);
bir.setId(obj.getName());
bir.setDataType(obj.getDataType());
bir.setOrder(obj.getDefaultOrder());
bir.setMetadataBrowse(obj.isMetadataIndex());
List<String> metadataList = new ArrayList<String>();

View File

@@ -16,6 +16,7 @@ import org.dspace.app.rest.RestResourceController;
public class AuthenticationStatusRest extends BaseObjectRest<Integer> {
private boolean okay;
private boolean authenticated;
private String authenticationMethod;
public static final String NAME = "status";
public static final String CATEGORY = RestAddressableModel.AUTHENTICATION;
@@ -81,4 +82,12 @@ public class AuthenticationStatusRest extends BaseObjectRest<Integer> {
public void setOkay(boolean okay) {
this.okay = okay;
}
/**
 * @return the name of the authentication method used to establish this
 *         session, or null if it has not been set
 */
public String getAuthenticationMethod() {
return authenticationMethod;
}
/**
 * @param authenticationMethod the name of the authentication method used to
 *                             establish this session
 */
public void setAuthenticationMethod(final String authenticationMethod) {
this.authenticationMethod = authenticationMethod;
}
}

View File

@@ -43,6 +43,8 @@ public class BrowseIndexRest extends BaseObjectRest<String> {
@JsonProperty(value = "metadata")
List<String> metadataList;
String dataType;
List<SortOption> sortOptions;
String order;
@@ -74,6 +76,14 @@ public class BrowseIndexRest extends BaseObjectRest<String> {
this.metadataList = metadataList;
}
/**
 * @return the data type of this browse index, or null if it has not been set
 */
public String getDataType() {
return dataType;
}
/**
 * @param dataType the data type to record for this browse index
 */
public void setDataType(String dataType) {
this.dataType = dataType;
}
/**
 * @return the default sort order of this browse index, or null if unset
 */
public String getOrder() {
return order;
}

View File

@@ -131,7 +131,8 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
}
public FacetResultsRest getFacetObjects(String facetName, String prefix, String query, List<String> dsoTypes,
String dsoScope, final String configuration, List<SearchFilter> searchFilters, Pageable page) {
String dsoScope, final String configuration, List<SearchFilter> searchFilters, Pageable page)
throws SearchServiceException {
Context context = obtainContext();
@@ -139,17 +140,9 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
.getDiscoveryConfigurationByNameOrDso(configuration, scopeObject);
DiscoverResult searchResult = null;
DiscoverQuery discoverQuery = null;
try {
discoverQuery = queryBuilder.buildFacetQuery(context, scopeObject, discoveryConfiguration, prefix, query,
searchFilters, dsoTypes, page, facetName);
searchResult = searchService.search(context, scopeObject, discoverQuery);
} catch (SearchServiceException e) {
log.error("Error while searching with Discovery", e);
//TODO TOM handle search exception
}
DiscoverQuery discoverQuery = queryBuilder.buildFacetQuery(context, scopeObject, discoveryConfiguration, prefix,
query, searchFilters, dsoTypes, page, facetName);
DiscoverResult searchResult = searchService.search(context, scopeObject, discoverQuery);
FacetResultsRest facetResultsRest = discoverFacetResultsConverter.convert(context, facetName, prefix, query,
dsoTypes, dsoScope, searchFilters, searchResult, discoveryConfiguration, page,

View File

@@ -293,17 +293,22 @@ public class EPersonRestRepository extends DSpaceObjectRestRepository<EPerson, E
@PreAuthorize("hasPermission(#uuid, 'EPERSON', #patch)")
protected void patch(Context context, HttpServletRequest request, String apiCategory, String model, UUID uuid,
Patch patch) throws AuthorizeException, SQLException {
if (StringUtils.isNotBlank(request.getParameter("token"))) {
boolean passwordChangeFound = false;
for (Operation operation : patch.getOperations()) {
if (StringUtils.equalsIgnoreCase(operation.getPath(), "/password")) {
passwordChangeFound = true;
}
boolean passwordChangeFound = false;
for (Operation operation : patch.getOperations()) {
if (StringUtils.equalsIgnoreCase(operation.getPath(), "/password")) {
passwordChangeFound = true;
}
}
if (StringUtils.isNotBlank(request.getParameter("token"))) {
if (!passwordChangeFound) {
throw new AccessDeniedException("Refused to perform the EPerson patch based on a token without " +
"changing the password");
}
} else {
if (passwordChangeFound && !StringUtils.equals(context.getAuthenticationMethod(), "password")) {
throw new AccessDeniedException("Refused to perform the EPerson patch based to change the password " +
"for non \"password\" authentication");
}
}
patchDSpaceObject(apiCategory, model, uuid, patch);
}

View File

@@ -27,6 +27,7 @@ import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.rest.webmvc.ResourceNotFoundException;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Component;
@@ -53,7 +54,7 @@ public class StatisticsRestRepository extends DSpaceRestRepository<UsageReportRe
try {
DSpaceObject dso = dspaceObjectUtil.findDSpaceObject(context, uuidObject);
if (dso == null) {
throw new IllegalArgumentException("No DSO found with uuid: " + uuidObject);
throw new ResourceNotFoundException("No DSO found with uuid: " + uuidObject);
}
usageReportRest = usageReportUtils.createUsageReport(context, dso, reportId);
@@ -73,7 +74,7 @@ public class StatisticsRestRepository extends DSpaceRestRepository<UsageReportRe
Context context = obtainContext();
DSpaceObject dso = dspaceObjectUtil.findDSpaceObject(context, uuid);
if (dso == null) {
throw new IllegalArgumentException("No DSO found with uuid: " + uuid);
throw new ResourceNotFoundException("No DSO found with uuid: " + uuid);
}
usageReportsOfItem = usageReportUtils.getUsageReportsOfDSO(context, dso);
} catch (SQLException | ParseException | SolrServerException | IOException e) {

Some files were not shown because too many files have changed in this diff Show More