Merge branch 'main' into task/main/CST-15074

# Conflicts:
#	dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidSynchronizationServiceImpl.java
#	dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderIT.java
Vincenzo Mecca
2025-03-06 19:24:00 +01:00
284 changed files with 8857 additions and 2741 deletions

.github/dependabot.yml (new file)

@@ -0,0 +1,341 @@
#-------------------
# DSpace's dependabot rules. Enables maven updates for all dependencies on a weekly basis
# for main and any maintenance branches. Security updates only apply to main.
#-------------------
version: 2
updates:
  ###############
  ## Main branch
  ###############
  # NOTE: At this time, "security-updates" rules only apply if "target-branch" is unspecified
  # So, only this first section can include "applies-to: security-updates"
  - package-ecosystem: "maven"
    directory: "/"
    schedule:
      interval: "weekly"
    # Allow up to 10 open PRs for dependencies
    open-pull-requests-limit: 10
    # Group together some upgrades in a single PR
    groups:
      # Group together all Build Tools in a single PR
      build-tools:
        applies-to: version-updates
        patterns:
          - "org.apache.maven.plugins:*"
          - "*:*-maven-plugin"
          - "*:maven-*-plugin"
          - "com.github.spotbugs:spotbugs"
          - "com.google.code.findbugs:*"
          - "com.google.errorprone:*"
          - "com.puppycrawl.tools:checkstyle"
          - "org.sonatype.plugins:*"
        exclude-patterns:
          # Exclude anything from Spring, as that is in a separate group
          - "org.springframework.*:*"
        update-types:
          - "minor"
          - "patch"
      test-tools:
        applies-to: version-updates
        patterns:
          - "junit:*"
          - "com.github.stefanbirker:system-rules"
          - "com.h2database:*"
          - "io.findify:s3mock*"
          - "io.netty:*"
          - "org.hamcrest:*"
          - "org.mock-server:*"
          - "org.mockito:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all Apache Commons deps in a single PR
      apache-commons:
        applies-to: version-updates
        patterns:
          - "org.apache.commons:*"
          - "commons-*:commons-*"
        update-types:
          - "minor"
          - "patch"
      # Group together all fasterxml deps in a single PR
      fasterxml:
        applies-to: version-updates
        patterns:
          - "com.fasterxml:*"
          - "com.fasterxml.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all Hibernate deps in a single PR
      hibernate:
        applies-to: version-updates
        patterns:
          - "org.hibernate.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all Jakarta deps in a single PR
      jakarta:
        applies-to: version-updates
        patterns:
          - "jakarta.*:*"
          - "org.eclipse.angus:jakarta.mail"
          - "org.glassfish.jaxb:jaxb-runtime"
        update-types:
          - "minor"
          - "patch"
      # Group together all Spring deps in a single PR
      spring:
        applies-to: version-updates
        patterns:
          - "org.springframework:*"
          - "org.springframework.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all WebJARs deps in a single PR
      webjars:
        applies-to: version-updates
        patterns:
          - "org.webjars:*"
          - "org.webjars.*:*"
        update-types:
          - "minor"
          - "patch"
    ignore:
      # Don't try to auto-update any DSpace dependencies
      - dependency-name: "org.dspace:*"
      - dependency-name: "org.dspace.*:*"
      # Ignore all major version updates for all dependencies. We'll only automate minor/patch updates.
      - dependency-name: "*"
        update-types: ["version-update:semver-major"]
  ######################
  ## dspace-8_x branch
  ######################
  - package-ecosystem: "maven"
    directory: "/"
    target-branch: dspace-8_x
    schedule:
      interval: "weekly"
    # Allow up to 10 open PRs for dependencies
    open-pull-requests-limit: 10
    # Group together some upgrades in a single PR
    groups:
      # Group together all Build Tools in a single PR
      build-tools:
        applies-to: version-updates
        patterns:
          - "org.apache.maven.plugins:*"
          - "*:*-maven-plugin"
          - "*:maven-*-plugin"
          - "com.github.spotbugs:spotbugs"
          - "com.google.code.findbugs:*"
          - "com.google.errorprone:*"
          - "com.puppycrawl.tools:checkstyle"
          - "org.sonatype.plugins:*"
        exclude-patterns:
          # Exclude anything from Spring, as that is in a separate group
          - "org.springframework.*:*"
        update-types:
          - "minor"
          - "patch"
      test-tools:
        applies-to: version-updates
        patterns:
          - "junit:*"
          - "com.github.stefanbirker:system-rules"
          - "com.h2database:*"
          - "io.findify:s3mock*"
          - "io.netty:*"
          - "org.hamcrest:*"
          - "org.mock-server:*"
          - "org.mockito:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all Apache Commons deps in a single PR
      apache-commons:
        applies-to: version-updates
        patterns:
          - "org.apache.commons:*"
          - "commons-*:commons-*"
        update-types:
          - "minor"
          - "patch"
      # Group together all fasterxml deps in a single PR
      fasterxml:
        applies-to: version-updates
        patterns:
          - "com.fasterxml:*"
          - "com.fasterxml.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all Hibernate deps in a single PR
      hibernate:
        applies-to: version-updates
        patterns:
          - "org.hibernate.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all Jakarta deps in a single PR
      jakarta:
        applies-to: version-updates
        patterns:
          - "jakarta.*:*"
          - "org.eclipse.angus:jakarta.mail"
          - "org.glassfish.jaxb:jaxb-runtime"
        update-types:
          - "minor"
          - "patch"
      # Group together all Spring deps in a single PR
      spring:
        applies-to: version-updates
        patterns:
          - "org.springframework:*"
          - "org.springframework.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all WebJARs deps in a single PR
      webjars:
        applies-to: version-updates
        patterns:
          - "org.webjars:*"
          - "org.webjars.*:*"
        update-types:
          - "minor"
          - "patch"
    ignore:
      # Don't try to auto-update any DSpace dependencies
      - dependency-name: "org.dspace:*"
      - dependency-name: "org.dspace.*:*"
      # Ignore all major version updates for all dependencies. We'll only automate minor/patch updates.
      - dependency-name: "*"
        update-types: [ "version-update:semver-major" ]
  ######################
  ## dspace-7_x branch
  ######################
  - package-ecosystem: "maven"
    directory: "/"
    target-branch: dspace-7_x
    schedule:
      interval: "weekly"
    # Allow up to 10 open PRs for dependencies
    open-pull-requests-limit: 10
    # Group together some upgrades in a single PR
    groups:
      # Group together all Build Tools in a single PR
      build-tools:
        applies-to: version-updates
        patterns:
          - "org.apache.maven.plugins:*"
          - "*:*-maven-plugin"
          - "*:maven-*-plugin"
          - "com.github.spotbugs:spotbugs"
          - "com.google.code.findbugs:*"
          - "com.google.errorprone:*"
          - "com.puppycrawl.tools:checkstyle"
          - "org.sonatype.plugins:*"
        exclude-patterns:
          # Exclude anything from Spring, as that is in a separate group
          - "org.springframework.*:*"
        update-types:
          - "minor"
          - "patch"
      test-tools:
        applies-to: version-updates
        patterns:
          - "junit:*"
          - "com.github.stefanbirker:system-rules"
          - "com.h2database:*"
          - "io.findify:s3mock*"
          - "io.netty:*"
          - "org.hamcrest:*"
          - "org.mock-server:*"
          - "org.mockito:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all Apache Commons deps in a single PR
      apache-commons:
        applies-to: version-updates
        patterns:
          - "org.apache.commons:*"
          - "commons-*:commons-*"
        update-types:
          - "minor"
          - "patch"
      # Group together all fasterxml deps in a single PR
      fasterxml:
        applies-to: version-updates
        patterns:
          - "com.fasterxml:*"
          - "com.fasterxml.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all Hibernate deps in a single PR
      hibernate:
        applies-to: version-updates
        patterns:
          - "org.hibernate.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all Jakarta deps in a single PR
      jakarta:
        applies-to: version-updates
        patterns:
          - "jakarta.*:*"
          - "org.eclipse.angus:jakarta.mail"
          - "org.glassfish.jaxb:jaxb-runtime"
        update-types:
          - "minor"
          - "patch"
      # Group together all Google deps in a single PR
      # NOTE: These Google deps are only used in 7.x and have been removed in 8.x and later
      google-apis:
        applies-to: version-updates
        patterns:
          - "com.google.apis:*"
          - "com.google.api-client:*"
          - "com.google.http-client:*"
          - "com.google.oauth-client:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all Spring deps in a single PR
      spring:
        applies-to: version-updates
        patterns:
          - "org.springframework:*"
          - "org.springframework.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all WebJARs deps in a single PR
      webjars:
        applies-to: version-updates
        patterns:
          - "org.webjars:*"
          - "org.webjars.*:*"
        update-types:
          - "minor"
          - "patch"
    ignore:
      # Don't try to auto-update any DSpace dependencies
      - dependency-name: "org.dspace:*"
      - dependency-name: "org.dspace.*:*"
      # Last version of errorprone to support JDK 11 is 2.31.0
      - dependency-name: "com.google.errorprone:*"
        versions: [">=2.32.0"]
      # Spring Security 5.8 changes the behavior of CSRF Tokens in a way which is incompatible with DSpace 7
      # See https://github.com/DSpace/DSpace/pull/9888#issuecomment-2408165545
      - dependency-name: "org.springframework.security:*"
        versions: [">=5.8.0"]
      # Ignore all major version updates for all dependencies. We'll only automate minor/patch updates.
      - dependency-name: "*"
        update-types: [ "version-update:semver-major" ]

.github/workflows/docker.yml

@@ -15,6 +15,7 @@ on:
 permissions:
   contents: read # to fetch code (actions/checkout)
+  packages: write # to write images to GitHub Container Registry (GHCR)
 
 jobs:
   ####################################################
@@ -147,4 +148,102 @@ jobs:
       tags_flavor: suffix=-loadsql
     secrets:
       DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
       DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}
+
+  #################################################################################
+  # Test Deployment via Docker to ensure newly built images are working properly
+  #################################################################################
+  docker-deploy:
+    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
+    if: github.repository == 'dspace/dspace'
+    runs-on: ubuntu-latest
+    # Must run after all major images are built
+    needs: [dspace, dspace-test, dspace-cli, dspace-postgres-pgcrypto, dspace-solr]
+    env:
+      # Override the default dspace.server.url because the backend starts at http://127.0.0.1:8080
+      dspace__P__server__P__url: http://127.0.0.1:8080/server
+      # Enable all optional modules / controllers for this test deployment.
+      # This helps check for errors in deploying these modules via Spring Boot
+      iiif__P__enabled: true
+      ldn__P__enabled: true
+      oai__P__enabled: true
+      rdf__P__enabled: true
+      signposting__P__enabled: true
+      sword__D__server__P__enabled: true
+      swordv2__D__server__P__enabled: true
+      # If this is a PR against main (default branch), use "latest".
+      # Else if this is a PR against a different branch, use the base branch name.
+      # Else if this is a commit on main (default branch), use the "latest" tag.
+      # Else, just use the branch name.
+      # NOTE: DSPACE_VER is used because our docker compose scripts default to using the "-test" image.
+      DSPACE_VER: ${{ (github.event_name == 'pull_request' && github.event.pull_request.base.ref == github.event.repository.default_branch && 'latest') || (github.event_name == 'pull_request' && github.event.pull_request.base.ref) || (github.ref_name == github.event.repository.default_branch && 'latest') || github.ref_name }}
+      # Docker Registry to use for Docker compose scripts below.
+      # We use GitHub's Container Registry to avoid aggressive rate limits at DockerHub.
+      DOCKER_REGISTRY: ghcr.io
+    steps:
+      # Checkout our codebase (to get access to Docker Compose scripts)
+      - name: Checkout codebase
+        uses: actions/checkout@v4
+      # Download Docker image artifacts (which were just built by reusable-docker-build.yml)
+      - name: Download Docker image artifacts
+        uses: actions/download-artifact@v4
+        with:
+          # Download all amd64 Docker images (TAR files) into the /tmp/docker directory
+          pattern: docker-image-*-linux-amd64
+          path: /tmp/docker
+          merge-multiple: true
+      # Load each of the images into Docker by calling "docker image load" for each.
+      # This ensures we are using the images just built & not any prior versions on DockerHub
+      - name: Load all downloaded Docker images
+        run: |
+          find /tmp/docker -type f -name "*.tar" -exec docker image load --input "{}" \;
+          docker image ls -a
+      # Start backend using our compose script in the codebase.
+      - name: Start backend in Docker
+        run: |
+          docker compose -f docker-compose.yml up -d
+          sleep 10
+          docker container ls
+      # Create a test admin account. Load test data from a simple set of AIPs as defined in cli.ingest.yml
+      - name: Load test data into Backend
+        run: |
+          docker compose -f docker-compose-cli.yml run --rm dspace-cli create-administrator -e test@test.edu -f admin -l user -p admin -c en
+          docker compose -f docker-compose-cli.yml -f dspace/src/main/docker-compose/cli.ingest.yml run --rm dspace-cli
+      # Verify backend started successfully.
+      # 1. Make sure root endpoint is responding (check for dspace.name defined in docker-compose.yml)
+      # 2. Also check /collections endpoint to ensure the test data loaded properly (check for a collection name in AIPs)
+      - name: Verify backend is responding properly
+        run: |
+          result=$(wget -O- -q http://127.0.0.1:8080/server/api)
+          echo "$result"
+          echo "$result" | grep -oE "\"DSpace Started with Docker Compose\","
+          result=$(wget -O- -q http://127.0.0.1:8080/server/api/core/collections)
+          echo "$result"
+          echo "$result" | grep -oE "\"Dog in Yard\","
+      # Verify Handle Server can be started and is working properly
+      # 1. First generate the "[dspace]/handle-server" folder with the sitebndl.zip
+      # 2. Start the Handle Server (and wait 20 seconds to let it start up)
+      # 3. Verify logs do NOT include "Exception" in the text (as that means an error occurred)
+      # 4. Check that Handle Proxy HTML page is responding on default port (8000)
+      - name: Verify Handle Server is working properly
+        run: |
+          docker exec -i dspace /dspace/bin/make-handle-config
+          echo "Starting Handle Server..."
+          docker exec -i dspace /dspace/bin/start-handle-server
+          sleep 20
+          echo "Checking for errors in error.log..."
+          result=$(docker exec -i dspace sh -c "cat /dspace/handle-server/logs/error.log* || echo ''")
+          echo "$result"
+          echo "$result" | grep -vqz "Exception"
+          echo "Checking for errors in handle-server.log..."
+          result=$(docker exec -i dspace cat /dspace/log/handle-server.log)
+          echo "$result"
+          echo "$result" | grep -vqz "Exception"
+          echo "Checking to see if Handle Proxy webpage is available..."
+          result=$(wget -O- -q http://127.0.0.1:8000/)
+          echo "$result"
+          echo "$result" | grep -oE "Handle Proxy"
+      # Shutdown our containers
+      - name: Shutdown Docker containers
+        run: |
+          docker compose -f docker-compose.yml down
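
The 'grep -vqz "Exception"' checks above are the non-obvious part of this job. With GNU grep, -z treats the whole log as a single NUL-terminated "line", -v selects non-matching lines, and -q turns selection into an exit code, so the step fails if "Exception" appears anywhere. A minimal sketch of the effect:

    printf 'all good\n' | grep -vqz "Exception"; echo $?                    # 0 -> step passes
    printf 'java.lang.Exception: boom\n' | grep -vqz "Exception"; echo $?   # 1 -> step fails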

.github/workflows/reusable-docker-build.yml

@@ -54,10 +54,13 @@ env:
   # For a new commit on default branch (main), use the literal tag 'latest' on Docker image.
   # For a new commit on other branches, use the branch name as the tag for Docker image.
   # For a new tag, copy that tag name as the tag for Docker image.
+  # For a pull request, use the name of the base branch that the PR was created against or "latest" (for main).
+  # e.g. a PR against 'main' will use "latest"; a PR against 'dspace-7_x' will use 'dspace-7_x'.
   IMAGE_TAGS: |
     type=raw,value=latest,enable=${{ github.ref_name == github.event.repository.default_branch }}
     type=ref,event=branch,enable=${{ github.ref_name != github.event.repository.default_branch }}
     type=ref,event=tag
+    type=raw,value=${{ (github.event.pull_request.base.ref == github.event.repository.default_branch && 'latest') || github.event.pull_request.base.ref }},enable=${{ github.event_name == 'pull_request' }}
   # Define default tag "flavor" for docker/metadata-action per
   # https://github.com/docker/metadata-action#flavor-input
   # We manage the 'latest' tag ourselves to the 'main' branch (see settings above)
@@ -72,6 +75,9 @@ env:
   DEPLOY_DEMO_BRANCH: 'dspace-8_x'
   DEPLOY_SANDBOX_BRANCH: 'main'
   DEPLOY_ARCH: 'linux/amd64'
+  # Registry used during building of Docker images. (All images are later copied to docker.io registry)
+  # We use GitHub's Container Registry to avoid aggressive rate limits at DockerHub.
+  DOCKER_BUILD_REGISTRY: ghcr.io
 
 jobs:
   docker-build:
@@ -96,6 +102,7 @@ jobs:
       # This step converts the slashes in the "arch" matrix values above into dashes & saves to env.ARCH_NAME
       # E.g. "linux/amd64" becomes "linux-amd64"
       # This is necessary because all upload artifacts CANNOT have special chars (like slashes)
+      # NOTE: The regex-like syntax below is Bash Parameter Substitution
      - name: Prepare
        run: |
          platform=${{ matrix.arch }}
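
A minimal sketch of the Bash Parameter Substitution the "Prepare" step performs: the ${var//pattern/replacement} form replaces every match, and the result is saved as env.ARCH_NAME for later steps. The exact echo line is elided from this hunk, so the second line below is an assumption about its shape:

    platform="linux/amd64"
    echo "ARCH_NAME=${platform//\//-}" >> "$GITHUB_ENV"   # saves ARCH_NAME=linux-amd64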
@@ -105,35 +112,45 @@ jobs:
       - name: Checkout codebase
         uses: actions/checkout@v4
 
-      # https://github.com/docker/setup-buildx-action
-      - name: Setup Docker Buildx
-        uses: docker/setup-buildx-action@v3
+      # https://github.com/docker/login-action
+      # NOTE: This login occurs for BOTH non-PRs or PRs. PRs *must* also login to access private images from GHCR
+      # during the build process
+      - name: Login to ${{ env.DOCKER_BUILD_REGISTRY }}
+        uses: docker/login-action@v3
+        with:
+          registry: ${{ env.DOCKER_BUILD_REGISTRY }}
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
 
       # https://github.com/docker/setup-qemu-action
       - name: Set up QEMU emulation to build for multiple architectures
         uses: docker/setup-qemu-action@v3
 
-      # https://github.com/docker/login-action
-      - name: Login to DockerHub
-        # Only login if not a PR, as PRs only trigger a Docker build and not a push
-        if: ${{ ! matrix.isPr }}
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
+      # https://github.com/docker/setup-buildx-action
+      - name: Setup Docker Buildx
+        uses: docker/setup-buildx-action@v3
 
       # https://github.com/docker/metadata-action
-      # Get Metadata for docker_build_deps step below
-      - name: Sync metadata (tags, labels) from GitHub to Docker for image
+      # Extract metadata used for Docker images in all build steps below
+      - name: Extract metadata (tags, labels) from GitHub for Docker image
         id: meta_build
         uses: docker/metadata-action@v5
         with:
-          images: ${{ env.IMAGE_NAME }}
+          images: ${{ env.DOCKER_BUILD_REGISTRY }}/${{ env.IMAGE_NAME }}
           tags: ${{ env.IMAGE_TAGS }}
           flavor: ${{ env.TAGS_FLAVOR }}
 
+      #--------------------------------------------------------------------
+      # First, for all branch commits (non-PRs) we build the image & upload
+      # to GitHub Container Registry (GHCR). After uploading the image
+      # to GHCR, we store the image digest in an artifact, so we can
+      # create a merged manifest later (see 'docker-build_manifest' job).
+      #
+      # NOTE: We use GHCR in order to avoid aggressive rate limits at DockerHub.
+      #--------------------------------------------------------------------
       # https://github.com/docker/build-push-action
-      - name: Build and push image
-        if: ${{ ! matrix.isPr }}
+      - name: Build and push image to ${{ env.DOCKER_BUILD_REGISTRY }}
         id: docker_build
         uses: docker/build-push-action@v5
         with:
@@ -141,15 +158,20 @@ jobs:
             ${{ inputs.dockerfile_additional_contexts }}
           context: ${{ inputs.dockerfile_context }}
           file: ${{ inputs.dockerfile_path }}
+          # Tell DSpace's Docker files to use the build registry instead of DockerHub
+          build-args:
+            DOCKER_REGISTRY=${{ env.DOCKER_BUILD_REGISTRY }}
           platforms: ${{ matrix.arch }}
-          # For pull requests, we run the Docker build (to ensure no PR changes break the build),
-          # but we ONLY do an image push to DockerHub if it's NOT a PR
-          push: ${{ ! matrix.isPr }}
+          push: true
           # Use tags / labels provided by 'docker/metadata-action' above
           tags: ${{ steps.meta_build.outputs.tags }}
           labels: ${{ steps.meta_build.outputs.labels }}
+          # Use GitHub cache to load cached Docker images and cache the results of this build
+          # This decreases the number of images we need to fetch from DockerHub
+          cache-from: type=gha,scope=${{ inputs.build_id }}
+          cache-to: type=gha,scope=${{ inputs.build_id }},mode=max
 
-      # Export the digest of Docker build locally (for non PRs only)
+      # Export the digest of Docker build locally
       - name: Export Docker build digest
         if: ${{ ! matrix.isPr }}
         run: |
@@ -157,7 +179,8 @@ jobs:
           digest="${{ steps.docker_build.outputs.digest }}"
           touch "/tmp/digests/${digest#sha256:}"
 
-      # Upload digest to an artifact, so that it can be used in manifest below
+      # Upload digest to an artifact, so that it can be used in combined manifest below
+      # (The purpose of the combined manifest is to list both amd64 and arm64 builds under same tag)
       - name: Upload Docker build digest to artifact
         if: ${{ ! matrix.isPr }}
         uses: actions/upload-artifact@v4
@@ -167,33 +190,60 @@ jobs:
           if-no-files-found: error
           retention-days: 1
 
-      # If this build is NOT a PR and passed in a REDEPLOY_SANDBOX_URL secret,
-      # Then redeploy https://sandbox.dspace.org if this build is for our deployment architecture and 'main' branch.
-      - name: Redeploy sandbox.dspace.org (based on main branch)
-        if: |
-          !matrix.isPR &&
-          env.REDEPLOY_SANDBOX_URL != '' &&
-          matrix.arch == env.DEPLOY_ARCH &&
-          github.ref_name == env.DEPLOY_SANDBOX_BRANCH
-        run: |
-          curl -X POST $REDEPLOY_SANDBOX_URL
-
-      # If this build is NOT a PR and passed in a REDEPLOY_DEMO_URL secret,
-      # Then redeploy https://demo.dspace.org if this build is for our deployment architecture and demo branch.
-      - name: Redeploy demo.dspace.org (based on maintenance branch)
-        if: |
-          !matrix.isPR &&
-          env.REDEPLOY_DEMO_URL != '' &&
-          matrix.arch == env.DEPLOY_ARCH &&
-          github.ref_name == env.DEPLOY_DEMO_BRANCH
-        run: |
-          curl -X POST $REDEPLOY_DEMO_URL
+      #------------------------------------------------------------------------------
+      # Second, we build the image again in order to store it in a local TAR file.
+      # This TAR of the image is cached/saved as an artifact, so that it can be used
+      # by later jobs to install the brand-new images for automated testing.
+      # This TAR build is performed BOTH for PRs and for branch commits (non-PRs).
+      #
+      # (This approach has the advantage of avoiding having to download the newly built
+      # image from DockerHub or GHCR during automated testing.)
+      #
+      # See the 'docker-deploy' job in docker.yml as an example of where this TAR is used.
+      #-------------------------------------------------------------------------------
+      # Build local image (again) and store in a TAR file in /tmp directory
+      # This step is only done for AMD64, as that's the only image we use in our automated testing (at this time).
+      # NOTE: This step cannot be combined with the build above as it's a different type of output.
+      - name: Build and push image to local TAR file
+        if: ${{ matrix.arch == 'linux/amd64' }}
+        uses: docker/build-push-action@v5
+        with:
+          build-contexts: |
+            ${{ inputs.dockerfile_additional_contexts }}
+          context: ${{ inputs.dockerfile_context }}
+          file: ${{ inputs.dockerfile_path }}
+          # Tell DSpace's Docker files to use the build registry instead of DockerHub
+          build-args:
+            DOCKER_REGISTRY=${{ env.DOCKER_BUILD_REGISTRY }}
+          platforms: ${{ matrix.arch }}
+          tags: ${{ steps.meta_build.outputs.tags }}
+          labels: ${{ steps.meta_build.outputs.labels }}
+          # Use GitHub cache to load cached Docker images and cache the results of this build
+          # This decreases the number of images we need to fetch from DockerHub
+          cache-from: type=gha,scope=${{ inputs.build_id }}
+          cache-to: type=gha,scope=${{ inputs.build_id }},mode=max
+          # Export image to a local TAR file
+          outputs: type=docker,dest=/tmp/${{ inputs.build_id }}.tar
+
+      # Upload the local docker image (in TAR file) to a build Artifact
+      # This step is only done for AMD64, as that's the only image we use in our automated testing (at this time).
+      - name: Upload local image TAR to artifact
+        if: ${{ matrix.arch == 'linux/amd64' }}
+        uses: actions/upload-artifact@v4
+        with:
+          name: docker-image-${{ inputs.build_id }}-${{ env.ARCH_NAME }}
+          path: /tmp/${{ inputs.build_id }}.tar
+          if-no-files-found: error
+          retention-days: 1
 
-  # Merge Docker digests (from various architectures) into a manifest.
-  # This runs after all Docker builds complete above, and it tells hub.docker.com
-  # that these builds should be all included in the manifest for this tag.
-  # (e.g. AMD64 and ARM64 should be listed as options under the same tagged Docker image)
+  ##########################################################################################
+  # Merge Docker digests (from various architectures) into a single manifest.
+  # This runs after all Docker builds complete above. The purpose is to include all builds
+  # under a single manifest for this tag.
+  # (e.g. both linux/amd64 and linux/arm64 should be listed under the same tagged Docker image)
+  ##########################################################################################
   docker-build_manifest:
+    # Only run if this is NOT a PR
     if: ${{ github.event_name != 'pull_request' }}
     runs-on: ubuntu-latest
     needs:
@@ -207,29 +257,102 @@ jobs:
           pattern: digests-${{ inputs.build_id }}-*
           merge-multiple: true
 
+      - name: Login to ${{ env.DOCKER_BUILD_REGISTRY }}
+        uses: docker/login-action@v3
+        with:
+          registry: ${{ env.DOCKER_BUILD_REGISTRY }}
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
 
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v3
 
       - name: Add Docker metadata for image
         id: meta
         uses: docker/metadata-action@v5
+        with:
+          images: ${{ env.DOCKER_BUILD_REGISTRY }}/${{ env.IMAGE_NAME }}
+          tags: ${{ env.IMAGE_TAGS }}
+          flavor: ${{ env.TAGS_FLAVOR }}
+
+      - name: Create manifest list from digests and push to ${{ env.DOCKER_BUILD_REGISTRY }}
+        working-directory: /tmp/digests
+        run: |
+          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
+            $(printf '${{ env.DOCKER_BUILD_REGISTRY }}/${{ env.IMAGE_NAME }}@sha256:%s ' *)
+
+      - name: Inspect manifest in ${{ env.DOCKER_BUILD_REGISTRY }}
+        run: |
+          docker buildx imagetools inspect ${{ env.DOCKER_BUILD_REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.meta.outputs.version }}
+
+  ##########################################################################################
+  # Copy images / manifest to DockerHub.
+  # This MUST run after *both* images (AMD64 and ARM64) are built and uploaded to GitHub
+  # Container Registry (GHCR). Attempting to run this in parallel to GHCR builds can result
+  # in a race condition...i.e. the copy to DockerHub may fail if GHCR image has been updated
+  # at the moment when the copy occurs.
+  ##########################################################################################
+  docker-copy_to_dockerhub:
+    # Only run if this is NOT a PR
+    if: ${{ github.event_name != 'pull_request' }}
+    runs-on: ubuntu-latest
+    needs:
+      - docker-build_manifest
+    steps:
+      # 'regctl' is used to more easily copy the image to DockerHub and obtain the digest from DockerHub
+      # See https://github.com/regclient/regclient/blob/main/docs/regctl.md
+      - name: Install regctl for Docker registry tools
+        uses: regclient/actions/regctl-installer@main
+        with:
+          release: 'v0.8.0'
+
+      # This recreates Docker tags for DockerHub
+      - name: Add Docker metadata for image
+        id: meta_dockerhub
+        uses: docker/metadata-action@v5
         with:
           images: ${{ env.IMAGE_NAME }}
           tags: ${{ env.IMAGE_TAGS }}
           flavor: ${{ env.TAGS_FLAVOR }}
 
-      - name: Login to Docker Hub
+      # Login to source registry first, as this is where we are copying *from*
+      - name: Login to ${{ env.DOCKER_BUILD_REGISTRY }}
+        uses: docker/login-action@v3
+        with:
+          registry: ${{ env.DOCKER_BUILD_REGISTRY }}
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      # Login to DockerHub, since this is where we are copying *to*
+      - name: Login to DockerHub
         uses: docker/login-action@v3
         with:
           username: ${{ secrets.DOCKER_USERNAME }}
           password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
 
-      - name: Create manifest list from digests and push
-        working-directory: /tmp/digests
+      # Copy the image from source to DockerHub
+      - name: Copy image from ${{ env.DOCKER_BUILD_REGISTRY }} to docker.io
         run: |
-          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
-            $(printf '${{ env.IMAGE_NAME }}@sha256:%s ' *)
+          regctl image copy ${{ env.DOCKER_BUILD_REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.meta_dockerhub.outputs.version }} docker.io/${{ env.IMAGE_NAME }}:${{ steps.meta_dockerhub.outputs.version }}
 
-      - name: Inspect image
-        run: |
-          docker buildx imagetools inspect ${{ env.IMAGE_NAME }}:${{ steps.meta.outputs.version }}
+      #--------------------------------------------------------------------
+      # Finally, check whether demo.dspace.org or sandbox.dspace.org need
+      # to be redeployed based on these new DockerHub images.
+      #--------------------------------------------------------------------
+      # If this build is for the branch that Sandbox uses and passed in a REDEPLOY_SANDBOX_URL secret,
+      # Then redeploy https://sandbox.dspace.org
+      - name: Redeploy sandbox.dspace.org (based on main branch)
+        if: |
+          env.REDEPLOY_SANDBOX_URL != '' &&
+          github.ref_name == env.DEPLOY_SANDBOX_BRANCH
+        run: |
+          curl -X POST $REDEPLOY_SANDBOX_URL
+
+      # If this build is for the branch that Demo uses and passed in a REDEPLOY_DEMO_URL secret,
+      # Then redeploy https://demo.dspace.org
+      - name: Redeploy demo.dspace.org (based on maintenance branch)
+        if: |
+          env.REDEPLOY_DEMO_URL != '' &&
+          github.ref_name == env.DEPLOY_DEMO_BRANCH
+        run: |
+          curl -X POST $REDEPLOY_DEMO_URL
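
The manifest-creation step above assembles its -t flags with jq from the JSON that docker/metadata-action exports as DOCKER_METADATA_OUTPUT_JSON. A minimal sketch with a hypothetical two-tag payload:

    echo '{"tags":["ghcr.io/dspace/dspace:latest","ghcr.io/dspace/dspace:main"]}' |
        jq -cr '.tags | map("-t " + .) | join(" ")'
    # prints: -t ghcr.io/dspace/dspace:latest -t ghcr.io/dspace/dspace:main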

.gitignore

@@ -10,6 +10,7 @@ tags
 .project
 .classpath
 .checkstyle
+.factorypath
 
 ## Ignore project files created by IntelliJ IDEA
 *.iml
@@ -27,6 +28,9 @@ nbdist/
 nbactions.xml
 nb-configuration.xml
 
+## Ignore project files created by Visual Studio Code
+.vscode/
+
 ## Ignore all *.properties file in root folder, EXCEPT build.properties (the default)
 ## KEPT FOR BACKWARDS COMPATIBILITY WITH 5.x (build.properties is now replaced with local.cfg)
 /*.properties

Dockerfile

@@ -6,10 +6,14 @@
 # This Dockerfile uses JDK17 by default.
 # To build with other versions, use "--build-arg JDK_VERSION=[value]"
 ARG JDK_VERSION=17
+# The Docker version tag to build from
 ARG DSPACE_VERSION=latest
+# The Docker registry to use for DSpace images. Defaults to "docker.io"
+# NOTE: non-DSpace images are hardcoded to use "docker.io" and are not impacted by this build argument
+ARG DOCKER_REGISTRY=docker.io
 
 # Step 1 - Run Maven Build
-FROM dspace/dspace-dependencies:${DSPACE_VERSION} AS build
+FROM ${DOCKER_REGISTRY}/dspace/dspace-dependencies:${DSPACE_VERSION} AS build
 ARG TARGET_DIR=dspace-installer
 WORKDIR /app
 # The dspace-installer directory will be written to /install
@@ -31,35 +35,38 @@ RUN mvn --no-transfer-progress package ${MAVEN_FLAGS} && \
 RUN rm -rf /install/webapps/server/
 
 # Step 2 - Run Ant Deploy
-FROM eclipse-temurin:${JDK_VERSION} AS ant_build
+FROM docker.io/eclipse-temurin:${JDK_VERSION} AS ant_build
 ARG TARGET_DIR=dspace-installer
 # COPY the /install directory from 'build' container to /dspace-src in this container
 COPY --from=build /install /dspace-src
 WORKDIR /dspace-src
 # Create the initial install deployment using ANT
-ENV ANT_VERSION 1.10.13
-ENV ANT_HOME /tmp/ant-$ANT_VERSION
-ENV PATH $ANT_HOME/bin:$PATH
-# Need wget to install ant
-RUN apt-get update \
-    && apt-get install -y --no-install-recommends wget \
-    && apt-get purge -y --auto-remove \
-    && rm -rf /var/lib/apt/lists/*
+ENV ANT_VERSION=1.10.13
+ENV ANT_HOME=/tmp/ant-$ANT_VERSION
+ENV PATH=$ANT_HOME/bin:$PATH
 # Download and install 'ant'
 RUN mkdir $ANT_HOME && \
-    wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
+    curl --silent --show-error --location --fail --retry 5 --output /tmp/apache-ant.tar.gz \
+      https://archive.apache.org/dist/ant/binaries/apache-ant-${ANT_VERSION}-bin.tar.gz && \
+    tar -zx --strip-components=1 -f /tmp/apache-ant.tar.gz -C $ANT_HOME && \
+    rm /tmp/apache-ant.tar.gz
 # Run necessary 'ant' deploy scripts
 RUN ant init_installation update_configs update_code update_webapps
 
 # Step 3 - Start up DSpace via Runnable JAR
-FROM eclipse-temurin:${JDK_VERSION}
+FROM docker.io/eclipse-temurin:${JDK_VERSION}
 # NOTE: DSPACE_INSTALL must align with the "dspace.dir" default configuration.
 ENV DSPACE_INSTALL=/dspace
 # Copy the /dspace directory from 'ant_build' container to /dspace in this container
 COPY --from=ant_build /dspace $DSPACE_INSTALL
 WORKDIR $DSPACE_INSTALL
-# Expose Tomcat port
-EXPOSE 8080
+# Need host command for "[dspace]/bin/make-handle-config"
+RUN apt-get update \
+    && apt-get install -y --no-install-recommends host \
+    && apt-get purge -y --auto-remove \
+    && rm -rf /var/lib/apt/lists/*
+# Expose Tomcat port (8080) & Handle Server HTTP port (8000)
+EXPOSE 8080 8000
 # Give java extra memory (2GB)
 ENV JAVA_OPTS=-Xmx2000m
 # On startup, run DSpace Runnable JAR

Dockerfile.cli

@@ -6,10 +6,14 @@
 # This Dockerfile uses JDK17 by default.
 # To build with other versions, use "--build-arg JDK_VERSION=[value]"
 ARG JDK_VERSION=17
+# The Docker version tag to build from
 ARG DSPACE_VERSION=latest
+# The Docker registry to use for DSpace images. Defaults to "docker.io"
+# NOTE: non-DSpace images are hardcoded to use "docker.io" and are not impacted by this build argument
+ARG DOCKER_REGISTRY=docker.io
 
 # Step 1 - Run Maven Build
-FROM dspace/dspace-dependencies:${DSPACE_VERSION} AS build
+FROM ${DOCKER_REGISTRY}/dspace/dspace-dependencies:${DSPACE_VERSION} AS build
 ARG TARGET_DIR=dspace-installer
 WORKDIR /app
 # The dspace-installer directory will be written to /install
@@ -25,28 +29,26 @@ RUN mvn --no-transfer-progress package && \
     mvn clean
 
 # Step 2 - Run Ant Deploy
-FROM eclipse-temurin:${JDK_VERSION} AS ant_build
+FROM docker.io/eclipse-temurin:${JDK_VERSION} AS ant_build
 ARG TARGET_DIR=dspace-installer
 # COPY the /install directory from 'build' container to /dspace-src in this container
 COPY --from=build /install /dspace-src
 WORKDIR /dspace-src
 # Create the initial install deployment using ANT
-ENV ANT_VERSION 1.10.13
-ENV ANT_HOME /tmp/ant-$ANT_VERSION
-ENV PATH $ANT_HOME/bin:$PATH
-# Need wget to install ant
-RUN apt-get update \
-    && apt-get install -y --no-install-recommends wget \
-    && apt-get purge -y --auto-remove \
-    && rm -rf /var/lib/apt/lists/*
+ENV ANT_VERSION=1.10.13
+ENV ANT_HOME=/tmp/ant-$ANT_VERSION
+ENV PATH=$ANT_HOME/bin:$PATH
 # Download and install 'ant'
 RUN mkdir $ANT_HOME && \
-    wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
+    curl --silent --show-error --location --fail --retry 5 --output /tmp/apache-ant.tar.gz \
+      https://archive.apache.org/dist/ant/binaries/apache-ant-${ANT_VERSION}-bin.tar.gz && \
+    tar -zx --strip-components=1 -f /tmp/apache-ant.tar.gz -C $ANT_HOME && \
+    rm /tmp/apache-ant.tar.gz
 # Run necessary 'ant' deploy scripts
 RUN ant init_installation update_configs update_code
 
 # Step 3 - Run jdk
-FROM eclipse-temurin:${JDK_VERSION}
+FROM docker.io/eclipse-temurin:${JDK_VERSION}
 # NOTE: DSPACE_INSTALL must align with the "dspace.dir" default configuration.
 ENV DSPACE_INSTALL=/dspace
 # Copy the /dspace directory from 'ant_build' container to /dspace in this container

Dockerfile.dependencies

@@ -6,8 +6,8 @@
 # To build with other versions, use "--build-arg JDK_VERSION=[value]"
 ARG JDK_VERSION=17
 
-# Step 1 - Run Maven Build
-FROM maven:3-eclipse-temurin-${JDK_VERSION} AS build
+# Step 1 - Download all Dependencies
+FROM docker.io/maven:3-eclipse-temurin-${JDK_VERSION} AS build
 ARG TARGET_DIR=dspace-installer
 WORKDIR /app
 # Create the 'dspace' user account & home directory
@@ -19,16 +19,64 @@ RUN chown -Rv dspace: /app
 # Switch to dspace user & run below commands as that user
 USER dspace
 
-# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents)
-ADD --chown=dspace . /app/
+# This next part may look odd, but it speeds up the build of this image *significantly*.
+# Copy ONLY the POMs to this image (from local machine). This will allow us to download all dependencies *without*
+# performing any code compilation steps.
+
+# Parent POM
+ADD --chown=dspace pom.xml /app/
+RUN mkdir -p /app/dspace
+
+# 'dspace' module POM. Includes 'additions' ONLY, as it's the only submodule that is required to exist.
+ADD --chown=dspace dspace/pom.xml /app/dspace/
+RUN mkdir -p /app/dspace/modules/
+ADD --chown=dspace dspace/modules/pom.xml /app/dspace/modules/
+RUN mkdir -p /app/dspace/modules/additions
+ADD --chown=dspace dspace/modules/additions/pom.xml /app/dspace/modules/additions/
+
+# 'dspace-api' module POM
+RUN mkdir -p /app/dspace-api
+ADD --chown=dspace dspace-api/pom.xml /app/dspace-api/
+
+# 'dspace-iiif' module POM
+RUN mkdir -p /app/dspace-iiif
+ADD --chown=dspace dspace-iiif/pom.xml /app/dspace-iiif/
+
+# 'dspace-oai' module POM
+RUN mkdir -p /app/dspace-oai
+ADD --chown=dspace dspace-oai/pom.xml /app/dspace-oai/
+
+# 'dspace-rdf' module POM
+RUN mkdir -p /app/dspace-rdf
+ADD --chown=dspace dspace-rdf/pom.xml /app/dspace-rdf/
+
+# 'dspace-saml2' module POM
+RUN mkdir -p /app/dspace-saml2
+ADD --chown=dspace dspace-saml2/pom.xml /app/dspace-saml2/
+
+# 'dspace-server-webapp' module POM
+RUN mkdir -p /app/dspace-server-webapp
+ADD --chown=dspace dspace-server-webapp/pom.xml /app/dspace-server-webapp/
+
+# 'dspace-services' module POM
+RUN mkdir -p /app/dspace-services
+ADD --chown=dspace dspace-services/pom.xml /app/dspace-services/
+
+# 'dspace-sword' module POM
+RUN mkdir -p /app/dspace-sword
+ADD --chown=dspace dspace-sword/pom.xml /app/dspace-sword/
+
+# 'dspace-swordv2' module POM
+RUN mkdir -p /app/dspace-swordv2
+ADD --chown=dspace dspace-swordv2/pom.xml /app/dspace-swordv2/
 
 # Trigger the installation of all maven dependencies (hide download progress messages)
 # Maven flags here ensure that we skip final assembly, skip building test environment and skip all code verification checks.
-# These flags speed up this installation as much as reasonably possible.
-ENV MAVEN_FLAGS="-P-assembly -P-test-environment -Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxml.skip=true"
-RUN mvn --no-transfer-progress install ${MAVEN_FLAGS}
+# These flags speed up this installation and skip tasks we cannot perform as we don't have the full source code.
+ENV MAVEN_FLAGS="-P-assembly -P-test-environment -Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxjc.skip=true -Dxml.skip=true"
+RUN mvn --no-transfer-progress verify ${MAVEN_FLAGS}
 
-# Clear the contents of the /app directory (including all maven builds), so no artifacts remain.
+# Clear the contents of the /app directory (including all maven target folders), so no artifacts remain.
 # This ensures when dspace:dspace is built, it will use the Maven local cache (~/.m2) for dependencies
 USER root
 RUN rm -rf /app/*
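
A hypothetical build command for this image, assuming it is run from the repository root so all module POMs listed above are in the build context:

    docker build -f Dockerfile.dependencies -t dspace/dspace-dependencies:latest .

Because only POMs are copied into the build stage, Docker's layer cache invalidates this expensive dependency-download layer only when a POM changes, not on every source edit.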

Dockerfile.test

@@ -8,10 +8,14 @@
 # This Dockerfile uses JDK17 by default.
 # To build with other versions, use "--build-arg JDK_VERSION=[value]"
 ARG JDK_VERSION=17
+# The Docker version tag to build from
 ARG DSPACE_VERSION=latest
+# The Docker registry to use for DSpace images. Defaults to "docker.io"
+# NOTE: non-DSpace images are hardcoded to use "docker.io" and are not impacted by this build argument
+ARG DOCKER_REGISTRY=docker.io
 
 # Step 1 - Run Maven Build
-FROM dspace/dspace-dependencies:${DSPACE_VERSION} AS build
+FROM ${DOCKER_REGISTRY}/dspace/dspace-dependencies:${DSPACE_VERSION} AS build
 ARG TARGET_DIR=dspace-installer
 WORKDIR /app
 # The dspace-installer directory will be written to /install
@@ -30,38 +34,41 @@ RUN mvn --no-transfer-progress package && \
 RUN rm -rf /install/webapps/server/
 
 # Step 2 - Run Ant Deploy
-FROM eclipse-temurin:${JDK_VERSION} AS ant_build
+FROM docker.io/eclipse-temurin:${JDK_VERSION} AS ant_build
 ARG TARGET_DIR=dspace-installer
 # COPY the /install directory from 'build' container to /dspace-src in this container
 COPY --from=build /install /dspace-src
 WORKDIR /dspace-src
 # Create the initial install deployment using ANT
-ENV ANT_VERSION 1.10.12
-ENV ANT_HOME /tmp/ant-$ANT_VERSION
-ENV PATH $ANT_HOME/bin:$PATH
-# Need wget to install ant
-RUN apt-get update \
-    && apt-get install -y --no-install-recommends wget \
-    && apt-get purge -y --auto-remove \
-    && rm -rf /var/lib/apt/lists/*
+ENV ANT_VERSION=1.10.12
+ENV ANT_HOME=/tmp/ant-$ANT_VERSION
+ENV PATH=$ANT_HOME/bin:$PATH
 # Download and install 'ant'
 RUN mkdir $ANT_HOME && \
-    wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
+    curl --silent --show-error --location --fail --retry 5 --output /tmp/apache-ant.tar.gz \
+      https://archive.apache.org/dist/ant/binaries/apache-ant-${ANT_VERSION}-bin.tar.gz && \
+    tar -zx --strip-components=1 -f /tmp/apache-ant.tar.gz -C $ANT_HOME && \
+    rm /tmp/apache-ant.tar.gz
 # Run necessary 'ant' deploy scripts
 RUN ant init_installation update_configs update_code update_webapps
 
 # Step 3 - Start up DSpace via Runnable JAR
-FROM eclipse-temurin:${JDK_VERSION}
+FROM docker.io/eclipse-temurin:${JDK_VERSION}
 # NOTE: DSPACE_INSTALL must align with the "dspace.dir" default configuration.
 ENV DSPACE_INSTALL=/dspace
 # Copy the /dspace directory from 'ant_build' container to /dspace in this container
 COPY --from=ant_build /dspace $DSPACE_INSTALL
 WORKDIR $DSPACE_INSTALL
+# Need host command for "[dspace]/bin/make-handle-config"
+RUN apt-get update \
+    && apt-get install -y --no-install-recommends host \
+    && apt-get purge -y --auto-remove \
+    && rm -rf /var/lib/apt/lists/*
 # Expose Tomcat port and debugging port
 EXPOSE 8080 8000
 # Give java extra memory (2GB)
 ENV JAVA_OPTS=-Xmx2000m
-# Set up debugging
-ENV CATALINA_OPTS=-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=*:8000
+# enable JVM debugging via JDWP
+ENV JAVA_TOOL_OPTIONS=-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:8000
 # On startup, run DSpace Runnable JAR
 ENTRYPOINT ["java", "-jar", "webapps/server-boot.jar", "--dspace.dir=$DSPACE_INSTALL"]

checkstyle.xml

@@ -92,7 +92,7 @@ For more information on CheckStyle configurations below, see: http://checkstyle.
         <!-- Requirements for Javadocs for methods -->
         <module name="JavadocMethod">
             <!-- All public methods MUST HAVE Javadocs -->
-            <property name="scope" value="public"/>
+            <property name="accessModifiers" value="public"/>
             <!-- Allow params, throws and return tags to be optional -->
             <property name="allowMissingParamTags" value="true"/>
             <property name="allowMissingReturnTag" value="true"/>

docker-compose-cli.yml

@@ -6,7 +6,7 @@ networks:
     external: true
 services:
   dspace-cli:
-    image: "${DOCKER_OWNER:-dspace}/dspace-cli:${DSPACE_VER:-latest}"
+    image: "${DOCKER_REGISTRY:-docker.io}/${DOCKER_OWNER:-dspace}/dspace-cli:${DSPACE_VER:-latest}"
     container_name: dspace-cli
     build:
       context: .

docker-compose.yml

@@ -28,7 +28,7 @@ services:
       # from the host machine. This IP range MUST correspond to the 'dspacenet' subnet defined above.
       proxies__P__trusted__P__ipranges: '172.23.0'
       LOGGING_CONFIG: /dspace/config/log4j2-container.xml
-    image: "${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-latest-test}"
+    image: "${DOCKER_REGISTRY:-docker.io}/${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-latest-test}"
     build:
       context: .
       dockerfile: Dockerfile.test
@@ -64,7 +64,7 @@ services:
   dspacedb:
     container_name: dspacedb
     # Uses a custom Postgres image with pgcrypto installed
-    image: "${DOCKER_OWNER:-dspace}/dspace-postgres-pgcrypto:${DSPACE_VER:-latest}"
+    image: "${DOCKER_REGISTRY:-docker.io}/${DOCKER_OWNER:-dspace}/dspace-postgres-pgcrypto:${DSPACE_VER:-latest}"
     build:
       # Must build out of subdirectory to have access to install script for pgcrypto
       context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/
@@ -84,7 +84,7 @@ services:
   # DSpace Solr container
   dspacesolr:
     container_name: dspacesolr
-    image: "${DOCKER_OWNER:-dspace}/dspace-solr:${DSPACE_VER:-latest}"
+    image: "${DOCKER_REGISTRY:-docker.io}/${DOCKER_OWNER:-dspace}/dspace-solr:${DSPACE_VER:-latest}"
     build:
       context: ./dspace/src/main/docker/dspace-solr/
       # Provide path to Solr configs necessary to build Docker image

dspace-api/pom.xml

@@ -102,7 +102,7 @@
             <plugin>
                 <groupId>org.codehaus.mojo</groupId>
                 <artifactId>build-helper-maven-plugin</artifactId>
-                <version>3.4.0</version>
+                <version>3.6.0</version>
                 <executions>
                     <execution>
                         <phase>validate</phase>
@@ -116,7 +116,7 @@
             <plugin>
                 <groupId>org.codehaus.mojo</groupId>
                 <artifactId>buildnumber-maven-plugin</artifactId>
-                <version>3.2.0</version>
+                <version>3.2.1</version>
                 <configuration>
                     <revisionOnScmFailure>UNKNOWN_REVISION</revisionOnScmFailure>
                 </configuration>
@@ -177,7 +177,7 @@
             <plugin>
                 <groupId>org.codehaus.mojo</groupId>
                 <artifactId>jaxb2-maven-plugin</artifactId>
-                <version>3.1.0</version>
+                <version>3.2.0</version>
                 <executions>
                     <execution>
                         <id>workflow-curation</id>
@@ -341,6 +341,14 @@
             <groupId>org.apache.logging.log4j</groupId>
             <artifactId>log4j-api</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.apache.logging.log4j</groupId>
+            <artifactId>log4j-core</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.logging.log4j</groupId>
+            <artifactId>log4j-slf4j2-impl</artifactId>
+        </dependency>
         <dependency>
             <groupId>org.hibernate.orm</groupId>
             <artifactId>hibernate-core</artifactId>
@@ -388,6 +396,13 @@
         <dependency>
             <groupId>org.springframework</groupId>
             <artifactId>spring-orm</artifactId>
+            <exclusions>
+                <!-- Spring JCL is unnecessary and conflicts with commons-logging when both are on classpath -->
+                <exclusion>
+                    <groupId>org.springframework</groupId>
+                    <artifactId>spring-jcl</artifactId>
+                </exclusion>
+            </exclusions>
         </dependency>
         <dependency>
@@ -406,6 +421,16 @@
                 <groupId>org.mortbay.jasper</groupId>
                 <artifactId>apache-jsp</artifactId>
             </exclusion>
+                <!-- Excluded BouncyCastle dependencies because we use a later version of BouncyCastle.
+                     Having two versions of BouncyCastle in the classpath can cause Handle Server to throw errors. -->
+                <exclusion>
+                    <groupId>org.bouncycastle</groupId>
+                    <artifactId>bcpkix-jdk15on</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.bouncycastle</groupId>
+                    <artifactId>bcprov-jdk15on</artifactId>
+                </exclusion>
             </exclusions>
         </dependency>
@@ -623,7 +648,7 @@
<dependency> <dependency>
<groupId>dnsjava</groupId> <groupId>dnsjava</groupId>
<artifactId>dnsjava</artifactId> <artifactId>dnsjava</artifactId>
<version>3.6.0</version> <version>3.6.3</version>
</dependency> </dependency>
<dependency> <dependency>
@@ -667,28 +692,6 @@
<version>${flyway.version}</version> <version>${flyway.version}</version>
</dependency> </dependency>
<!-- Google Analytics -->
<dependency>
<groupId>com.google.apis</groupId>
<artifactId>google-api-services-analytics</artifactId>
</dependency>
<dependency>
<groupId>com.google.api-client</groupId>
<artifactId>google-api-client</artifactId>
</dependency>
<dependency>
<groupId>com.google.http-client</groupId>
<artifactId>google-http-client</artifactId>
</dependency>
<dependency>
<groupId>com.google.http-client</groupId>
<artifactId>google-http-client-jackson2</artifactId>
</dependency>
<dependency>
<groupId>com.google.oauth-client</groupId>
<artifactId>google-oauth-client</artifactId>
</dependency>
<!-- FindBugs --> <!-- FindBugs -->
<dependency> <dependency>
<groupId>com.google.code.findbugs</groupId> <groupId>com.google.code.findbugs</groupId>
@@ -702,7 +705,6 @@
<dependency> <dependency>
<groupId>jakarta.inject</groupId> <groupId>jakarta.inject</groupId>
<artifactId>jakarta.inject-api</artifactId> <artifactId>jakarta.inject-api</artifactId>
<version>2.0.1</version>
</dependency> </dependency>
<!-- JAXB API and implementation (no longer bundled as of Java 11) --> <!-- JAXB API and implementation (no longer bundled as of Java 11) -->
@@ -733,7 +735,7 @@
<dependency> <dependency>
<groupId>com.amazonaws</groupId> <groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-s3</artifactId> <artifactId>aws-java-sdk-s3</artifactId>
<version>1.12.261</version> <version>1.12.781</version>
</dependency> </dependency>
<!-- TODO: This may need to be replaced with the "orcid-model" artifact once this ticket is resolved: <!-- TODO: This may need to be replaced with the "orcid-model" artifact once this ticket is resolved:
@@ -748,6 +750,11 @@
<groupId>org.javassist</groupId> <groupId>org.javassist</groupId>
<artifactId>javassist</artifactId> <artifactId>javassist</artifactId>
</exclusion> </exclusion>
<!-- Exclude snakeyaml as a newer version is brought in by Spring Boot -->
<exclusion>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
</exclusion>
</exclusions> </exclusions>
</dependency> </dependency>
@@ -769,25 +776,27 @@
<dependency> <dependency>
<groupId>com.opencsv</groupId> <groupId>com.opencsv</groupId>
<artifactId>opencsv</artifactId> <artifactId>opencsv</artifactId>
<version>5.9</version> <version>5.10</version>
</dependency> </dependency>
<!-- Email templating --> <!-- Email templating -->
<dependency> <dependency>
<groupId>org.apache.velocity</groupId> <groupId>org.apache.velocity</groupId>
<artifactId>velocity-engine-core</artifactId> <artifactId>velocity-engine-core</artifactId>
<version>2.4.1</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.xmlunit</groupId> <groupId>org.xmlunit</groupId>
<artifactId>xmlunit-core</artifactId> <artifactId>xmlunit-core</artifactId>
<version>2.10.0</version>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.bcel</groupId> <groupId>org.apache.bcel</groupId>
<artifactId>bcel</artifactId> <artifactId>bcel</artifactId>
<version>6.7.0</version> <version>6.10.0</version>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
@@ -814,7 +823,7 @@
<dependency> <dependency>
<groupId>org.mock-server</groupId> <groupId>org.mock-server</groupId>
<artifactId>mockserver-junit-rule</artifactId> <artifactId>mockserver-junit-rule</artifactId>
<version>5.11.2</version> <version>5.15.0</version>
<scope>test</scope> <scope>test</scope>
<exclusions> <exclusions>
<!-- Exclude snakeyaml to avoid conflicts with: spring-boot-starter-cache --> <!-- Exclude snakeyaml to avoid conflicts with: spring-boot-starter-cache -->
@@ -856,75 +865,4 @@
</exclusions> </exclusions>
</dependency> </dependency>
</dependencies> </dependencies>
<dependencyManagement>
<dependencies>
<!-- for mockserver -->
<!-- Solve dependency convergence issues related to Solr and
'mockserver-junit-rule' by selecting the versions we want to use. -->
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-buffer</artifactId>
<version>4.1.106.Final</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-transport</artifactId>
<version>4.1.106.Final</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-transport-native-unix-common</artifactId>
<version>4.1.106.Final</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-common</artifactId>
<version>4.1.106.Final</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-handler</artifactId>
<version>4.1.106.Final</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-codec</artifactId>
<version>4.1.106.Final</version>
</dependency>
<dependency>
<groupId>org.apache.velocity</groupId>
<artifactId>velocity-engine-core</artifactId>
<version>2.3</version>
</dependency>
<dependency>
<groupId>org.xmlunit</groupId>
<artifactId>xmlunit-core</artifactId>
<version>2.10.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.github.java-json-tools</groupId>
<artifactId>json-schema-validator</artifactId>
<version>2.2.14</version>
</dependency>
<dependency>
<groupId>jakarta.validation</groupId>
<artifactId>jakarta.validation-api</artifactId>
<version>3.0.2</version>
</dependency>
<dependency>
<groupId>io.swagger</groupId>
<artifactId>swagger-core</artifactId>
<version>1.6.2</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>2.13.11</version>
<scope>test</scope>
</dependency>
</dependencies>
</dependencyManagement>
</project> </project>
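When exclusions like the BouncyCastle ones above are the fix for a classpath clash, it can help to verify at runtime which jar actually wins. A minimal, generic sketch (nothing DSpace-specific; the class name is simply the usual BouncyCastle provider entry point):

    public class WhichJar {
        public static void main(String[] args) throws ClassNotFoundException {
            Class<?> bc = Class.forName("org.bouncycastle.jce.provider.BouncyCastleProvider");
            // Prints the jar that supplies the class; a null CodeSource would mean
            // it came from the bootstrap loader rather than an application jar.
            System.out.println(bc.getProtectionDomain().getCodeSource().getLocation());
        }
    }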

DefaultAccessStatusHelper.java

@@ -8,6 +8,7 @@
 package org.dspace.access.status;

 import java.sql.SQLException;
+import java.time.Instant;
 import java.util.Date;
 import java.util.List;
 import java.util.Objects;
@@ -26,7 +27,6 @@ import org.dspace.content.service.ItemService;
 import org.dspace.core.Constants;
 import org.dspace.core.Context;
 import org.dspace.eperson.Group;
-import org.joda.time.LocalDate;

 /**
  * Default plugin implementation of the access status helper.
@@ -230,7 +230,7 @@ public class DefaultAccessStatusHelper implements AccessStatusHelper {
             // If the policy is not valid there is an active embargo
             Date startDate = policy.getStartDate();
-            if (startDate != null && !startDate.before(LocalDate.now().toDate())) {
+            if (startDate != null && !startDate.before(Date.from(Instant.now()))) {
                 // There is an active embargo: aim to take the shortest embargo (account for rare cases where
                 // more than one resource policy exists)
                 if (embargoDate == null) {
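Worth noting for reviewers: the Joda-Time expression and its java.time replacement are not exact equivalents. A minimal sketch of the difference (requires Joda-Time on the classpath; variable names are hypothetical):

    import java.time.Instant;
    import java.util.Date;

    class EmbargoCheckSketch {
        static void compare() {
            Date midnightToday = new org.joda.time.LocalDate().toDate(); // old: 00:00 today
            Date rightNow = Date.from(Instant.now());                    // new: this instant
            // With "!startDate.before(...)", a start date of today counted as an active
            // embargo for the whole day under the old check; the new check appears to stop
            // matching as soon as the current instant passes the stored timestamp.
        }
    }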

TikaTextExtractionFilter.java

@@ -18,6 +18,7 @@ import java.nio.charset.StandardCharsets;
 import org.apache.commons.lang.StringUtils;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
+import org.apache.poi.util.IOUtils;
 import org.apache.tika.Tika;
 import org.apache.tika.exception.TikaException;
 import org.apache.tika.metadata.Metadata;
@@ -72,21 +73,23 @@ public class TikaTextExtractionFilter
         // Not using temporary file. We'll use Tika's default in-memory parsing.
         // Get maximum characters to extract. Default is 100,000 chars, which is also Tika's default setting.
         String extractedText;
-        int maxChars = configurationService.getIntProperty("textextractor.max-chars", 100000);
+        int maxChars = configurationService.getIntProperty("textextractor.max-chars", 100_000);
         try {
             // Use Tika to extract text from input. Tika will automatically detect the file type.
             Tika tika = new Tika();
             tika.setMaxStringLength(maxChars); // Tell Tika the maximum number of characters to extract
+            IOUtils.setByteArrayMaxOverride(
                configurationService.getIntProperty("textextractor.max-array", 100_000_000));
             extractedText = tika.parseToString(source);
         } catch (IOException e) {
             System.err.format("Unable to extract text from bitstream in Item %s%n", currentItem.getID().toString());
-            e.printStackTrace();
+            e.printStackTrace(System.err);
             log.error("Unable to extract text from bitstream in Item {}", currentItem.getID().toString(), e);
             throw e;
         } catch (OutOfMemoryError oe) {
             System.err.format("OutOfMemoryError occurred when extracting text from bitstream in Item %s. " +
                 "You may wish to enable 'textextractor.use-temp-file'.%n", currentItem.getID().toString());
-            oe.printStackTrace();
+            oe.printStackTrace(System.err);
             log.error("OutOfMemoryError occurred when extracting text from bitstream in Item {}. " +
                 "You may wish to enable 'textextractor.use-temp-file'.", currentItem.getID().toString(), oe);
             throw oe;
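The two limits guard different failure modes: Tika's string cap bounds how much text comes back, while POI's byte-array override bounds how large a single allocation POI will attempt when unpacking OOXML containers. A standalone sketch using the same configuration keys as the diff (hard-coded defaults stand in for the ConfigurationService lookups):

    import java.io.InputStream;
    import org.apache.poi.util.IOUtils;
    import org.apache.tika.Tika;

    class ExtractionLimitsSketch {
        static String extract(InputStream source) throws Exception {
            int maxChars = 100_000;      // textextractor.max-chars: cap on characters returned by Tika
            int maxBytes = 100_000_000;  // textextractor.max-array: cap on byte[] allocations inside POI

            IOUtils.setByteArrayMaxOverride(maxBytes); // protects POI-based parsers (docx, xlsx, ...)
            Tika tika = new Tika();
            tika.setMaxStringLength(maxChars);         // truncates, rather than fails, long documents
            return tika.parseToString(source);         // may still OOM on huge embedded streams
        }
    }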

LogAnalyser.java

@@ -281,10 +281,14 @@ public class LogAnalyser {
      */
     private static String fileTemplate = "dspace\\.log.*";

+    private static final ConfigurationService configurationService =
+        DSpaceServicesFactory.getInstance().getConfigurationService();
+
     /**
      * the configuration file from which to configure the analyser
      */
-    private static String configFile;
+    private static String configFile = configurationService.getProperty("dspace.dir")
+        + File.separator + "config" + File.separator + "dstat.cfg";

     /**
      * the output file to which to write aggregation data
@@ -616,8 +620,6 @@ public class LogAnalyser {
     }

     // now do the host name and url lookup
-    ConfigurationService configurationService
-        = DSpaceServicesFactory.getInstance().getConfigurationService();
     hostName = Utils.getHostName(configurationService.getProperty("dspace.ui.url"));
     name = configurationService.getProperty("dspace.name").trim();
     url = configurationService.getProperty("dspace.ui.url").trim();
@@ -658,8 +660,6 @@ public class LogAnalyser {
                              String myConfigFile, String myOutFile,
                              Date myStartDate, Date myEndDate,
                              boolean myLookUp) {
-    ConfigurationService configurationService
-        = DSpaceServicesFactory.getInstance().getConfigurationService();

     if (myLogDir != null) {
         logDir = myLogDir;
@@ -673,9 +673,6 @@ public class LogAnalyser {
     if (myConfigFile != null) {
         configFile = myConfigFile;
-    } else {
-        configFile = configurationService.getProperty("dspace.dir")
-            + File.separator + "config" + File.separator + "dstat.cfg";
     }

     if (myStartDate != null) {
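One detail worth keeping in mind with this refactor: static initializers run in declaration order, so the shared ConfigurationService field must be declared above any static field whose initializer uses it. A minimal sketch of the pattern (class name hypothetical):

    import java.io.File;
    import org.dspace.services.ConfigurationService;
    import org.dspace.services.factory.DSpaceServicesFactory;

    class StaticConfigSketch {
        // declared first, so it is already assigned when configFile's initializer runs
        private static final ConfigurationService configurationService =
            DSpaceServicesFactory.getInstance().getConfigurationService();

        private static String configFile = configurationService.getProperty("dspace.dir")
            + File.separator + "config" + File.separator + "dstat.cfg";
    }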


@@ -46,8 +46,6 @@ Several "stock" implementations are provided.
 <dd>writes event records to the Java logger.</dd>
 <dt>{@link org.dspace.statistics.SolrLoggerUsageEventListener SolrLoggerUsageEventListener}</dt>
 <dd>writes event records to Solr.</dd>
-<dt>{@link org.dspace.google.GoogleRecorderEventListener GoogleRecorderEventListener}<.dt>
-<dd>writes event records to Google Analytics.</dd>
 </dl>
 </body>
 </html>

AuthorizeUtil.java

@@ -523,9 +523,9 @@ public class AuthorizeUtil {
         for (Collection coll : colls) {
             if (!AuthorizeConfiguration
-                    .canCollectionAdminPerformItemReinstatiate()) {
+                    .canCollectionAdminPerformItemReinstate()) {
                 if (AuthorizeConfiguration
-                        .canCommunityAdminPerformItemReinstatiate()
+                        .canCommunityAdminPerformItemReinstate()
                         && authorizeService.authorizeActionBoolean(context,
                             coll.getCommunities().get(0), Constants.ADMIN)) {
                     // authorized

DCInput.java

@@ -163,7 +163,7 @@ public class DCInput {
      * The scope of the input sets, this restricts hidden metadata fields from
      * view by the end user during submission.
      */
-    public static final String SUBMISSION_SCOPE = "submit";
+    public static final String SUBMISSION_SCOPE = "submission";

     /**
      * Class constructor for creating a DCInput object based on the contents of
@@ -262,7 +262,7 @@ public class DCInput {
     /**
      * Is this DCInput for display in the given scope? The scope should be
-     * either "workflow" or "submit", as per the input forms definition. If the
+     * either "workflow" or "submission", as per the input forms definition. If the
      * internal visibility is set to "null" then this will always return true.
      *
      * @param scope String identifying the scope that this input's visibility

OpenSearchServiceImpl.java

@@ -14,7 +14,6 @@ import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-import java.util.Map;

 import com.rometools.modules.opensearch.OpenSearchModule;
 import com.rometools.modules.opensearch.entity.OSQuery;
@@ -58,12 +57,12 @@ public class OpenSearchServiceImpl implements OpenSearchService {
     private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(OpenSearchServiceImpl.class);

     // Namespaces used
-    protected final String osNs = "http://a9.com/-/spec/opensearch/1.1/";
+    protected final static String osNs = "http://a9.com/-/spec/opensearch/1.1/";

-    @Autowired(required = true)
+    @Autowired
     protected ConfigurationService configurationService;
-    @Autowired(required = true)
+    @Autowired
     protected HandleService handleService;

     protected OpenSearchServiceImpl() {
@@ -119,11 +118,10 @@ public class OpenSearchServiceImpl implements OpenSearchService {
     @Override
     public String getResultsString(Context context, String format, String query, int totalResults, int start,
-                                   int pageSize,
-                                   IndexableObject scope, List<IndexableObject> results,
-                                   Map<String, String> labels) throws IOException {
+                                   int pageSize, IndexableObject scope, List<IndexableObject> results)
+        throws IOException {
         try {
-            return getResults(context, format, query, totalResults, start, pageSize, scope, results, labels)
+            return getResults(context, format, query, totalResults, start, pageSize, scope, results)
                 .outputString();
         } catch (FeedException e) {
             log.error(e.toString(), e);
@@ -133,11 +131,10 @@ public class OpenSearchServiceImpl implements OpenSearchService {
     @Override
     public Document getResultsDoc(Context context, String format, String query, int totalResults, int start,
-                                  int pageSize,
-                                  IndexableObject scope, List<IndexableObject> results, Map<String, String> labels)
+                                  int pageSize, IndexableObject scope, List<IndexableObject> results)
         throws IOException {
         try {
-            return getResults(context, format, query, totalResults, start, pageSize, scope, results, labels)
+            return getResults(context, format, query, totalResults, start, pageSize, scope, results)
                 .outputW3CDom();
         } catch (FeedException e) {
             log.error(e.toString(), e);
@@ -146,8 +143,7 @@ public class OpenSearchServiceImpl implements OpenSearchService {
     }

     protected SyndicationFeed getResults(Context context, String format, String query, int totalResults, int start,
-                                         int pageSize, IndexableObject scope,
-                                         List<IndexableObject> results, Map<String, String> labels) {
+                                         int pageSize, IndexableObject scope, List<IndexableObject> results) {
         // Encode results in requested format
         if ("rss".equals(format)) {
             format = "rss_2.0";
@@ -156,7 +152,7 @@ public class OpenSearchServiceImpl implements OpenSearchService {
         }

         SyndicationFeed feed = new SyndicationFeed();
-        feed.populate(null, context, scope, results, labels);
+        feed.populate(null, context, scope, results);
         feed.setType(format);
         feed.addModule(openSearchMarkup(query, totalResults, start, pageSize));
         return feed;

SyndicationFeed.java

@@ -11,6 +11,7 @@ import java.io.IOException;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Date;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -135,8 +136,6 @@ public class SyndicationFeed {
     protected String[] podcastableMIMETypes =
         configurationService.getArrayProperty("webui.feed.podcast.mimetypes", new String[] {"audio/x-mpeg"});

-    // -------- Instance variables:
-
     // the feed object we are building
     protected SyndFeed feed = null;
@@ -146,9 +145,6 @@ public class SyndicationFeed {
     protected CommunityService communityService;
     protected ItemService itemService;

-    /**
-     * Constructor.
-     */
     public SyndicationFeed() {
         feed = new SyndFeedImpl();
         ContentServiceFactory contentServiceFactory = ContentServiceFactory.getInstance();
@@ -157,16 +153,6 @@ public class SyndicationFeed {
         communityService = contentServiceFactory.getCommunityService();
     }

-    /**
-     * Returns list of metadata selectors used to compose the description element
-     *
-     * @return selector list - format 'schema.element[.qualifier]'
-     */
-    public static String[] getDescriptionSelectors() {
-        return (String[]) ArrayUtils.clone(descriptionFields);
-    }
-
     /**
      * Fills in the feed and entry-level metadata from DSpace objects.
      *
@@ -174,15 +160,17 @@ public class SyndicationFeed {
      * @param context context
      * @param dso     the scope
      * @param items   array of objects
-     * @param labels  label map
      */
     public void populate(HttpServletRequest request, Context context, IndexableObject dso,
-                         List<IndexableObject> items, Map<String, String> labels) {
+                         List<IndexableObject> items) {
         String logoURL = null;
         String objectURL = null;
         String defaultTitle = null;
         boolean podcastFeed = false;
         this.request = request;
+        Map<String, String> labels = getLabels();

         // dso is null for the whole site, or a search without scope
         if (dso == null) {
             defaultTitle = configurationService.getProperty("dspace.name");
@@ -553,5 +541,19 @@ public class SyndicationFeed {
         List<MetadataValue> dcv = itemService.getMetadataByMetadataString(item, field);
         return (dcv.size() > 0) ? dcv.get(0).getValue() : null;
     }
+
+    /**
+     * Internal method to get labels for the returned document
+     */
+    private Map<String, String> getLabels() {
+        // TODO: get strings from translation file or configuration
+        Map<String, String> labelMap = new HashMap<>();
+        labelMap.put(SyndicationFeed.MSG_UNTITLED, "notitle");
+        labelMap.put(SyndicationFeed.MSG_LOGO_TITLE, "logo.title");
+        labelMap.put(SyndicationFeed.MSG_FEED_DESCRIPTION, "general-feed.description");
+        for (String selector : descriptionFields) {
+            labelMap.put("metadata." + selector, selector);
+        }
+        return labelMap;
+    }
 }
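With the labels map now built internally by getLabels(), a caller of populate() shrinks accordingly. A hypothetical call site, sketched under the assumption that a Context, a scope object, and a result list already exist:

    import java.util.List;
    import org.dspace.core.Context;
    import org.dspace.discovery.IndexableObject;

    class FeedCallerSketch {
        String render(Context context, IndexableObject scope, List<IndexableObject> items) {
            SyndicationFeed feed = new SyndicationFeed();
            feed.populate(null /* request */, context, scope, items); // no labels argument anymore
            feed.setType("atom_1.0");
            return feed.outputString();
        }
    }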

OpenSearchService.java

@@ -10,7 +10,6 @@ package org.dspace.app.util.service;
 import java.io.IOException;
 import java.sql.SQLException;
 import java.util.List;
-import java.util.Map;

 import org.dspace.content.DSpaceObject;
 import org.dspace.core.Context;
@@ -86,14 +85,12 @@
      * @param pageSize     - page size
      * @param scope        - search scope, null or the community/collection
      * @param results      the retrieved DSpace objects satisfying search
-     * @param labels       labels to apply - format specific
      * @return formatted search results
      * @throws IOException if IO error
      */
     public String getResultsString(Context context, String format, String query, int totalResults, int start,
-                                   int pageSize,
-                                   IndexableObject scope, List<IndexableObject> results,
-                                   Map<String, String> labels) throws IOException;
+                                   int pageSize, IndexableObject scope, List<IndexableObject> results)
+        throws IOException;

     /**
      * Returns a formatted set of search results as a document
@@ -106,13 +103,11 @@
      * @param pageSize     - page size
      * @param scope        - search scope, null or the community/collection
      * @param results      the retrieved DSpace objects satisfying search
-     * @param labels       labels to apply - format specific
      * @return formatted search results
      * @throws IOException if IO error
      */
     public Document getResultsDoc(Context context, String format, String query, int totalResults, int start,
-                                  int pageSize,
-                                  IndexableObject scope, List<IndexableObject> results, Map<String, String> labels)
+                                  int pageSize, IndexableObject scope, List<IndexableObject> results)
         throws IOException;

     public DSpaceObject resolveScope(Context context, String scope) throws SQLException;
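A hedged sketch of an updated call site for the slimmed-down interface; the service lookup and all argument values are assumptions, not part of the diff:

    import java.io.IOException;
    import java.util.List;
    import org.dspace.app.util.service.OpenSearchService;
    import org.dspace.core.Context;
    import org.dspace.discovery.IndexableObject;

    class OpenSearchCallerSketch {
        String describe(OpenSearchService openSearchService, Context context, String query,
                        IndexableObject scope, List<IndexableObject> results) throws IOException {
            // the trailing Map<String, String> labels argument was removed; all else is unchanged
            return openSearchService.getResultsString(context, "atom", query,
                results.size(), 0, 20, scope, results);
        }
    }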

LDAPAuthentication.java

@@ -17,6 +17,7 @@ import java.util.Collections;
 import java.util.Hashtable;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Optional;

 import javax.naming.NamingEnumeration;
 import javax.naming.NamingException;
 import javax.naming.directory.Attribute;
@@ -68,12 +69,8 @@ import org.dspace.services.factory.DSpaceServicesFactory;
  * @author Ivan Masár
  * @author Michael Plate
  */
-public class LDAPAuthentication
-    implements AuthenticationMethod {
+public class LDAPAuthentication implements AuthenticationMethod {

-    /**
-     * log4j category
-     */
     private static final Logger log
         = org.apache.logging.log4j.LogManager.getLogger(LDAPAuthentication.class);
@@ -130,7 +127,7 @@ public class LDAPAuthentication
         return false;
     }

-    /*
+    /**
      * This is an explicit method.
      */
     @Override
@@ -138,7 +135,7 @@ public class LDAPAuthentication
         return false;
     }

-    /*
+    /**
      * Add authenticated users to the group defined in dspace.cfg by
      * the login.specialgroup key.
      */
@@ -177,7 +174,7 @@ public class LDAPAuthentication
         return Collections.EMPTY_LIST;
     }

-    /*
+    /**
      * Authenticate the given credentials.
      * This is the heart of the authentication method: test the
      * credentials for authenticity, and if accepted, attempt to match
@@ -187,7 +184,7 @@ public class LDAPAuthentication
      * @param context
      *     DSpace context, will be modified (ePerson set) upon success.
      *
-     * @param username
+     * @param netid
      *     Username (or email address) when method is explicit. Use null for
      *     implicit method.
      *
@@ -250,7 +247,7 @@ public class LDAPAuthentication
         }

         // Check a DN was found
-        if ((dn == null) || (dn.trim().equals(""))) {
+        if (StringUtils.isBlank(dn)) {
             log.info(LogHelper
                 .getHeader(context, "failed_login", "no DN found for user " + netid));
             return BAD_CREDENTIALS;
@@ -269,6 +266,18 @@ public class LDAPAuthentication
             context.setCurrentUser(eperson);
             request.setAttribute(LDAP_AUTHENTICATED, true);

+            // update eperson's attributes
+            context.turnOffAuthorisationSystem();
+            setEpersonAttributes(context, eperson, ldap, Optional.empty());
+            try {
+                ePersonService.update(context, eperson);
+                context.dispatchEvents();
+            } catch (AuthorizeException e) {
+                log.warn("update of eperson " + eperson.getID() + " failed", e);
+            } finally {
+                context.restoreAuthSystemState();
+            }
+
             // assign user to groups based on ldap dn
             assignGroups(dn, ldap.ldapGroup, context);
@@ -313,14 +322,13 @@ public class LDAPAuthentication
                     log.info(LogHelper.getHeader(context,
                         "type=ldap-login", "type=ldap_but_already_email"));
                     context.turnOffAuthorisationSystem();
-                    eperson.setNetid(netid.toLowerCase());
+                    setEpersonAttributes(context, eperson, ldap, Optional.of(netid));
                     ePersonService.update(context, eperson);
                     context.dispatchEvents();
                     context.restoreAuthSystemState();
                     context.setCurrentUser(eperson);
                     request.setAttribute(LDAP_AUTHENTICATED, true);

                     // assign user to groups based on ldap dn
                     assignGroups(dn, ldap.ldapGroup, context);
@@ -331,20 +339,7 @@ public class LDAPAuthentication
                     try {
                         context.turnOffAuthorisationSystem();
                         eperson = ePersonService.create(context);
-                        if (StringUtils.isNotEmpty(email)) {
-                            eperson.setEmail(email);
-                        }
-                        if (StringUtils.isNotEmpty(ldap.ldapGivenName)) {
-                            eperson.setFirstName(context, ldap.ldapGivenName);
-                        }
-                        if (StringUtils.isNotEmpty(ldap.ldapSurname)) {
-                            eperson.setLastName(context, ldap.ldapSurname);
-                        }
-                        if (StringUtils.isNotEmpty(ldap.ldapPhone)) {
-                            ePersonService.setMetadataSingleValue(context, eperson,
-                                MD_PHONE, ldap.ldapPhone, null);
-                        }
-                        eperson.setNetid(netid.toLowerCase());
+                        setEpersonAttributes(context, eperson, ldap, Optional.of(netid));
                         eperson.setCanLogIn(true);
                         authenticationService.initEPerson(context, request, eperson);
                         ePersonService.update(context, eperson);
@@ -382,6 +377,29 @@ public class LDAPAuthentication
         return BAD_ARGS;
     }

+    /**
+     * Update eperson's attributes
+     */
+    private void setEpersonAttributes(Context context, EPerson eperson, SpeakerToLDAP ldap, Optional<String> netid)
+        throws SQLException {
+        if (StringUtils.isNotEmpty(ldap.ldapEmail)) {
+            eperson.setEmail(ldap.ldapEmail);
+        }
+        if (StringUtils.isNotEmpty(ldap.ldapGivenName)) {
+            eperson.setFirstName(context, ldap.ldapGivenName);
+        }
+        if (StringUtils.isNotEmpty(ldap.ldapSurname)) {
+            eperson.setLastName(context, ldap.ldapSurname);
+        }
+        if (StringUtils.isNotEmpty(ldap.ldapPhone)) {
+            ePersonService.setMetadataSingleValue(context, eperson, MD_PHONE, ldap.ldapPhone, null);
+        }
+        if (netid.isPresent()) {
+            eperson.setNetid(netid.get().toLowerCase());
+        }
+    }
+
     /**
      * Internal class to manage LDAP query and results, mainly
      * because there are multiple values to return.
@@ -503,6 +521,7 @@ public class LDAPAuthentication
                 } else {
                     searchName = ldap_provider_url + ldap_search_context;
                 }
+                @SuppressWarnings("BanJNDI")
                 NamingEnumeration<SearchResult> answer = ctx.search(
                     searchName,
                     "(&({0}={1}))", new Object[] {ldap_id_field,
@@ -553,7 +572,7 @@ public class LDAPAuthentication
                 att = atts.get(attlist[4]);
                 if (att != null) {
                     // loop through all groups returned by LDAP
-                    ldapGroup = new ArrayList<String>();
+                    ldapGroup = new ArrayList<>();
                     for (NamingEnumeration val = att.getAll(); val.hasMoreElements(); ) {
                         ldapGroup.add((String) val.next());
                     }
@@ -633,7 +652,8 @@ public class LDAPAuthentication
                     ctx.addToEnvironment(javax.naming.Context.AUTHORITATIVE, "true");
                     ctx.addToEnvironment(javax.naming.Context.REFERRAL, "follow");
                     // dummy operation to check if authentication has succeeded
-                    ctx.getAttributes("");
+                    @SuppressWarnings("BanJNDI")
+                    Attributes trash = ctx.getAttributes("");
                 } else if (!useTLS) {
                     // Authenticate
                     env.put(javax.naming.Context.SECURITY_AUTHENTICATION, "Simple");
@@ -671,7 +691,7 @@ public class LDAPAuthentication
         }
     }

-    /*
+    /**
     * Returns the URL of an external login page which is not applicable for this authn method.
    *
    * Note: Prior to DSpace 7, this method return the page of login servlet.
@@ -699,7 +719,7 @@ public class LDAPAuthentication
        return "ldap";
    }

-    /*
+    /**
     * Add authenticated users to the group defined in dspace.cfg by
     * the authentication-ldap.login.groupmap.* key.
     *
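One way to read the new Optional<String> parameter, judging only from the call sites above: empty means refresh the LDAP-sourced attributes but leave the stored netid alone, while a present value also binds the account to that identifier. A compressed fragment from inside the class (not compilable in isolation):

    // implicit re-login: refresh email/name/phone from LDAP, keep the stored netid
    setEpersonAttributes(context, eperson, ldap, Optional.empty());
    // first match by email, or account creation: additionally lock in the netid
    setEpersonAttributes(context, eperson, ldap, Optional.of(netid));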

SamlAuthentication.java (new file)

@@ -0,0 +1,711 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authenticate;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authenticate.factory.AuthenticateServiceFactory;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.NonUniqueMetadataException;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.eperson.service.GroupService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
* SAML authentication for DSpace.
*
* @author Ray Lee
*/
public class SamlAuthentication implements AuthenticationMethod {
private static final Logger log = LogManager.getLogger(SamlAuthentication.class);
// Additional metadata mappings.
protected Map<String, String> metadataHeaderMap = null;
protected EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService();
protected MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService();
protected MetadataSchemaService metadataSchemaService =
ContentServiceFactory.getInstance().getMetadataSchemaService();
protected ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
/**
* Authenticate the given or implicit credentials. This is the heart of the
* authentication method: test the credentials for authenticity, and if
* accepted, attempt to match (or optionally, create) an
* <code>EPerson</code>. If an <code>EPerson</code> is found it is set in
* the <code>Context</code> that was passed.
*
* DSpace supports authentication using NetID or email address. A user's NetID
* is a unique identifier from the IdP that identifies a particular user. The
* NetID can be of almost any form, such as a unique integer or string. In
* SAML, this is referred to as a Name ID.
*
* There are two ways to supply identity information to DSpace:
*
* 1) Name ID from SAML attribute (best)
*
* The Name ID-based method is superior because users may change their email
* address with the identity provider. When this happens DSpace will not be
* able to associate their new address with their old account.
*
* 2) Email address from SAML attribute (okay)
*
* In the case where a Name ID header is not available or not found DSpace
* will fall back to identifying a user based upon their email address.
*
* Identity Scheme Migration Strategies:
*
* If you are currently using Email based authentication (either 1 or 2) and
* want to upgrade to NetID based authentication then there is an easy path.
* Coordinate with the IdP to provide a Name ID in the SAML assertion. When a
* user attempts to log in, DSpace will first look for an EPerson with the
* passed Name ID. When this fails, DSpace will fall back to email based
* authentication. Then DSpace will update the user's EPerson account record
* to set their NetID, so all future authentications for this user will be based
* upon NetID.
*
* DSpace will prevent an account from switching NetIDs. If an account already
* has a NetID set, and a user tries to authenticate with the same email but
* a different NetID, the authentication will fail.
*
* @param context DSpace context, will be modified (EPerson set) upon success.
* @param username Not used by SAML-based authentication.
* @param password Not used by SAML-based authentication.
* @param realm Not used by SAML-based authentication.
* @param request The HTTP request that started this operation.
* @return one of: SUCCESS, NO_SUCH_USER, BAD_ARGS
* @throws SQLException if a database error occurs.
*/
@Override
public int authenticate(Context context, String username, String password,
String realm, HttpServletRequest request) throws SQLException {
if (request == null) {
log.warn("Unable to authenticate using SAML because the request object is null.");
return BAD_ARGS;
}
// Initialize additional EPerson metadata mappings.
initialize(context);
String nameId = findSingleAttribute(request, getNameIdAttributeName());
if (log.isDebugEnabled()) {
log.debug("Starting SAML Authentication");
log.debug("Received name ID: " + nameId);
}
// Should we auto register new users?
boolean autoRegister = configurationService.getBooleanProperty("authentication-saml.autoregister", true);
// Four steps to authenticate a user:
try {
// Step 1: Identify user
EPerson eperson = findEPerson(context, request);
// Step 2: Register new user, if necessary
if (eperson == null && autoRegister) {
eperson = registerNewEPerson(context, request);
}
if (eperson == null) {
return AuthenticationMethod.NO_SUCH_USER;
}
if (!eperson.canLogIn()) {
return AuthenticationMethod.BAD_ARGS;
}
// Step 3: Update user's metadata
updateEPerson(context, request, eperson);
// Step 4: Log the user in
context.setCurrentUser(eperson);
request.setAttribute("saml.authenticated", true);
AuthenticateServiceFactory.getInstance().getAuthenticationService().initEPerson(context, request, eperson);
log.info(eperson.getEmail() + " has been authenticated via SAML.");
return AuthenticationMethod.SUCCESS;
} catch (Throwable t) {
// Log the error, and undo the authentication before returning a failure.
log.error("Unable to successfully authenticate using SAML for user because of an exception.", t);
context.setCurrentUser(null);
return AuthenticationMethod.NO_SUCH_USER;
}
}
@Override
public List<Group> getSpecialGroups(Context context, HttpServletRequest request) throws SQLException {
return List.of();
}
@Override
public boolean allowSetPassword(Context context, HttpServletRequest request, String email) throws SQLException {
// SAML authentication doesn't use a password.
return false;
}
@Override
public boolean isImplicit() {
return false;
}
@Override
public boolean canSelfRegister(Context context, HttpServletRequest request,
String username) throws SQLException {
// SAML will auto create accounts if configured to do so, but that is not
// the same as self register. Self register means that the user can sign up for
// an account from the web. This is not supported with SAML.
return false;
}
@Override
public void initEPerson(Context context, HttpServletRequest request,
EPerson eperson) throws SQLException {
// We don't do anything because all our work is done in authenticate.
}
/**
* Returns the URL in the SAML relying party service that initiates a login with the IdP,
* as configured.
*
* @see AuthenticationMethod#loginPageURL(Context, HttpServletRequest, HttpServletResponse)
*/
@Override
public String loginPageURL(Context context, HttpServletRequest request, HttpServletResponse response) {
String samlLoginUrl = configurationService.getProperty("authentication-saml.authenticate-endpoint");
return response.encodeRedirectURL(samlLoginUrl);
}
@Override
public String getName() {
return "saml";
}
/**
* Check if the SAML plugin is enabled.
*
* @return true if enabled, false otherwise
*/
public static boolean isEnabled() {
final String samlPluginName = new SamlAuthentication().getName();
boolean samlEnabled = false;
// Loop through all enabled authentication plugins to see if SAML is one of them.
Iterator<AuthenticationMethod> authenticationMethodIterator =
AuthenticateServiceFactory.getInstance().getAuthenticationService().authenticationMethodIterator();
while (authenticationMethodIterator.hasNext()) {
if (samlPluginName.equals(authenticationMethodIterator.next().getName())) {
samlEnabled = true;
break;
}
}
return samlEnabled;
}
/**
* Identify an existing EPerson based upon the SAML attributes provided on
* the request object.
*
* 1) Name ID from SAML attribute (best)
* The Name ID-based method is superior because users may change their email
* address with the identity provider. When this happens DSpace will not be
* able to associate their new address with their old account.
*
* 2) Email address from SAML attribute (okay)
* In the case where a Name ID header is not available or not found DSpace
* will fall back to identifying a user based upon their email address.
*
* If successful then the identified EPerson will be returned, otherwise null.
*
* @param context The DSpace database context
* @param request The current HTTP Request
* @return The EPerson identified or null.
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
*/
protected EPerson findEPerson(Context context, HttpServletRequest request) throws SQLException, AuthorizeException {
String nameId = findSingleAttribute(request, getNameIdAttributeName());
if (nameId != null) {
EPerson ePerson = ePersonService.findByNetid(context, nameId);
if (ePerson == null) {
log.info("Unable to identify EPerson by netid (SAML name ID): " + nameId);
} else {
log.info("Identified EPerson by netid (SAML name ID): " + nameId);
return ePerson;
}
}
String emailAttributeName = getEmailAttributeName();
String email = findSingleAttribute(request, emailAttributeName);
if (email != null) {
email = email.toLowerCase();
EPerson ePerson = ePersonService.findByEmail(context, email);
if (ePerson == null) {
log.info("Unable to identify EPerson by email: " + emailAttributeName + "=" + email);
} else {
log.info("Identified EPerson by email: " + emailAttributeName + "=" + email);
if (ePerson.getNetid() == null) {
return ePerson;
}
// The user has a netid that differs from the received SAML name ID.
log.error("SAML authentication identified EPerson by email: " + emailAttributeName + "=" + email);
log.error("Received SAML name ID: " + nameId);
log.error("EPerson has netid: " + ePerson.getNetid());
log.error(
"The SAML name ID is expected to be the same as the EPerson netid. " +
"This might be a hacking attempt to steal another user's credentials. If the " +
"user's netid has changed you will need to manually change it to the correct " +
"value or unset it in the database.");
}
}
if (nameId == null && email == null) {
log.error(
"SAML authentication did not find a name ID or email in the request from which to indentify a user");
}
return null;
}
/**
* Register a new EPerson. This method is called when no existing user was
* found for the NetID or email and autoregister is enabled. When these conditions
* are met this method will create a new EPerson object.
*
* In order to create a new EPerson object there is a minimal set of metadata
* required: email, first name, and last name. If we don't have access to these
* three pieces of information then we will be unable to create a new EPerson.
*
* Note that this method only adds the minimal metadata. Any additional metadata
* will need to be added by the updateEPerson method.
*
* @param context The current DSpace database context
* @param request The current HTTP Request
* @return A new EPerson object or null if unable to create a new EPerson.
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
*/
protected EPerson registerNewEPerson(Context context, HttpServletRequest request)
throws SQLException, AuthorizeException {
String nameId = findSingleAttribute(request, getNameIdAttributeName());
String emailAttributeName = getEmailAttributeName();
String firstNameAttributeName = getFirstNameAttributeName();
String lastNameAttributeName = getLastNameAttributeName();
String email = findSingleAttribute(request, emailAttributeName);
String firstName = findSingleAttribute(request, firstNameAttributeName);
String lastName = findSingleAttribute(request, lastNameAttributeName);
if (email == null || firstName == null || lastName == null) {
// We require that there be an email, first name, and last name.
String message = "Unable to register new eperson because we are unable to find an email address, " +
"first name, and last name for the user.\n";
message += " name ID: " + nameId + "\n";
message += " email: " + emailAttributeName + "=" + email + "\n";
message += " first name: " + firstNameAttributeName + "=" + firstName + "\n";
message += " last name: " + lastNameAttributeName + "=" + lastName;
log.error(message);
return null;
}
try {
context.turnOffAuthorisationSystem();
EPerson ePerson = ePersonService.create(context);
// Set the minimum attributes for the new eperson
if (nameId != null) {
ePerson.setNetid(nameId);
}
ePerson.setEmail(email.toLowerCase());
ePerson.setFirstName(context, firstName);
ePerson.setLastName(context, lastName);
ePerson.setCanLogIn(true);
ePerson.setSelfRegistered(true);
// Commit the new eperson
AuthenticateServiceFactory.getInstance().getAuthenticationService().initEPerson(context, request, ePerson);
ePersonService.update(context, ePerson);
context.dispatchEvents();
if (log.isInfoEnabled()) {
String message = "Auto registered new eperson using SAML attributes:\n";
message += " netid: " + ePerson.getNetid() + "\n";
message += " email: " + ePerson.getEmail() + "\n";
message += " firstName: " + ePerson.getFirstName() + "\n";
message += " lastName: " + ePerson.getLastName();
log.info(message);
}
return ePerson;
} catch (SQLException | AuthorizeException e) {
log.error(e.getMessage(), e);
throw e;
} finally {
context.restoreAuthSystemState();
}
}
/**
* After we successfully authenticated a user, this method will update the user's attributes. The
* user's email, name, or other attribute may have been changed since the last time they
* logged into DSpace. This method will update the database with their most recent information.
*
* This method handles the basic DSpace metadata (email, first name, last name) along with
* additional metadata set using the setMetadata() methods on the EPerson object. The
* additional metadata mappings are defined in configuration.
*
* @param context The current DSpace database context
* @param request The current HTTP Request
* @param eperson The eperson object to update.
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
*/
protected void updateEPerson(Context context, HttpServletRequest request, EPerson eperson)
throws SQLException, AuthorizeException {
String nameId = findSingleAttribute(request, getNameIdAttributeName());
String emailAttributeName = getEmailAttributeName();
String firstNameAttributeName = getFirstNameAttributeName();
String lastNameAttributeName = getLastNameAttributeName();
String email = findSingleAttribute(request, emailAttributeName);
String firstName = findSingleAttribute(request, firstNameAttributeName);
String lastName = findSingleAttribute(request, lastNameAttributeName);
try {
context.turnOffAuthorisationSystem();
// 1) Update the minimum metadata
// Only update the netid if none has been previously set. This can occur when a repo switches
// to netid based authentication. The current users do not have netids and fall back to email-based
// identification but once they login we update their record and lock the account to a particular netid.
if (nameId != null && eperson.getNetid() == null) {
eperson.setNetid(nameId);
}
// The email could have changed if using netid based lookup.
if (email != null) {
eperson.setEmail(email.toLowerCase());
}
if (firstName != null) {
eperson.setFirstName(context, firstName);
}
if (lastName != null) {
eperson.setLastName(context, lastName);
}
if (log.isDebugEnabled()) {
String message = "Updated the eperson's minimal metadata: \n";
message += " Email: " + emailAttributeName + "=" + email + "' \n";
message += " First name: " + firstNameAttributeName + "=" + firstName + "\n";
message += " Last name: " + lastNameAttributeName + "=" + lastName;
log.debug(message);
}
// 2) Update additional eperson metadata
for (String attributeName : metadataHeaderMap.keySet()) {
String metadataFieldName = metadataHeaderMap.get(attributeName);
String value = findSingleAttribute(request, attributeName);
// Skip attributes that are missing from the request
if (value == null) {
log.warn("Unable to update the eperson's '{}' metadata"
+ " because the attribute '{}' does not exist.", metadataFieldName, attributeName);
continue;
}
ePersonService.setMetadataSingleValue(context, eperson,
MetadataSchemaEnum.EPERSON.getName(), metadataFieldName, null, null, value);
log.debug("Updated the eperson's {} metadata using attribute: {}={}",
metadataFieldName, attributeName, value);
}
ePersonService.update(context, eperson);
context.dispatchEvents();
} catch (SQLException | AuthorizeException e) {
log.error(e.getMessage(), e);
throw e;
} finally {
context.restoreAuthSystemState();
}
}
/**
* Initialize SAML Authentication.
*
* During initialization the mapping of additional EPerson metadata will be loaded from the configuration
* and cached. While loading the metadata mapping this method will check the EPerson object to see
* if it supports the metadata field. If the field is not supported and autocreate is turned on then
* the field will be automatically created.
*
* It is safe to call this method multiple times.
*
* @param context context
* @throws SQLException if database error
*/
protected synchronized void initialize(Context context) throws SQLException {
if (metadataHeaderMap != null) {
return;
}
HashMap<String, String> map = new HashMap<>();
String[] mappingString = configurationService.getArrayProperty("authentication-saml.eperson.metadata");
boolean autoCreate = configurationService
.getBooleanProperty("authentication-saml.eperson.metadata.autocreate", false);
// Bail out if not set, returning an empty map.
if (mappingString == null || mappingString.length == 0) {
log.debug("No additional eperson metadata mapping found: authentication-saml.eperson.metadata");
metadataHeaderMap = map;
return;
}
log.debug("Loading additional eperson metadata from: authentication-saml.eperson.metadata="
+ StringUtils.join(mappingString, ","));
for (String metadataString : mappingString) {
metadataString = metadataString.trim();
String[] metadataParts = metadataString.split("=>");
if (metadataParts.length != 2) {
log.error("Unable to parse metadata mapping string: '" + metadataString + "'");
continue;
}
String attributeName = metadataParts[0].trim();
String metadataFieldName = metadataParts[1].trim().toLowerCase();
boolean valid = checkIfEPersonMetadataFieldExists(context, metadataFieldName);
if (!valid && autoCreate) {
valid = autoCreateEPersonMetadataField(context, metadataFieldName);
}
if (valid) {
// The eperson field is fine, we can use it.
log.debug("Loading additional eperson metadata mapping for: {}={}",
attributeName, metadataFieldName);
map.put(attributeName, metadataFieldName);
} else {
// The field doesn't exist, and we can't use it.
log.error("Skipping the additional eperson metadata mapping for: {}={}"
+ " because the field is not supported by the current configuration.",
attributeName, metadataFieldName);
}
}
metadataHeaderMap = map;
}
/**
* Check if a metadata field for an EPerson is available.
*
* @param metadataName The name of the metadata field.
* @param context context
* @return True if a valid metadata field, otherwise false.
* @throws SQLException if database error
*/
protected synchronized boolean checkIfEPersonMetadataFieldExists(Context context, String metadataName)
throws SQLException {
if (metadataName == null) {
return false;
}
MetadataField metadataField = metadataFieldService.findByElement(
context, MetadataSchemaEnum.EPERSON.getName(), metadataName, null);
return metadataField != null;
}
/**
* Validate metadata field names
*/
protected final String FIELD_NAME_REGEX = "^[_A-Za-z0-9]+$";
/**
* Automatically create a new metadata field for an EPerson
*
* @param context context
* @param metadataName The name of the new metadata field.
* @return True if successful, otherwise false.
* @throws SQLException if database error
*/
protected synchronized boolean autoCreateEPersonMetadataField(Context context, String metadataName)
throws SQLException {
if (metadataName == null) {
return false;
}
if (!metadataName.matches(FIELD_NAME_REGEX)) {
return false;
}
MetadataSchema epersonSchema = metadataSchemaService.find(context, "eperson");
MetadataField metadataField = null;
try {
context.turnOffAuthorisationSystem();
metadataField = metadataFieldService.create(context, epersonSchema, metadataName, null, null);
} catch (AuthorizeException | NonUniqueMetadataException e) {
log.error(e.getMessage(), e);
return false;
} finally {
context.restoreAuthSystemState();
}
return metadataField != null;
}
@Override
public boolean isUsed(final Context context, final HttpServletRequest request) {
if (request != null &&
context.getCurrentUser() != null &&
request.getAttribute("saml.authenticated") != null
) {
return true;
}
return false;
}
@Override
public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword) {
return false;
}
private String findSingleAttribute(HttpServletRequest request, String name) {
if (StringUtils.isBlank(name)) {
return null;
}
Object value = request.getAttribute(name);
if (value instanceof List) {
List<?> list = (List<?>) value;
if (list.size() == 0) {
value = null;
} else {
value = list.get(0);
}
}
return (value == null ? null : value.toString());
}
private String getNameIdAttributeName() {
return configurationService.getProperty("authentication-saml.attribute.name-id", "org.dspace.saml.NAME_ID");
}
private String getEmailAttributeName() {
return configurationService.getProperty("authentication-saml.attribute.email", "org.dspace.saml.EMAIL");
}
private String getFirstNameAttributeName() {
return configurationService.getProperty("authentication-saml.attribute.first-name",
"org.dspace.saml.GIVEN_NAME");
}
private String getLastNameAttributeName() {
return configurationService.getProperty("authentication-saml.attribute.last-name", "org.dspace.saml.SURNAME");
}
}
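The "attribute => eperson field" mapping format parsed by initialize() is worth a concrete illustration. A sketch of one entry working through the code above; the property value itself is hypothetical:

    // authentication-saml.eperson.metadata = org.dspace.saml.PHONE => phone
    class SamlMappingSketch {
        static String[] parse(String entry) {
            String[] parts = entry.trim().split("=>");                    // ["org.dspace.saml.PHONE ", " phone"]
            String attributeName = parts[0].trim();                       // request attribute to read
            String metadataFieldName = parts[1].trim().toLowerCase();     // eperson metadata field to write
            return new String[] { attributeName, metadataFieldName };
        }
    }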

AuthorizeConfiguration.java

@@ -174,9 +174,9 @@ public class AuthorizeConfiguration {
      *
      * @return true/false
      */
-    public static boolean canCommunityAdminPerformItemReinstatiate() {
+    public static boolean canCommunityAdminPerformItemReinstate() {
         init();
-        return configurationService.getBooleanProperty("core.authorization.community-admin.item.reinstatiate", true);
+        return configurationService.getBooleanProperty("core.authorization.community-admin.item.reinstate", true);
     }

     /**
@@ -306,9 +306,9 @@ public class AuthorizeConfiguration {
      *
      * @return true/false
      */
-    public static boolean canCollectionAdminPerformItemReinstatiate() {
+    public static boolean canCollectionAdminPerformItemReinstate() {
         init();
-        return configurationService.getBooleanProperty("core.authorization.collection-admin.item.reinstatiate", true);
+        return configurationService.getBooleanProperty("core.authorization.collection-admin.item.reinstate", true);
     }

     /**
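Since both the method names and the backing property keys drop the misspelling, any locally overridden configuration needs the same rename. A fragment showing the corrected callers (the old keys are the pre-change spellings visible in the hunk above):

    // old keys no longer consulted:
    //   core.authorization.community-admin.item.reinstatiate
    //   core.authorization.collection-admin.item.reinstatiate
    boolean communityOk = AuthorizeConfiguration.canCommunityAdminPerformItemReinstate();
    boolean collectionOk = AuthorizeConfiguration.canCollectionAdminPerformItemReinstate();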

BrowseEngine.java

@@ -422,9 +422,6 @@ public class BrowseEngine {
             }
         }

-        // this is the total number of results in answer to the query
-        int total = getTotalResults(true);
-
         // set the ordering field (there is only one option)
         dao.setOrderField("sort_value");
@@ -444,6 +441,9 @@ public class BrowseEngine {
         dao.setOffset(offset);
         dao.setLimit(scope.getResultsPerPage());

+        // this is the total number of results in answer to the query
+        int total = getTotalResults(true);
+
         // Holder for the results
         List<String[]> results = null;
@@ -680,33 +680,9 @@ public class BrowseEngine {
         // tell the browse query whether we are distinct
         dao.setDistinct(distinct);

-        // ensure that the select is set to "*"
-        String[] select = {"*"};
-        dao.setCountValues(select);
-
-        // FIXME: it would be nice to have a good way of doing this in the DAO
-        // now reset all of the fields that we don't want to have constraining
-        // our count, storing them locally to reinstate later
-        String focusField = dao.getJumpToField();
-        String focusValue = dao.getJumpToValue();
-        int limit = dao.getLimit();
-        int offset = dao.getOffset();
-        dao.setJumpToField(null);
-        dao.setJumpToValue(null);
-        dao.setLimit(-1);
-        dao.setOffset(-1);
-
         // perform the query and get the result
         int count = dao.doCountQuery();

-        // now put back the values we removed for this method
-        dao.setJumpToField(focusField);
-        dao.setJumpToValue(focusValue);
-        dao.setLimit(limit);
-        dao.setOffset(offset);
-        dao.setCountValues(null);
-
         log.debug(LogHelper.getHeader(context, "get_total_results_return", "return=" + count));

         return count;


@@ -543,19 +543,6 @@ public class BrowseIndex {
         return getTableName(false, false, true, false);
     }

-    /**
-     * Get the name of the column that is used to store the default value column
-     *
-     * @return the name of the value column
-     */
-    public String getValueColumn() {
-        if (!isDate()) {
-            return "sort_text_value";
-        } else {
-            return "text_value";
-        }
-    }
-
     /**
      * Get the name of the primary key index column
      *
@@ -565,35 +552,6 @@ public class BrowseIndex {
         return "id";
     }

-    /**
-     * Is this browse index type for a title?
-     *
-     * @return true if title type, false if not
-     */
-//    public boolean isTitle()
-//    {
-//        return "title".equals(getDataType());
-//    }
-
-    /**
-     * Is the browse index type for a date?
-     *
-     * @return true if date type, false if not
-     */
-    public boolean isDate() {
-        return "date".equals(getDataType());
-    }
-
-    /**
-     * Is the browse index type for a plain text type?
-     *
-     * @return true if plain text type, false if not
-     */
-//    public boolean isText()
-//    {
-//        return "text".equals(getDataType());
-//    }
-
     /**
      * Is the browse index of display type single?
      *


@@ -13,6 +13,8 @@ import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;

+import com.fasterxml.jackson.databind.node.JsonNodeFactory;
+import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.logging.log4j.Logger;
 import org.apache.solr.client.solrj.util.ClientUtils;
@@ -180,18 +182,33 @@ public class SolrBrowseDAO implements BrowseDAO {
         addDefaultFilterQueries(query);
         if (distinct) {
             DiscoverFacetField dff;
+            // To get the number of distinct values we use the next "json.facet" query param
+            // {"entries_count": {"type":"terms","field": "<fieldName>_filter", "limit":0, "numBuckets":true}}
+            ObjectNode jsonFacet = JsonNodeFactory.instance.objectNode();
+            ObjectNode entriesCount = JsonNodeFactory.instance.objectNode();
+            entriesCount.put("type", "terms");
+            entriesCount.put("field", facetField + "_filter");
+            entriesCount.put("limit", 0);
+            entriesCount.put("numBuckets", true);
+            jsonFacet.set("entries_count", entriesCount);
             if (StringUtils.isNotBlank(startsWith)) {
                 dff = new DiscoverFacetField(facetField,
-                    DiscoveryConfigurationParameters.TYPE_TEXT, -1,
-                    DiscoveryConfigurationParameters.SORT.VALUE, startsWith);
+                    DiscoveryConfigurationParameters.TYPE_TEXT, limit,
+                    DiscoveryConfigurationParameters.SORT.VALUE, startsWith, offset);
+
+                // Add the prefix to the json facet query
+                entriesCount.put("prefix", startsWith);
             } else {
                 dff = new DiscoverFacetField(facetField,
-                    DiscoveryConfigurationParameters.TYPE_TEXT, -1,
-                    DiscoveryConfigurationParameters.SORT.VALUE);
+                    DiscoveryConfigurationParameters.TYPE_TEXT, limit,
+                    DiscoveryConfigurationParameters.SORT.VALUE, offset);
             }
             query.addFacetField(dff);
             query.setFacetMinCount(1);
             query.setMaxResults(0);
+            query.addProperty("json.facet", jsonFacet.toString());
         } else {
             query.setMaxResults(limit/* > 0 ? limit : 20*/);
             if (offset > 0) {
@@ -248,8 +265,7 @@ public class SolrBrowseDAO implements BrowseDAO {
         DiscoverResult resp = getSolrResponse();
         int count = 0;
         if (distinct) {
-            List<FacetResult> facetResults = resp.getFacetResult(facetField);
-            count = facetResults.size();
+            count = (int) resp.getTotalEntries();
         } else {
             // we need to cast to int to respect the BrowseDAO contract...
             count = (int) resp.getTotalSearchResults();
@@ -266,8 +282,8 @@ public class SolrBrowseDAO implements BrowseDAO {
         DiscoverResult resp = getSolrResponse();
         List<FacetResult> facet = resp.getFacetResult(facetField);
         int count = doCountQuery();
-        int start = offset > 0 ? offset : 0;
-        int max = limit > 0 ? limit : count; //if negative, return everything
+        int start = 0;
+        int max = facet.size();
         List<String[]> result = new ArrayList<>();
         if (ascending) {
             for (int i = start; i < (start + max) && i < count; i++) {
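For reference, the json.facet parameter assembled above serializes to a single JSON object; a self-contained sketch that prints it (the field name subject_filter is hypothetical):

    import com.fasterxml.jackson.databind.node.JsonNodeFactory;
    import com.fasterxml.jackson.databind.node.ObjectNode;

    public class JsonFacetSketch {
        public static void main(String[] args) {
            ObjectNode entriesCount = JsonNodeFactory.instance.objectNode();
            entriesCount.put("type", "terms");
            entriesCount.put("field", "subject_filter");
            entriesCount.put("limit", 0);
            entriesCount.put("numBuckets", true);
            ObjectNode jsonFacet = JsonNodeFactory.instance.objectNode();
            jsonFacet.set("entries_count", entriesCount);
            System.out.println(jsonFacet);
            // {"entries_count":{"type":"terms","field":"subject_filter","limit":0,"numBuckets":true}}
        }
    }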


@@ -67,6 +67,7 @@ import org.dspace.event.Event;
 import org.dspace.harvest.HarvestedItem;
 import org.dspace.harvest.service.HarvestedItemService;
 import org.dspace.identifier.DOI;
+import org.dspace.identifier.DOIIdentifierProvider;
 import org.dspace.identifier.IdentifierException;
 import org.dspace.identifier.service.DOIService;
 import org.dspace.identifier.service.IdentifierService;
@@ -81,6 +82,9 @@ import org.dspace.orcid.service.OrcidTokenService;
 import org.dspace.profile.service.ResearcherProfileService;
 import org.dspace.qaevent.dao.QAEventsDAO;
 import org.dspace.services.ConfigurationService;
+import org.dspace.versioning.Version;
+import org.dspace.versioning.VersionHistory;
+import org.dspace.versioning.service.VersionHistoryService;
 import org.dspace.versioning.service.VersioningService;
 import org.dspace.workflow.WorkflowItemService;
 import org.dspace.workflow.factory.WorkflowServiceFactory;
@@ -176,6 +180,9 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
     @Autowired
     private QAEventsDAO qaEventsDao;

+    @Autowired
+    private VersionHistoryService versionHistoryService;
+
     protected ItemServiceImpl() {
     }
@@ -851,6 +858,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
         DOI doi = doiService.findDOIByDSpaceObject(context, item);
         if (doi != null) {
             doi.setDSpaceObject(null);
+            doi.setStatus(DOIIdentifierProvider.TO_BE_DELETED);
         }

         // remove version attached to the item
@@ -1931,4 +1939,40 @@ prevent the generation of resource policy entry values with null dspace_object a
         }
     }

+    @Override
+    public boolean isLatestVersion(Context context, Item item) throws SQLException {
+        VersionHistory history = versionHistoryService.findByItem(context, item);
+        if (history == null) {
+            // not all items have a version history;
+            // if an item does not have a version history, it is by definition the latest version
+            return true;
+        }
+
+        // start with the very latest version of the given item (may still be in workspace)
+        Version latestVersion = versionHistoryService.getLatestVersion(context, history);
+
+        // find the latest version of the given item that is archived
+        while (latestVersion != null && !latestVersion.getItem().isArchived()) {
+            latestVersion = versionHistoryService.getPrevious(context, history, latestVersion);
+        }
+
+        // could not find an archived version of the given item
+        if (latestVersion == null) {
+            // this scenario should never happen, but let's err on the side of showing too many items vs. too few
+            // (see discovery.xml, a lot of discovery configs filter out all items that are not the latest version)
+            return true;
+        }
+
+        // sanity check
+        assert latestVersion.getItem().isArchived();
+
+        return item.equals(latestVersion.getItem());
+    }
 }
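The method above steps backwards from the newest version until it reaches one whose item is archived. A self-contained analogue of that backwards walk (types and data are hypothetical stand-ins, not the DSpace API):

    import java.util.List;

    public class LatestArchivedSketch {
        // Stand-in for a version entry: newest-first ordering, like getLatestVersion/getPrevious.
        record Version(int number, boolean archived) { }

        // Walk from the newest version backwards until an archived one is found.
        static Version latestArchived(List<Version> newestFirst) {
            for (Version v : newestFirst) {
                if (v.archived()) {
                    return v;
                }
            }
            return null; // no archived version at all
        }

        public static void main(String[] args) {
            List<Version> history = List.of(
                new Version(3, false),  // newest, still in workspace
                new Version(2, true),
                new Version(1, true));
            System.out.println(latestArchived(history).number()); // prints 2
        }
    }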


@@ -178,6 +178,14 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
     @Override
     public WorkspaceItem create(Context c, WorkflowItem workflowItem) throws SQLException, AuthorizeException {
+        WorkspaceItem potentialDuplicate = findByItem(c, workflowItem.getItem());
+        if (potentialDuplicate != null) {
+            throw new IllegalArgumentException(String.format(
+                "A workspace item referring to item %s already exists (%d)",
+                workflowItem.getItem().getID(),
+                potentialDuplicate.getID()
+            ));
+        }
         WorkspaceItem workspaceItem = workspaceItemDAO.create(c, new WorkspaceItem());
         workspaceItem.setItem(workflowItem.getItem());
         workspaceItem.setCollection(workflowItem.getCollection());


@@ -8,6 +8,7 @@
 package org.dspace.content.authority;

 import java.io.File;
+import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -65,14 +66,17 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
     protected static String labelTemplate = "//node[@label = '%s']";
     protected static String idParentTemplate = "//node[@id = '%s']/parent::isComposedBy/parent::node";
     protected static String rootTemplate = "/node";
+    protected static String idAttribute = "id";
+    protected static String labelAttribute = "label";
     protected static String pluginNames[] = null;

     protected String vocabularyName = null;
     protected InputSource vocabulary = null;
     protected Boolean suggestHierarchy = false;
     protected Boolean storeHierarchy = true;
     protected String hierarchyDelimiter = "::";
     protected Integer preloadLevel = 1;
+    protected String valueAttribute = labelAttribute;
+    protected String valueTemplate = labelTemplate;

     public DSpaceControlledVocabulary() {
         super();
@@ -115,7 +119,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
         }
     }

-    protected void init() {
+    protected void init(String locale) {
         if (vocabulary == null) {
             ConfigurationService config = DSpaceServicesFactory.getInstance().getConfigurationService();
@@ -125,13 +129,25 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
                 File.separator + "controlled-vocabularies" + File.separator;
             String configurationPrefix = "vocabulary.plugin." + vocabularyName;
             storeHierarchy = config.getBooleanProperty(configurationPrefix + ".hierarchy.store", storeHierarchy);
+            boolean storeIDs = config.getBooleanProperty(configurationPrefix + ".storeIDs", false);
             suggestHierarchy = config.getBooleanProperty(configurationPrefix + ".hierarchy.suggest", suggestHierarchy);
             preloadLevel = config.getIntProperty(configurationPrefix + ".hierarchy.preloadLevel", preloadLevel);
             String configuredDelimiter = config.getProperty(configurationPrefix + ".delimiter");
             if (configuredDelimiter != null) {
                 hierarchyDelimiter = configuredDelimiter.replaceAll("(^\"|\"$)", "");
             }
+            if (storeIDs) {
+                valueAttribute = idAttribute;
+                valueTemplate = idTemplate;
+            }
             String filename = vocabulariesPath + vocabularyName + ".xml";
+            if (StringUtils.isNotEmpty(locale)) {
+                String localizedFilename = vocabulariesPath + vocabularyName + "_" + locale + ".xml";
+                if (Paths.get(localizedFilename).toFile().exists()) {
+                    filename = localizedFilename;
+                }
+            }
             log.info("Loading " + filename);
             vocabulary = new InputSource(filename);
         }
@@ -144,9 +160,9 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
             return ("");
         } else {
             String parentValue = buildString(node.getParentNode());
-            Node currentLabel = node.getAttributes().getNamedItem("label");
-            if (currentLabel != null) {
-                String currentValue = currentLabel.getNodeValue();
+            Node currentNodeValue = node.getAttributes().getNamedItem(valueAttribute);
+            if (currentNodeValue != null) {
+                String currentValue = currentNodeValue.getNodeValue();
                 if (parentValue.equals("")) {
                     return currentValue;
                 } else {
@@ -160,12 +176,13 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera

     @Override
     public Choices getMatches(String text, int start, int limit, String locale) {
-        init();
+        init(locale);
         log.debug("Getting matches for '" + text + "'");
         String xpathExpression = "";
         String[] textHierarchy = text.split(hierarchyDelimiter, -1);
         for (int i = 0; i < textHierarchy.length; i++) {
-            xpathExpression += String.format(xpathTemplate, textHierarchy[i].replaceAll("'", "&apos;").toLowerCase());
+            xpathExpression +=
+                String.format(xpathTemplate, textHierarchy[i].replaceAll("'", "&apos;").toLowerCase());
         }
         XPath xpath = XPathFactory.newInstance().newXPath();
         int total = 0;
@@ -184,12 +201,13 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera

     @Override
     public Choices getBestMatch(String text, String locale) {
-        init();
+        init(locale);
         log.debug("Getting best matches for '" + text + "'");
         String xpathExpression = "";
         String[] textHierarchy = text.split(hierarchyDelimiter, -1);
         for (int i = 0; i < textHierarchy.length; i++) {
-            xpathExpression += String.format(labelTemplate, textHierarchy[i].replaceAll("'", "&apos;"));
+            xpathExpression +=
+                String.format(valueTemplate, textHierarchy[i].replaceAll("'", "&apos;"));
         }
         XPath xpath = XPathFactory.newInstance().newXPath();
         List<Choice> choices = new ArrayList<Choice>();
@@ -205,19 +223,19 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera

     @Override
     public String getLabel(String key, String locale) {
-        return getNodeLabel(key, this.suggestHierarchy);
+        return getNodeValue(key, locale, this.suggestHierarchy);
     }

     @Override
     public String getValue(String key, String locale) {
-        return getNodeLabel(key, this.storeHierarchy);
+        return getNodeValue(key, locale, this.storeHierarchy);
     }

     @Override
     public Choice getChoice(String authKey, String locale) {
         Node node;
         try {
-            node = getNode(authKey);
+            node = getNode(authKey, locale);
         } catch (XPathExpressionException e) {
             return null;
         }
@@ -226,27 +244,27 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera

     @Override
     public boolean isHierarchical() {
-        init();
+        init(null);
         return true;
     }

     @Override
     public Choices getTopChoices(String authorityName, int start, int limit, String locale) {
-        init();
+        init(locale);
         String xpathExpression = rootTemplate;
         return getChoicesByXpath(xpathExpression, start, limit);
     }

     @Override
     public Choices getChoicesByParent(String authorityName, String parentId, int start, int limit, String locale) {
-        init();
+        init(locale);
         String xpathExpression = String.format(idTemplate, parentId);
         return getChoicesByXpath(xpathExpression, start, limit);
     }

     @Override
     public Choice getParentChoice(String authorityName, String childId, String locale) {
-        init();
+        init(locale);
         try {
             String xpathExpression = String.format(idParentTemplate, childId);
             Choice choice = createChoiceFromNode(getNodeFromXPath(xpathExpression));
@@ -259,7 +277,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera

     @Override
     public Integer getPreloadLevel() {
-        init();
+        init(null);
         return preloadLevel;
     }
@@ -270,8 +288,8 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
         return false;
     }

-    private Node getNode(String key) throws XPathExpressionException {
-        init();
+    private Node getNode(String key, String locale) throws XPathExpressionException {
+        init(locale);
         String xpathExpression = String.format(idTemplate, key);
         Node node = getNodeFromXPath(xpathExpression);
         return node;
@@ -319,16 +337,16 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
         return extras;
     }

-    private String getNodeLabel(String key, boolean useHierarchy) {
+    private String getNodeValue(String key, String locale, boolean useHierarchy) {
         try {
-            Node node = getNode(key);
+            Node node = getNode(key, locale);
             if (Objects.isNull(node)) {
                 return null;
             }
             if (useHierarchy) {
                 return this.buildString(node);
             } else {
-                return node.getAttributes().getNamedItem("label").getNodeValue();
+                return node.getAttributes().getNamedItem(valueAttribute).getNodeValue();
             }
         } catch (XPathExpressionException e) {
             return ("");
@@ -349,7 +367,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
         if (this.storeHierarchy) {
             return hierarchy;
         } else {
-            return node.getAttributes().getNamedItem("label").getNodeValue();
+            return node.getAttributes().getNamedItem(valueAttribute).getNodeValue();
         }
     }
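The storeIDs switch above changes which XML attribute the stored value is read from. A self-contained sketch of that lookup (the vocabulary snippet and the system-property stand-in for the storeIDs setting are hypothetical):

    import java.io.StringReader;
    import javax.xml.xpath.XPath;
    import javax.xml.xpath.XPathConstants;
    import javax.xml.xpath.XPathFactory;
    import org.w3c.dom.Node;
    import org.xml.sax.InputSource;

    public class VocabularyValueSketch {
        public static void main(String[] args) throws Exception {
            String xml = "<node id=\"srsc\" label=\"Research Subjects\"><isComposedBy>"
                + "<node id=\"SCB11\" label=\"Mathematics\"/></isComposedBy></node>";
            XPath xpath = XPathFactory.newInstance().newXPath();
            Node node = (Node) xpath.evaluate("//node[@id = 'SCB11']",
                new InputSource(new StringReader(xml)), XPathConstants.NODE);
            // With storeIDs enabled the value comes from "id" instead of "label".
            String valueAttribute = Boolean.getBoolean("storeIDs") ? "id" : "label";
            System.out.println(node.getAttributes().getNamedItem(valueAttribute).getNodeValue());
            // prints "Mathematics" by default, "SCB11" with -DstoreIDs=true
        }
    }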


@@ -1009,4 +1009,14 @@ public interface ItemService
      */
     EntityType getEntityType(Context context, Item item) throws SQLException;

+    /**
+     * Check whether the given item is the latest version. If the latest item cannot
+     * be determined, because either the version history or the latest version is
+     * not present, assume the item is latest.
+     *
+     * @param context the DSpace context.
+     * @param item    the item that should be checked.
+     * @return true if the item is the latest version, false otherwise.
+     */
+    public boolean isLatestVersion(Context context, Item item) throws SQLException;
 }


@@ -313,7 +313,7 @@ public abstract class AbstractHibernateDAO<T> implements GenericDAO<T> {
         org.hibernate.query.Query hquery = query.unwrap(org.hibernate.query.Query.class);
         Stream<T> stream = hquery.stream();
         Iterator<T> iter = stream.iterator();
-        return new AbstractIterator<T> () {
+        return new AbstractIterator<T>() {
             @Override
             protected T computeNext() {
                 return iter.hasNext() ? iter.next() : endOfData();


@@ -883,7 +883,19 @@ public class Context implements AutoCloseable {
     }

     /**
-     * Remove an entity from the cache. This is necessary when batch processing a large number of items.
+     * Remove all entities from the cache and reload the current user entity. This is useful when batch processing
+     * a large number of entities when the calling code requires the cache to be completely cleared before continuing.
+     *
+     * @throws SQLException if a database error occurs.
+     */
+    public void uncacheEntities() throws SQLException {
+        dbConnection.uncacheEntities();
+        reloadContextBoundEntities();
+    }
+
+    /**
+     * Remove an entity from the cache. This is useful when batch processing a large number of entities
+     * when the calling code needs to retain some items in the cache while removing others.
      *
      * @param entity The entity to reload
      * @param <E>    The class of the entity. The entity must implement the {@link ReloadableEntity} interface.
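A hedged usage sketch of the new method (assumes a running DSpace kernel and an open Context; the batch size of 100 is an arbitrary choice for illustration):

    import java.sql.SQLException;
    import java.util.Iterator;
    import org.dspace.content.Item;
    import org.dspace.content.factory.ContentServiceFactory;
    import org.dspace.content.service.ItemService;
    import org.dspace.core.Context;

    public class BatchUncacheSketch {
        public static void processAll(Context context) throws SQLException {
            ItemService itemService = ContentServiceFactory.getInstance().getItemService();
            Iterator<Item> items = itemService.findAll(context);
            int processed = 0;
            while (items.hasNext()) {
                Item item = items.next();
                // ... work on the item ...
                if (++processed % 100 == 0) {
                    context.commit();          // persist pending changes first
                    context.uncacheEntities(); // then drop the whole session cache
                }
            }
        }
    }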


@@ -124,28 +124,38 @@ public interface DBConnection<T> {
     public long getCacheSize() throws SQLException;

     /**
-     * Reload a DSpace object from the database. This will make sure the object
+     * Reload an entity from the database. This will make sure the object
      * is valid and stored in the cache.  The returned object should be used
      * henceforth instead of the passed object.
      *
-     * @param <E> type of {@link entity}
-     * @param entity The DSpace object to reload
+     * @param <E> type of entity.
+     * @param entity The entity to reload.
      * @return the reloaded entity.
-     * @throws java.sql.SQLException passed through.
+     * @throws SQLException passed through.
      */
     public <E extends ReloadableEntity> E reloadEntity(E entity) throws SQLException;

     /**
-     * Remove a DSpace object from the session cache when batch processing a
-     * large number of objects.
+     * Remove all entities from the session cache.
      *
-     * <p>Objects removed from cache are not saved in any way. Therefore, if you
-     * have modified an object, you should be sure to {@link commit()} changes
+     * <p>Entities removed from cache are not saved in any way. Therefore, if you
+     * have modified any entities, you should be sure to {@link #commit()} changes
      * before calling this method.
      *
-     * @param <E> Type of {@link entity}
-     * @param entity The DSpace object to decache.
-     * @throws java.sql.SQLException passed through.
+     * @throws SQLException passed through.
+     */
+    public void uncacheEntities() throws SQLException;
+
+    /**
+     * Remove an entity from the session cache.
+     *
+     * <p>Entities removed from cache are not saved in any way. Therefore, if you
+     * have modified the entity, you should be sure to {@link #commit()} changes
+     * before calling this method.
+     *
+     * @param <E> Type of entity.
+     * @param entity The entity to decache.
+     * @throws SQLException passed through.
      */
     public <E extends ReloadableEntity> void uncacheEntity(E entity) throws SQLException;


@@ -242,6 +242,11 @@ public class HibernateDBConnection implements DBConnection<Session> {
         }
     }

+    @Override
+    public void uncacheEntities() throws SQLException {
+        getSession().clear();
+    }
+
     /**
      * Evict an entity from the hibernate cache.
      * <P>


@@ -19,6 +19,8 @@ import org.dspace.content.Item;
 import org.dspace.content.MetadataValue;
 import org.dspace.curate.AbstractCurationTask;
 import org.dspace.curate.Curator;
+import org.dspace.services.ConfigurationService;
+import org.dspace.services.factory.DSpaceServicesFactory;

 /**
  * A basic link checker that is designed to be extended. By default this link checker
@@ -42,6 +44,9 @@ public class BasicLinkChecker extends AbstractCurationTask {
     // The log4j logger for this class
     private static Logger log = org.apache.logging.log4j.LogManager.getLogger(BasicLinkChecker.class);

+    protected static final ConfigurationService configurationService
+            = DSpaceServicesFactory.getInstance().getConfigurationService();
+
     /**
      * Perform the link checking.
@@ -110,7 +115,8 @@ public class BasicLinkChecker extends AbstractCurationTask {
      */
     protected boolean checkURL(String url, StringBuilder results) {
         // Link check the URL
-        int httpStatus = getResponseStatus(url);
+        int redirects = 0;
+        int httpStatus = getResponseStatus(url, redirects);
         if ((httpStatus >= 200) && (httpStatus < 300)) {
             results.append(" - " + url + " = " + httpStatus + " - OK\n");
@@ -128,14 +134,24 @@ public class BasicLinkChecker extends AbstractCurationTask {
      * @param url The url to open
      * @return The HTTP response code (e.g. 200 / 301 / 404 / 500)
      */
-    protected int getResponseStatus(String url) {
+    protected int getResponseStatus(String url, int redirects) {
         try {
             URL theURL = new URL(url);
             HttpURLConnection connection = (HttpURLConnection) theURL.openConnection();
-            int code = connection.getResponseCode();
-            connection.disconnect();
-            return code;
+            connection.setInstanceFollowRedirects(true);
+            int statusCode = connection.getResponseCode();
+            int maxRedirect = configurationService.getIntProperty("curate.checklinks.max-redirect", 0);
+            if (statusCode == HttpURLConnection.HTTP_MOVED_TEMP || statusCode == HttpURLConnection.HTTP_MOVED_PERM ||
+                    statusCode == HttpURLConnection.HTTP_SEE_OTHER) {
+                connection.disconnect();
+                String newUrl = connection.getHeaderField("Location");
+                if (newUrl != null && (maxRedirect >= redirects || maxRedirect == -1)) {
+                    redirects++;
+                    return getResponseStatus(newUrl, redirects);
+                }
+            }
+            return statusCode;
         } catch (IOException ioe) {
             // Must be a bad URL

@@ -15,13 +15,12 @@ import java.io.InputStream;
 import java.net.InetSocketAddress;
 import java.net.Socket;
 import java.net.SocketException;
-import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.List;

+import org.apache.commons.collections4.ListUtils;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
-import org.dspace.authorize.AuthorizeException;
 import org.dspace.content.Bitstream;
 import org.dspace.content.Bundle;
 import org.dspace.content.DSpaceObject;
@@ -99,8 +98,13 @@ public class ClamScan extends AbstractCurationTask {
         }

         try {
-            Bundle bundle = itemService.getBundles(item, "ORIGINAL").get(0);
-            results = new ArrayList<>();
+            List<Bundle> bundles = itemService.getBundles(item, "ORIGINAL");
+            if (ListUtils.emptyIfNull(bundles).isEmpty()) {
+                setResult("No ORIGINAL bundle found for item: " + getItemHandle(item));
+                return Curator.CURATE_SKIP;
+            }
+            Bundle bundle = bundles.get(0);
+            results = new ArrayList<String>();
             for (Bitstream bitstream : bundle.getBitstreams()) {
                 InputStream inputstream = bitstreamService.retrieve(Curator.curationContext(), bitstream);
                 logDebugMessage("Scanning " + bitstream.getName() + " . . . ");
@@ -121,10 +125,11 @@ public class ClamScan extends AbstractCurationTask {
                 }
             }
-        } catch (AuthorizeException authE) {
-            throw new IOException(authE.getMessage(), authE);
-        } catch (SQLException sqlE) {
-            throw new IOException(sqlE.getMessage(), sqlE);
+        } catch (Exception e) {
+            // Any exception which may occur during the performance of the task should be caught here
+            // and end the process gracefully
+            log.error("Error scanning item: " + getItemHandle(item), e);
+            status = Curator.CURATE_ERROR;
         } finally {
             closeSession();
         }


@@ -10,6 +10,7 @@ package org.dspace.ctask.general;

 import java.io.IOException;
 import java.sql.SQLException;
+import java.util.List;

 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -25,7 +26,6 @@ import org.dspace.identifier.IdentifierProvider;
 import org.dspace.identifier.VersionedHandleIdentifierProviderWithCanonicalHandles;
 import org.dspace.identifier.factory.IdentifierServiceFactory;
 import org.dspace.identifier.service.IdentifierService;
-import org.dspace.services.factory.DSpaceServicesFactory;

 /**
  * Ensure that an object has all of the identifiers that it should, minting them
@@ -45,20 +45,6 @@ public class CreateMissingIdentifiers
             return Curator.CURATE_SKIP;
         }

-        // XXX Temporary escape when an incompatible provider is configured.
-        // XXX Remove this when the provider is fixed.
-        boolean compatible = DSpaceServicesFactory
-                .getInstance()
-                .getServiceManager()
-                .getServiceByName(
-                    VersionedHandleIdentifierProviderWithCanonicalHandles.class.getCanonicalName(),
-                    IdentifierProvider.class) == null;
-
-        if (!compatible) {
-            setResult("This task is not compatible with VersionedHandleIdentifierProviderWithCanonicalHandles");
-            return Curator.CURATE_ERROR;
-        }
-        // XXX End of escape
-
         String typeText = Constants.typeText[dso.getType()];

         // Get a Context
@@ -75,6 +61,18 @@ public class CreateMissingIdentifiers
                 .getInstance()
                 .getIdentifierService();

+        // XXX Temporary escape when an incompatible provider is configured.
+        // XXX Remove this when the provider is fixed.
+        List<IdentifierProvider> providerList = identifierService.getProviders();
+        boolean compatible =
+            providerList.stream().noneMatch(p -> p instanceof VersionedHandleIdentifierProviderWithCanonicalHandles);
+
+        if (!compatible) {
+            setResult("This task is not compatible with VersionedHandleIdentifierProviderWithCanonicalHandles");
+            return Curator.CURATE_ERROR;
+        }
+        // XXX End of escape
+
         // Register any missing identifiers.
         try {
             identifierService.register(context, dso);


@@ -165,7 +165,7 @@ public class Curation extends DSpaceRunnable<CurationScriptConfiguration> {
      * End of curation script; logs script time if -v verbose is set
      *
      * @param timeRun Time script was started
-     * @throws SQLException If DSpace contextx can't complete
+     * @throws SQLException If DSpace context can't complete
      */
     private void endScript(long timeRun) throws SQLException {
         context.complete();
@@ -185,7 +185,7 @@ public class Curation extends DSpaceRunnable<CurationScriptConfiguration> {
         Curator curator = new Curator(handler);
         OutputStream reporterStream;
         if (null == this.reporter) {
-            reporterStream = new NullOutputStream();
+            reporterStream = NullOutputStream.NULL_OUTPUT_STREAM;
         } else if ("-".equals(this.reporter)) {
             reporterStream = System.out;
         } else {
@@ -300,9 +300,17 @@ public class Curation extends DSpaceRunnable<CurationScriptConfiguration> {
         // scope
         if (this.commandLine.getOptionValue('s') != null) {
             this.scope = this.commandLine.getOptionValue('s');
-            if (this.scope != null && Curator.TxScope.valueOf(this.scope.toUpperCase()) == null) {
-                this.handler.logError("Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " +
-                                      "'open' recognized");
+            boolean knownScope;
+            try {
+                Curator.TxScope.valueOf(this.scope.toUpperCase());
+                knownScope = true;
+            } catch (IllegalArgumentException | NullPointerException e) {
+                knownScope = false;
+            }
+            if (!knownScope) {
+                this.handler.logError("Bad transaction scope '"
+                        + this.scope
+                        + "': only 'object', 'curation' or 'open' recognized");
                 throw new IllegalArgumentException(
                         "Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " +
                         "'open' recognized");


@@ -32,6 +32,9 @@ public class DiscoverResult {
     private List<IndexableObject> indexableObjects;
     private Map<String, List<FacetResult>> facetResults;

+    // Total count of facet entries calculated for a metadata browsing query
+    private long totalEntries;
+
     /**
      * A map that contains all the documents sought after, the key is a string representation of the Indexable Object
      */
@@ -64,6 +67,14 @@ public class DiscoverResult {
         this.totalSearchResults = totalSearchResults;
     }

+    public long getTotalEntries() {
+        return totalEntries;
+    }
+
+    public void setTotalEntries(long totalEntries) {
+        this.totalEntries = totalEntries;
+    }
+
     public int getStart() {
         return start;
     }


@@ -40,14 +40,13 @@ import org.dspace.services.factory.DSpaceServicesFactory;
 import org.dspace.utils.DSpace;

 /**
- * Class used to reindex dspace communities/collections/items into discovery
+ * Class used to reindex DSpace communities/collections/items into discovery.
  */
 public class IndexClient extends DSpaceRunnable<IndexDiscoveryScriptConfiguration> {

     private Context context;
     private IndexingService indexer = DSpaceServicesFactory.getInstance().getServiceManager()
-            .getServiceByName(IndexingService.class.getName(),
-                              IndexingService.class);
+            .getServiceByName(IndexingService.class.getName(), IndexingService.class);

     private IndexClientOptions indexClientOptions;
@@ -69,103 +68,80 @@ public class IndexClient extends DSpaceRunnable<IndexDiscoveryScriptConfiguratio
             }
         }

-        /** Acquire from dspace-services in future */
-        /**
-         * new DSpace.getServiceManager().getServiceByName("org.dspace.discovery.SolrIndexer");
-         */
         Optional<IndexableObject> indexableObject = Optional.empty();

         if (indexClientOptions == IndexClientOptions.REMOVE || indexClientOptions == IndexClientOptions.INDEX) {
-            final String param = indexClientOptions == IndexClientOptions.REMOVE ? commandLine.getOptionValue('r') :
-                commandLine.getOptionValue('i');
-            UUID uuid = null;
-            try {
-                uuid = UUID.fromString(param);
-            } catch (Exception e) {
-                // nothing to do, it should be a handle
-            }
-            if (uuid != null) {
-                final Item item = ContentServiceFactory.getInstance().getItemService().find(context, uuid);
-                if (item != null) {
-                    indexableObject = Optional.of(new IndexableItem(item));
-                } else {
-                    // it could be a community
-                    final Community community = ContentServiceFactory.getInstance().
-                        getCommunityService().find(context, uuid);
-                    if (community != null) {
-                        indexableObject = Optional.of(new IndexableCommunity(community));
-                    } else {
-                        // it could be a collection
-                        final Collection collection = ContentServiceFactory.getInstance().
-                            getCollectionService().find(context, uuid);
-                        if (collection != null) {
-                            indexableObject = Optional.of(new IndexableCollection(collection));
-                        }
-                    }
-                }
-            } else {
-                final DSpaceObject dso = HandleServiceFactory.getInstance()
-                    .getHandleService().resolveToObject(context, param);
-                if (dso != null) {
-                    final IndexFactory indexableObjectService = IndexObjectFactoryFactory.getInstance().
-                        getIndexFactoryByType(Constants.typeText[dso.getType()]);
-                    indexableObject = indexableObjectService.findIndexableObject(context, dso.getID().toString());
-                }
-            }
+            final String param = indexClientOptions == IndexClientOptions.REMOVE ? commandLine.getOptionValue('r')
+                : commandLine.getOptionValue('i');
+            indexableObject = resolveIndexableObject(context, param);
             if (!indexableObject.isPresent()) {
                 throw new IllegalArgumentException("Cannot resolve " + param + " to a DSpace object");
             }
         }

-        if (indexClientOptions == IndexClientOptions.REMOVE) {
-            handler.logInfo("Removing " + commandLine.getOptionValue("r") + " from Index");
-            indexer.unIndexContent(context, indexableObject.get().getUniqueIndexID());
-        } else if (indexClientOptions == IndexClientOptions.CLEAN) {
-            handler.logInfo("Cleaning Index");
-            indexer.cleanIndex();
-        } else if (indexClientOptions == IndexClientOptions.DELETE) {
-            handler.logInfo("Deleting Index");
-            indexer.deleteIndex();
-        } else if (indexClientOptions == IndexClientOptions.BUILD ||
-            indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) {
-            handler.logInfo("(Re)building index from scratch.");
-            if (StringUtils.isNotBlank(type)) {
-                handler.logWarning(String.format("Type option, %s, not applicable for entire index rebuild option, b" +
-                    ", type will be ignored", TYPE_OPTION));
-            }
-            indexer.deleteIndex();
-            indexer.createIndex(context);
-            if (indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) {
-                checkRebuildSpellCheck(commandLine, indexer);
-            }
-        } else if (indexClientOptions == IndexClientOptions.OPTIMIZE) {
-            handler.logInfo("Optimizing search core.");
-            indexer.optimize();
-        } else if (indexClientOptions == IndexClientOptions.SPELLCHECK) {
-            checkRebuildSpellCheck(commandLine, indexer);
-        } else if (indexClientOptions == IndexClientOptions.INDEX) {
-            handler.logInfo("Indexing " + commandLine.getOptionValue('i') + " force " + commandLine.hasOption("f"));
-            final long startTimeMillis = System.currentTimeMillis();
-            final long count = indexAll(indexer, ContentServiceFactory.getInstance().
-                getItemService(), context, indexableObject.get());
-            final long seconds = (System.currentTimeMillis() - startTimeMillis) / 1000;
-            handler.logInfo("Indexed " + count + " object" + (count > 1 ? "s" : "") + " in " + seconds + " seconds");
-        } else if (indexClientOptions == IndexClientOptions.UPDATE ||
-            indexClientOptions == IndexClientOptions.UPDATEANDSPELLCHECK) {
-            handler.logInfo("Updating Index");
-            indexer.updateIndex(context, false, type);
-            if (indexClientOptions == IndexClientOptions.UPDATEANDSPELLCHECK) {
-                checkRebuildSpellCheck(commandLine, indexer);
-            }
-        } else if (indexClientOptions == IndexClientOptions.FORCEUPDATE ||
-            indexClientOptions == IndexClientOptions.FORCEUPDATEANDSPELLCHECK) {
-            handler.logInfo("Updating Index");
-            indexer.updateIndex(context, true, type);
-            if (indexClientOptions == IndexClientOptions.FORCEUPDATEANDSPELLCHECK) {
-                checkRebuildSpellCheck(commandLine, indexer);
-            }
+        switch (indexClientOptions) {
+            case REMOVE:
+                handler.logInfo("Removing " + commandLine.getOptionValue("r") + " from Index");
+                indexer.unIndexContent(context, indexableObject.get().getUniqueIndexID());
+                break;
+            case CLEAN:
+                handler.logInfo("Cleaning Index");
+                indexer.cleanIndex();
+                break;
+            case DELETE:
+                handler.logInfo("Deleting Index");
+                indexer.deleteIndex();
+                break;
+            case BUILD:
+            case BUILDANDSPELLCHECK:
+                handler.logInfo("(Re)building index from scratch.");
+                if (StringUtils.isNotBlank(type)) {
+                    handler.logWarning(String.format(
+                        "Type option, %s, not applicable for entire index rebuild option, b"
+                            + ", type will be ignored",
+                        TYPE_OPTION));
+                }
+                indexer.deleteIndex();
+                indexer.createIndex(context);
+                if (indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) {
+                    checkRebuildSpellCheck(commandLine, indexer);
+                }
+                break;
+            case OPTIMIZE:
+                handler.logInfo("Optimizing search core.");
+                indexer.optimize();
+                break;
+            case SPELLCHECK:
+                checkRebuildSpellCheck(commandLine, indexer);
+                break;
+            case INDEX:
+                handler.logInfo("Indexing " + commandLine.getOptionValue('i') + " force " + commandLine.hasOption("f"));
+                final long startTimeMillis = System.currentTimeMillis();
+                final long count = indexAll(indexer, ContentServiceFactory.getInstance().getItemService(), context,
+                    indexableObject.get());
+                final long seconds = (System.currentTimeMillis() - startTimeMillis) / 1000;
+                handler.logInfo("Indexed " + count + " object" + (count > 1 ? "s" : "") +
+                    " in " + seconds + " seconds");
+                break;
+            case UPDATE:
+            case UPDATEANDSPELLCHECK:
+                handler.logInfo("Updating Index");
+                indexer.updateIndex(context, false, type);
+                if (indexClientOptions == IndexClientOptions.UPDATEANDSPELLCHECK) {
+                    checkRebuildSpellCheck(commandLine, indexer);
+                }
+                break;
+            case FORCEUPDATE:
+            case FORCEUPDATEANDSPELLCHECK:
+                handler.logInfo("Updating Index");
+                indexer.updateIndex(context, true, type);
+                if (indexClientOptions == IndexClientOptions.FORCEUPDATEANDSPELLCHECK) {
+                    checkRebuildSpellCheck(commandLine, indexer);
+                }
+                break;
+            default:
+                handler.handleException("Invalid index client option.");
+                break;
         }

         handler.logInfo("Done with indexing");
@@ -174,7 +150,7 @@ public class IndexClient extends DSpaceRunnable<IndexDiscoveryScriptConfiguratio
     @Override
    public IndexDiscoveryScriptConfiguration getScriptConfiguration() {
         return new DSpace().getServiceManager().getServiceByName("index-discovery",
-                                                                 IndexDiscoveryScriptConfiguration.class);
+                IndexDiscoveryScriptConfiguration.class);
     }

     public void setup() throws ParseException {
@@ -186,78 +162,93 @@ public class IndexClient extends DSpaceRunnable<IndexDiscoveryScriptConfiguratio
         }
         indexClientOptions = IndexClientOptions.getIndexClientOption(commandLine);
     }

     /**
-     * Indexes the given object and all children, if applicable.
+     * Resolves the given parameter to an IndexableObject (Item, Collection, or Community).
      *
-     * @param indexingService
-     * @param itemService
-     * @param context         The relevant DSpace Context.
-     * @param dso             DSpace object to index recursively
-     * @throws IOException            A general class of exceptions produced by failed or interrupted I/O operations.
-     * @throws SearchServiceException in case of a solr exception
-     * @throws SQLException           An exception that provides information on a database access error or other errors.
+     * @param context The relevant DSpace Context.
+     * @param param   The UUID or handle of the DSpace object.
+     * @return An Optional containing the IndexableObject if found.
+     * @throws SQLException If database error occurs.
      */
-    private static long indexAll(final IndexingService indexingService,
-                                 final ItemService itemService,
-                                 final Context context,
-                                 final IndexableObject dso)
-        throws IOException, SearchServiceException, SQLException {
-        long count = 0;
-
-        indexingService.indexContent(context, dso, true, true);
-        count++;
-        if (dso.getIndexedObject() instanceof Community) {
-            final Community community = (Community) dso.getIndexedObject();
-            final String communityHandle = community.getHandle();
-            for (final Community subcommunity : community.getSubcommunities()) {
-                count += indexAll(indexingService, itemService, context, new IndexableCommunity(subcommunity));
-                //To prevent memory issues, discard an object from the cache after processing
-                context.uncacheEntity(subcommunity);
-            }
-            final Community reloadedCommunity = (Community) HandleServiceFactory.getInstance().getHandleService()
-                                                                                .resolveToObject(context,
-                                                                                                 communityHandle);
-            for (final Collection collection : reloadedCommunity.getCollections()) {
-                count++;
-                indexingService.indexContent(context, new IndexableCollection(collection), true, true);
-                count += indexItems(indexingService, itemService, context, collection);
-                //To prevent memory issues, discard an object from the cache after processing
-                context.uncacheEntity(collection);
-            }
-        } else if (dso instanceof IndexableCollection) {
-            count += indexItems(indexingService, itemService, context, (Collection) dso.getIndexedObject());
+    private Optional<IndexableObject> resolveIndexableObject(Context context, String param) throws SQLException {
+        UUID uuid = null;
+        try {
+            uuid = UUID.fromString(param);
+        } catch (Exception e) {
+            // It's not a UUID, proceed to treat it as a handle.
         }
-
-        return count;
+        if (uuid != null) {
+            Item item = ContentServiceFactory.getInstance().getItemService().find(context, uuid);
+            if (item != null) {
+                return Optional.of(new IndexableItem(item));
+            }
+            Community community = ContentServiceFactory.getInstance().getCommunityService().find(context, uuid);
+            if (community != null) {
+                return Optional.of(new IndexableCommunity(community));
+            }
+            Collection collection = ContentServiceFactory.getInstance().getCollectionService().find(context, uuid);
+            if (collection != null) {
+                return Optional.of(new IndexableCollection(collection));
+            }
+        } else {
+            DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, param);
+            if (dso != null) {
+                IndexFactory indexableObjectService = IndexObjectFactoryFactory.getInstance()
+                        .getIndexFactoryByType(Constants.typeText[dso.getType()]);
+                return indexableObjectService.findIndexableObject(context, dso.getID().toString());
+            }
+        }
+        return Optional.empty();
     }

     /**
-     * Indexes all items in the given collection.
+     * Indexes the given object and all its children recursively.
      *
-     * @param indexingService
-     * @param itemService
-     * @param context         The relevant DSpace Context.
-     * @param collection      collection to index
-     * @throws IOException            A general class of exceptions produced by failed or interrupted I/O operations.
-     * @throws SearchServiceException in case of a solr exception
-     * @throws SQLException           An exception that provides information on a database access error or other errors.
+     * @param indexingService The indexing service.
+     * @param itemService     The item service.
+     * @param context         The relevant DSpace Context.
+     * @param indexableObject The IndexableObject to index recursively.
+     * @return The count of indexed objects.
+     * @throws IOException            If I/O error occurs.
+     * @throws SearchServiceException If a search service error occurs.
+     * @throws SQLException           If database error occurs.
      */
-    private static long indexItems(final IndexingService indexingService,
-                                   final ItemService itemService,
-                                   final Context context,
-                                   final Collection collection)
-        throws IOException, SearchServiceException, SQLException {
+    private long indexAll(final IndexingService indexingService, final ItemService itemService, final Context context,
+            final IndexableObject indexableObject) throws IOException, SearchServiceException, SQLException {
         long count = 0;

-        final Iterator<Item> itemIterator = itemService.findByCollection(context, collection);
-        while (itemIterator.hasNext()) {
-            Item item = itemIterator.next();
-            indexingService.indexContent(context, new IndexableItem(item), true, false);
-            count++;
-            //To prevent memory issues, discard an object from the cache after processing
-            context.uncacheEntity(item);
+        boolean commit = indexableObject instanceof IndexableCommunity ||
+            indexableObject instanceof IndexableCollection;
+        indexingService.indexContent(context, indexableObject, true, commit);
+        count++;
+
+        if (indexableObject instanceof IndexableCommunity) {
+            final Community community = (Community) indexableObject.getIndexedObject();
+            final String communityHandle = community.getHandle();
+            for (final Community subcommunity : community.getSubcommunities()) {
+                count += indexAll(indexingService, itemService, context, new IndexableCommunity(subcommunity));
+                context.uncacheEntity(subcommunity);
+            }
+            // Reload community to get up-to-date collections
+            final Community reloadedCommunity = (Community) HandleServiceFactory.getInstance().getHandleService()
+                    .resolveToObject(context, communityHandle);
+            for (final Collection collection : reloadedCommunity.getCollections()) {
+                count += indexAll(indexingService, itemService, context, new IndexableCollection(collection));
+                context.uncacheEntity(collection);
+            }
+        } else if (indexableObject instanceof IndexableCollection) {
+            final Collection collection = (Collection) indexableObject.getIndexedObject();
+            final Iterator<Item> itemIterator = itemService.findByCollection(context, collection);
+            while (itemIterator.hasNext()) {
+                Item item = itemIterator.next();
+                indexingService.indexContent(context, new IndexableItem(item), true, false);
+                count++;
+                context.uncacheEntity(item);
+            }
+            indexingService.commit();
         }
-        indexingService.commit();

         return count;
     }
@@ -268,10 +259,10 @@ public class IndexClient extends DSpaceRunnable<IndexDiscoveryScriptConfiguratio
      * @param line    the command line options
      * @param indexer the solr indexer
      * @throws SearchServiceException in case of a solr exception
-     * @throws IOException            passed through
+     * @throws IOException            If I/O error occurs.
      */
     protected void checkRebuildSpellCheck(CommandLine line, IndexingService indexer)
-        throws SearchServiceException, IOException {
+            throws SearchServiceException, IOException {
         handler.logInfo("Rebuilding spell checker.");
         indexer.buildSpellCheck();
     }


@@ -41,6 +41,8 @@ import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.response.FacetField;
 import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.client.solrj.response.json.BucketBasedJsonFacet;
+import org.apache.solr.client.solrj.response.json.NestableJsonFacet;
 import org.apache.solr.client.solrj.util.ClientUtils;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
@@ -1055,6 +1057,8 @@ public class SolrServiceImpl implements SearchService, IndexingService {
                 }
                 //Resolve our facet field values
                 resolveFacetFields(context, query, result, skipLoadingResponse, solrQueryResponse);
+                //Add total entries count for metadata browsing
+                resolveEntriesCount(result, solrQueryResponse);
             }
             // If any stale entries are found in the current page of results,
             // we remove those stale entries and rerun the same query again.
@@ -1080,7 +1084,37 @@ public class SolrServiceImpl implements SearchService, IndexingService {
         return result;
     }

+    /**
+     * Stores the total count of entries for metadata index browsing. The count is calculated by the
+     * <code>json.facet</code> parameter with the following value:
+     *
+     * <pre><code>
+     * {
+     *     "entries_count": {
+     *         "type": "terms",
+     *         "field": "facetNameField_filter",
+     *         "limit": 0,
+     *         "prefix": "prefix_value",
+     *         "numBuckets": true
+     *     }
+     * }
+     * </code></pre>
+     *
+     * This value is returned in the <code>facets</code> field of the Solr response.
+     *
+     * @param result            DiscoverResult object where the total entries count will be stored
+     * @param solrQueryResponse QueryResponse object containing the solr response
+     */
+    private void resolveEntriesCount(DiscoverResult result, QueryResponse solrQueryResponse) {
+        NestableJsonFacet response = solrQueryResponse.getJsonFacetingResponse();
+        if (response != null) {
+            BucketBasedJsonFacet facet = response.getBucketBasedFacets("entries_count");
+            if (facet != null) {
+                result.setTotalEntries(facet.getNumBucketsCount());
+            }
+        }
+    }
+
     private void resolveFacetFields(Context context, DiscoverQuery query, DiscoverResult result,
                                     boolean skipLoadingResponse, QueryResponse solrQueryResponse) throws SQLException {
@@ -1411,8 +1445,6 @@ public class SolrServiceImpl implements SearchService, IndexingService {
             } else {
                 return field + "_acid";
             }
-        } else if (facetFieldConfig.getType().equals(DiscoveryConfigurationParameters.TYPE_STANDARD)) {
-            return field;
         } else {
             return field;
         }

View File

@@ -118,20 +118,10 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
ParseContext tikaContext = new ParseContext();
// Use Apache Tika to parse the full text stream(s)
boolean extractionSucceeded = false;
try (InputStream fullTextStreams = streams.getStream()) {
tikaParser.parse(fullTextStreams, tikaHandler, tikaMetadata, tikaContext);
extractionSucceeded = true;
// Write Tika metadata to "tika_meta_*" fields.
// This metadata is not very useful right now,
// but we'll keep it just in case it becomes more useful.
for (String name : tikaMetadata.names()) {
for (String value : tikaMetadata.getValues(name)) {
doc.addField("tika_meta_" + name, value);
}
}
// Save (parsed) full text to "fulltext" field
doc.addField("fulltext", tikaHandler.toString());
} catch (SAXException saxe) {
// Check if this SAXException is just a notice that this file was longer than the character limit.
// Unfortunately there is not a unique, public exception type to catch here. This error is thrown
@@ -141,6 +131,7 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
// log that we only indexed up to that configured limit
log.info("Full text is larger than the configured limit (discovery.solr.fulltext.charLimit)."
+ " Only the first {} characters were indexed.", charLimit);
extractionSucceeded = true;
} else {
log.error("Tika parsing error. Could not index full text.", saxe);
throw new IOException("Tika parsing error. Could not index full text.", saxe);
@@ -148,11 +139,19 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
} catch (TikaException | IOException ex) {
log.error("Tika parsing error. Could not index full text.", ex);
throw new IOException("Tika parsing error. Could not index full text.", ex);
} finally {
// Add document to index
solr.add(doc);
}
return;
if (extractionSucceeded) {
// Write Tika metadata to "tika_meta_*" fields.
// This metadata is not very useful right now,
// but we'll keep it just in case it becomes more useful.
for (String name : tikaMetadata.names()) {
for (String value : tikaMetadata.getValues(name)) {
doc.addField("tika_meta_" + name, value);
}
}
// Save (parsed) full text to "fulltext" field
doc.addField("fulltext", tikaHandler.toString());
}
}
// Add document to index
solr.add(doc);
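The reworked block above follows an "extract first, index only on success" shape: parse into the handler, flip a flag on success or on hitting the character limit, and copy the handler output into the document only when the flag is set. A self-contained sketch of that shape, assuming a stand-in write limit of 100_000 characters in place of discovery.solr.fulltext.charLimit:

import java.io.IOException;
import java.io.InputStream;
import org.apache.tika.exception.TikaException;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.parser.AutoDetectParser;
import org.apache.tika.parser.ParseContext;
import org.apache.tika.sax.BodyContentHandler;
import org.xml.sax.SAXException;

public class FullTextExtractionSketch {
    public static String extractOrNull(InputStream stream) {
        BodyContentHandler handler = new BodyContentHandler(100_000); // stand-in char limit
        boolean extractionSucceeded = false;
        try {
            new AutoDetectParser().parse(stream, handler, new Metadata(), new ParseContext());
            extractionSucceeded = true;
        } catch (SAXException e) {
            // BodyContentHandler signals "write limit reached" with a SAXException;
            // real parse errors arrive the same way, so, as in the code above, the
            // exception has to be inspected before treating the text as usable.
            extractionSucceeded = e.getMessage() != null && e.getMessage().contains("limit");
        } catch (TikaException | IOException e) {
            // genuine parse failure: index the document without full text
        }
        return extractionSucceeded ? handler.toString() : null;
    }
}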

View File

@@ -67,8 +67,6 @@ import org.dspace.handle.service.HandleService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.util.MultiFormatDateParser;
import org.dspace.util.SolrUtils;
import org.dspace.versioning.Version;
import org.dspace.versioning.VersionHistory;
import org.dspace.versioning.service.VersionHistoryService;
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
import org.dspace.xmlworkflow.storedcomponents.service.XmlWorkflowItemService;
@@ -151,12 +149,14 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
doc.addField("withdrawn", item.isWithdrawn());
doc.addField("discoverable", item.isDiscoverable());
doc.addField("lastModified", SolrUtils.getDateFormatter().format(item.getLastModified()));
doc.addField("latestVersion", isLatestVersion(context, item));
doc.addField("latestVersion", itemService.isLatestVersion(context, item));
EPerson submitter = item.getSubmitter();
if (submitter != null) {
addFacetIndex(doc, "submitter", submitter.getID().toString(),
submitter.getFullName());
if (submitter != null && !(DSpaceServicesFactory.getInstance().getConfigurationService().getBooleanProperty(
"discovery.index.item.submitter.enabled", false))) {
doc.addField("submitter_authority", submitter.getID().toString());
} else if (submitter != null) {
addFacetIndex(doc, "submitter", submitter.getID().toString(), submitter.getFullName());
}
// Add the item metadata
@@ -175,43 +175,6 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
return doc;
}
/**
* Check whether the given item is the latest version.
* If the latest item cannot be determined, because either the version history or the latest version is not present,
* assume the item is latest.
* @param context the DSpace context.
* @param item the item that should be checked.
* @return true if the item is the latest version, false otherwise.
*/
protected boolean isLatestVersion(Context context, Item item) throws SQLException {
VersionHistory history = versionHistoryService.findByItem(context, item);
if (history == null) {
// not all items have a version history
// if an item does not have a version history, it is by definition the latest version
return true;
}
// start with the very latest version of the given item (may still be in workspace)
Version latestVersion = versionHistoryService.getLatestVersion(context, history);
// find the latest version of the given item that is archived
while (latestVersion != null && !latestVersion.getItem().isArchived()) {
latestVersion = versionHistoryService.getPrevious(context, history, latestVersion);
}
// could not find an archived version of the given item
if (latestVersion == null) {
// this scenario should never happen, but let's err on the side of showing too many items vs. to little
// (see discovery.xml, a lot of discovery configs filter out all items that are not the latest version)
return true;
}
// sanity check
assert latestVersion.getItem().isArchived();
return item.equals(latestVersion.getItem());
}
@Override
public SolrInputDocument buildNewDocument(Context context, IndexableItem indexableItem)
throws SQLException, IOException {
@@ -704,7 +667,7 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
return List.copyOf(workflowItemIndexFactory.getIndexableObjects(context, xmlWorkflowItem));
}
if (!isLatestVersion(context, item)) {
if (!itemService.isLatestVersion(context, item)) {
// the given item is an older version of another item
return List.of(new IndexableItem(item));
}

View File

@@ -15,6 +15,7 @@ import jakarta.persistence.Id;
import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.Table;
import jakarta.persistence.UniqueConstraint;
import org.dspace.core.HibernateProxyHelper;
/**
@@ -23,7 +24,7 @@ import org.dspace.core.HibernateProxyHelper;
* @author kevinvandevelde at atmire.com
*/
@Entity
@Table(name = "group2groupcache")
@Table(name = "group2groupcache", uniqueConstraints = { @UniqueConstraint(columnNames = {"parent_id", "child_id"}) })
public class Group2GroupCache implements Serializable {
@Id

View File

@@ -20,6 +20,7 @@ import java.util.Set;
import java.util.UUID;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections4.SetUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.logging.log4j.LogManager;
@@ -673,15 +674,14 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
/**
* Regenerate the group cache AKA the group2groupcache table in the database -
* meant to be called when a group is added or removed from another group
* Returns a set with pairs of parent and child group UUIDs, representing the new cache table rows.
*
* @param context The relevant DSpace Context.
* @param flushQueries flushQueries Flush all pending queries
* @return Pairs of parent and child group UUID of the new cache.
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
protected void rethinkGroupCache(Context context, boolean flushQueries) throws SQLException {
private Set<Pair<UUID, UUID>> computeNewCache(Context context, boolean flushQueries) throws SQLException {
Map<UUID, Set<UUID>> parents = new HashMap<>();
List<Pair<UUID, UUID>> group2groupResults = groupDAO.getGroup2GroupResults(context, flushQueries);
@@ -689,19 +689,8 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
UUID parent = group2groupResult.getLeft();
UUID child = group2groupResult.getRight();
// if parent doesn't have an entry, create one
if (!parents.containsKey(parent)) {
Set<UUID> children = new HashSet<>();
// add child id to the list
children.add(child);
parents.put(parent, children);
} else {
// parent has an entry, now add the child to the parent's record
// of children
Set<UUID> children = parents.get(parent);
children.add(child);
}
parents.putIfAbsent(parent, new HashSet<>());
parents.get(parent).add(child);
}
// now parents is a hash of all of the IDs of groups that are parents
@@ -714,28 +703,43 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
parent.getValue().addAll(myChildren);
}
// empty out group2groupcache table
group2GroupCacheDAO.deleteAll(context);
// write out new one
// write out new cache IN MEMORY ONLY and returns it
Set<Pair<UUID, UUID>> newCache = new HashSet<>();
for (Map.Entry<UUID, Set<UUID>> parent : parents.entrySet()) {
UUID key = parent.getKey();
for (UUID child : parent.getValue()) {
newCache.add(Pair.of(key, child));
Group parentGroup = find(context, key);
Group childGroup = find(context, child);
if (parentGroup != null && childGroup != null && group2GroupCacheDAO
.find(context, parentGroup, childGroup) == null) {
Group2GroupCache group2GroupCache = group2GroupCacheDAO.create(context, new Group2GroupCache());
group2GroupCache.setParent(parentGroup);
group2GroupCache.setChild(childGroup);
group2GroupCacheDAO.save(context, group2GroupCache);
}
}
}
return newCache;
}
/**
* Regenerate the group cache AKA the group2groupcache table in the database -
* meant to be called when a group is added or removed from another group
*
* @param context The relevant DSpace Context.
* @param flushQueries flushQueries Flush all pending queries
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
protected void rethinkGroupCache(Context context, boolean flushQueries) throws SQLException {
// current cache in the database
Set<Pair<UUID, UUID>> oldCache = group2GroupCacheDAO.getCache(context);
// correct cache, computed from the Group table
Set<Pair<UUID, UUID>> newCache = computeNewCache(context, flushQueries);
SetUtils.SetView<Pair<UUID, UUID>> toDelete = SetUtils.difference(oldCache, newCache);
SetUtils.SetView<Pair<UUID, UUID>> toCreate = SetUtils.difference(newCache, oldCache);
for (Pair<UUID, UUID> pair : toDelete ) {
group2GroupCacheDAO.deleteFromCache(context, pair.getLeft(), pair.getRight());
}
for (Pair<UUID, UUID> pair : toCreate ) {
group2GroupCacheDAO.addToCache(context, pair.getLeft(), pair.getRight());
}
}
@Override
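The combination of computeNewCache() and rethinkGroupCache() above replaces a destructive delete-all-then-rewrite with a set diff: rows present only in the old cache are deleted, rows present only in the new one are inserted, and shared rows are left untouched. A small illustration of the two SetUtils.difference() views, with strings standing in for the UUID pairs:

import java.util.Set;
import org.apache.commons.collections4.SetUtils;

public class CacheDiffSketch {
    public static void main(String[] args) {
        Set<String> oldCache = Set.of("a->b", "a->c");
        Set<String> newCache = Set.of("a->b", "b->c");
        // difference() returns live, unmodifiable views over the inputs
        Set<String> toDelete = SetUtils.difference(oldCache, newCache); // contains a->c
        Set<String> toCreate = SetUtils.difference(newCache, oldCache); // contains b->c
        System.out.println(toDelete + " / " + toCreate);
    }
}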

View File

@@ -9,7 +9,10 @@ package org.dspace.eperson.dao;
import java.sql.SQLException;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import org.apache.commons.lang3.tuple.Pair;
import org.dspace.core.Context;
import org.dspace.core.GenericDAO;
import org.dspace.eperson.Group;
@@ -25,13 +28,74 @@ import org.dspace.eperson.Group2GroupCache;
*/
public interface Group2GroupCacheDAO extends GenericDAO<Group2GroupCache> {
public List<Group2GroupCache> findByParent(Context context, Group group) throws SQLException;
/**
* Returns the current cache table as a set of UUID pairs.
* @param context The relevant DSpace Context.
* @return Set of UUID pairs, where the first element is the parent UUID and the second one is the child UUID.
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
Set<Pair<UUID, UUID>> getCache(Context context) throws SQLException;
public List<Group2GroupCache> findByChildren(Context context, Iterable<Group> groups) throws SQLException;
/**
* Returns all cache entities that are children of a given parent Group entity.
* @param context The relevant DSpace Context.
* @param group Parent group to perform the search.
* @return List of cached groups that are children of the parent group.
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
List<Group2GroupCache> findByParent(Context context, Group group) throws SQLException;
public Group2GroupCache findByParentAndChild(Context context, Group parent, Group child) throws SQLException;
/**
* Returns all cache entities that are parents of at least one group from a children groups list.
* @param context The relevant DSpace Context.
* @param groups Children groups to perform the search.
* @return List of cached groups that are parents of at least one group from the children groups list.
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
List<Group2GroupCache> findByChildren(Context context, Iterable<Group> groups) throws SQLException;
public Group2GroupCache find(Context context, Group parent, Group child) throws SQLException;
/**
* Returns the cache entity given specific parent and child groups.
* @param context The relevant DSpace Context.
* @param parent Parent group.
* @param child Child group.
* @return Cached group.
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
Group2GroupCache findByParentAndChild(Context context, Group parent, Group child) throws SQLException;
public void deleteAll(Context context) throws SQLException;
/**
* Returns the cache entity given specific parent and child groups.
* @param context The relevant DSpace Context.
* @param parent Parent group.
* @param child Child group.
* @return Cached group.
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
Group2GroupCache find(Context context, Group parent, Group child) throws SQLException;
/**
* Completely deletes the current cache table.
* @param context The relevant DSpace Context.
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
void deleteAll(Context context) throws SQLException;
/**
* Deletes a specific cache row given parent and child groups UUIDs.
* @param context The relevant DSpace Context.
* @param parent Parent group UUID.
* @param child Child group UUID.
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
void deleteFromCache(Context context, UUID parent, UUID child) throws SQLException;
/**
* Adds a single row to the cache table given parent and child groups UUIDs.
* @param context The relevant DSpace Context.
* @param parent Parent group UUID.
* @param child Child group UUID.
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
void addToCache(Context context, UUID parent, UUID child) throws SQLException;
}

View File

@@ -8,14 +8,18 @@
package org.dspace.eperson.dao.impl;
import java.sql.SQLException;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import jakarta.persistence.Query;
import jakarta.persistence.criteria.CriteriaBuilder;
import jakarta.persistence.criteria.CriteriaQuery;
import jakarta.persistence.criteria.Predicate;
import jakarta.persistence.criteria.Root;
import org.apache.commons.lang3.tuple.Pair;
import org.dspace.core.AbstractHibernateDAO;
import org.dspace.core.Context;
import org.dspace.eperson.Group;
@@ -35,6 +39,16 @@ public class Group2GroupCacheDAOImpl extends AbstractHibernateDAO<Group2GroupCac
super();
}
@Override
public Set<Pair<UUID, UUID>> getCache(Context context) throws SQLException {
Query query = createQuery(
context,
"SELECT new org.apache.commons.lang3.tuple.ImmutablePair(g.parent.id, g.child.id) FROM Group2GroupCache g"
);
List<Pair<UUID, UUID>> results = query.getResultList();
return new HashSet<Pair<UUID, UUID>>(results);
}
@Override
public List<Group2GroupCache> findByParent(Context context, Group group) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
@@ -90,4 +104,24 @@ public class Group2GroupCacheDAOImpl extends AbstractHibernateDAO<Group2GroupCac
public void deleteAll(Context context) throws SQLException {
createQuery(context, "delete from Group2GroupCache").executeUpdate();
}
@Override
public void deleteFromCache(Context context, UUID parent, UUID child) throws SQLException {
Query query = getHibernateSession(context).createNativeQuery(
"delete from group2groupcache g WHERE g.parent_id = :parent AND g.child_id = :child"
);
query.setParameter("parent", parent);
query.setParameter("child", child);
query.executeUpdate();
}
@Override
public void addToCache(Context context, UUID parent, UUID child) throws SQLException {
Query query = getHibernateSession(context).createNativeQuery(
"insert into group2groupcache (parent_id, child_id) VALUES (:parent, :child)"
);
query.setParameter("parent", parent);
query.setParameter("child", child);
query.executeUpdate();
}
}

View File

@@ -1,144 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.google;
import java.io.File;
import java.util.HashSet;
import java.util.Set;
import com.google.api.client.auth.oauth2.Credential;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.services.analytics.Analytics;
import com.google.api.services.analytics.AnalyticsScopes;
import org.apache.logging.log4j.Logger;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
* User: Robin Taylor
* Date: 11/07/2014
* Time: 13:23
*/
public class GoogleAccount {
// Read from config
private String applicationName;
private String tableId;
private String emailAddress;
private String certificateLocation;
// Created from factories
private JsonFactory jsonFactory;
private HttpTransport httpTransport;
// The Google stuff
private Credential credential;
private Analytics client;
private volatile static GoogleAccount uniqueInstance;
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(GoogleAccount.class);
private GoogleAccount() {
applicationName = DSpaceServicesFactory.getInstance().getConfigurationService()
.getProperty("google-analytics.application.name");
tableId = DSpaceServicesFactory.getInstance().getConfigurationService()
.getProperty("google-analytics.table.id");
emailAddress = DSpaceServicesFactory.getInstance().getConfigurationService()
.getProperty("google-analytics.account.email");
certificateLocation = DSpaceServicesFactory.getInstance().getConfigurationService()
.getProperty("google-analytics.certificate.location");
jsonFactory = JacksonFactory.getDefaultInstance();
try {
httpTransport = GoogleNetHttpTransport.newTrustedTransport();
credential = authorize();
} catch (Exception e) {
throw new RuntimeException("Error initialising Google Analytics client", e);
}
// Create an Analytics instance
client = new Analytics.Builder(httpTransport, jsonFactory, credential).setApplicationName(applicationName)
.build();
log.info("Google Analytics client successfully initialised");
}
public static GoogleAccount getInstance() {
if (uniqueInstance == null) {
synchronized (GoogleAccount.class) {
if (uniqueInstance == null) {
uniqueInstance = new GoogleAccount();
}
}
}
return uniqueInstance;
}
private Credential authorize() throws Exception {
Set<String> scopes = new HashSet<String>();
scopes.add(AnalyticsScopes.ANALYTICS);
scopes.add(AnalyticsScopes.ANALYTICS_EDIT);
scopes.add(AnalyticsScopes.ANALYTICS_MANAGE_USERS);
scopes.add(AnalyticsScopes.ANALYTICS_PROVISION);
scopes.add(AnalyticsScopes.ANALYTICS_READONLY);
credential = new GoogleCredential.Builder()
.setTransport(httpTransport)
.setJsonFactory(jsonFactory)
.setServiceAccountId(emailAddress)
.setServiceAccountScopes(scopes)
.setServiceAccountPrivateKeyFromP12File(new File(certificateLocation))
.build();
return credential;
}
public String getApplicationName() {
return applicationName;
}
public String getTableId() {
return tableId;
}
public String getEmailAddress() {
return emailAddress;
}
public String getCertificateLocation() {
return certificateLocation;
}
public JsonFactory getJsonFactory() {
return jsonFactory;
}
public HttpTransport getHttpTransport() {
return httpTransport;
}
public Credential getCredential() {
return credential;
}
public Analytics getClient() {
return client;
}
}

View File

@@ -1,49 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.google;
import java.io.IOException;
import com.google.api.services.analytics.model.GaData;
/**
* User: Robin Taylor
* Date: 20/08/2014
* Time: 09:26
*/
public class GoogleQueryManager {
public GaData getPageViews(String startDate, String endDate, String handle) throws IOException {
return GoogleAccount.getInstance().getClient().data().ga().get(
GoogleAccount.getInstance().getTableId(),
startDate,
endDate,
"ga:pageviews") // Metrics.
.setDimensions("ga:year,ga:month")
.setSort("-ga:year,-ga:month")
.setFilters("ga:pagePath=~/handle/" + handle + "$")
.execute();
}
public GaData getBitstreamDownloads(String startDate, String endDate, String handle) throws IOException {
return GoogleAccount.getInstance().getClient().data().ga().get(
GoogleAccount.getInstance().getTableId(),
startDate,
endDate,
"ga:totalEvents") // Metrics.
.setDimensions("ga:year,ga:month")
.setSort("-ga:year,-ga:month")
.setFilters(
"ga:eventCategory==bitstream;ga:eventAction==download;ga:pagePath=~" + handle + "/")
.execute();
}
}

View File

@@ -1,201 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.google;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import jakarta.servlet.http.HttpServletRequest;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.message.BasicNameValuePair;
import org.apache.logging.log4j.Logger;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Constants;
import org.dspace.service.ClientInfoService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.model.Event;
import org.dspace.usage.AbstractUsageEventListener;
import org.dspace.usage.UsageEvent;
import org.springframework.beans.factory.annotation.Autowired;
/**
* User: Robin Taylor
* Date: 14/08/2014
* Time: 10:05
*
* Notify Google Analytics of... well anything we want really.
* @deprecated Use org.dspace.google.GoogleAsyncEventListener instead
*/
@Deprecated
public class GoogleRecorderEventListener extends AbstractUsageEventListener {
private String analyticsKey;
private CloseableHttpClient httpclient;
private String GoogleURL = "https://www.google-analytics.com/collect";
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(GoogleRecorderEventListener.class);
protected ContentServiceFactory contentServiceFactory;
protected ConfigurationService configurationService;
protected ClientInfoService clientInfoService;
public GoogleRecorderEventListener() {
// httpclient is threadsafe so we only need one.
httpclient = HttpClients.createDefault();
}
@Autowired
public void setContentServiceFactory(ContentServiceFactory contentServiceFactory) {
this.contentServiceFactory = contentServiceFactory;
}
@Autowired
public void setConfigurationService(ConfigurationService configurationService) {
this.configurationService = configurationService;
}
@Autowired
public void setClientInfoService(ClientInfoService clientInfoService) {
this.clientInfoService = clientInfoService;
}
@Override
public void receiveEvent(Event event) {
if ((event instanceof UsageEvent)) {
log.debug("Usage event received " + event.getName());
// This is a wee bit messy but these keys should be combined in future.
analyticsKey = configurationService.getProperty("google.analytics.key");
if (StringUtils.isNotBlank(analyticsKey)) {
try {
UsageEvent ue = (UsageEvent) event;
if (ue.getAction() == UsageEvent.Action.VIEW) {
if (ue.getObject().getType() == Constants.BITSTREAM) {
logEvent(ue, "bitstream", "download");
// Note: I've left this commented out code here to show how we could record page views
// as events,
// but since they are already taken care of by the Google Analytics Javascript there is
// not much point.
//} else if (ue.getObject().getType() == Constants.ITEM) {
// logEvent(ue, "item", "view");
//} else if (ue.getObject().getType() == Constants.COLLECTION) {
// logEvent(ue, "collection", "view");
//} else if (ue.getObject().getType() == Constants.COMMUNITY) {
// logEvent(ue, "community", "view");
}
}
} catch (Exception e) {
log.error(e.getMessage());
}
}
}
}
private void logEvent(UsageEvent ue, String category, String action) throws IOException, SQLException {
HttpPost httpPost = new HttpPost(GoogleURL);
List<NameValuePair> nvps = new ArrayList<NameValuePair>();
nvps.add(new BasicNameValuePair("v", "1"));
nvps.add(new BasicNameValuePair("tid", analyticsKey));
// Client Id, should uniquely identify the user or device. If we have a session id for the user
// then lets use it, else generate a UUID.
if (ue.getRequest().getSession(false) != null) {
nvps.add(new BasicNameValuePair("cid", ue.getRequest().getSession().getId()));
} else {
nvps.add(new BasicNameValuePair("cid", UUID.randomUUID().toString()));
}
nvps.add(new BasicNameValuePair("t", "event"));
nvps.add(new BasicNameValuePair("uip", getIPAddress(ue.getRequest())));
nvps.add(new BasicNameValuePair("ua", ue.getRequest().getHeader("USER-AGENT")));
nvps.add(new BasicNameValuePair("dr", ue.getRequest().getHeader("referer")));
nvps.add(new BasicNameValuePair("dp", ue.getRequest().getRequestURI()));
nvps.add(new BasicNameValuePair("dt", getObjectName(ue)));
nvps.add(new BasicNameValuePair("ec", category));
nvps.add(new BasicNameValuePair("ea", action));
if (ue.getObject().getType() == Constants.BITSTREAM) {
// Bitstream downloads may occasionally be for collection or community images, so we need to label them
// with the parent object type.
nvps.add(new BasicNameValuePair("el", getParentType(ue)));
}
httpPost.setEntity(new UrlEncodedFormEntity(nvps));
try (CloseableHttpResponse response2 = httpclient.execute(httpPost)) {
// I can't find a list of what are acceptable responses, so I log the response but take no action.
log.debug("Google Analytics response is " + response2.getStatusLine());
}
log.debug("Posted to Google Analytics - " + ue.getRequest().getRequestURI());
}
private String getParentType(UsageEvent ue) {
try {
int parentType = contentServiceFactory.getDSpaceObjectService(ue.getObject())
.getParentObject(ue.getContext(), ue.getObject()).getType();
if (parentType == Constants.ITEM) {
return "item";
} else if (parentType == Constants.COLLECTION) {
return "collection";
} else if (parentType == Constants.COMMUNITY) {
return "community";
}
} catch (SQLException e) {
// This shouldn't merit interrupting the user's transaction so log the error and continue.
log.error(
"Error in Google Analytics recording - can't determine ParentObjectType for bitstream " + ue.getObject()
.getID());
e.printStackTrace();
}
return null;
}
private String getObjectName(UsageEvent ue) {
try {
if (ue.getObject().getType() == Constants.BITSTREAM) {
// For a bitstream download we really want to know the title of the owning item rather than the
// bitstream name.
return contentServiceFactory.getDSpaceObjectService(ue.getObject())
.getParentObject(ue.getContext(), ue.getObject()).getName();
} else {
return ue.getObject().getName();
}
} catch (SQLException e) {
// This shouldn't merit interrupting the user's transaction so log the error and continue.
log.error(
"Error in Google Analytics recording - can't determine ParentObjectName for bitstream " + ue.getObject()
.getID());
e.printStackTrace();
}
return null;
}
private String getIPAddress(HttpServletRequest request) {
return clientInfoService.getClientIp(request);
}
}

View File

@@ -57,6 +57,11 @@ public class IdentifierServiceImpl implements IdentifierService {
}
}
@Override
public List<IdentifierProvider> getProviders() {
return this.providers;
}
/**
* Reserves identifiers for the item
*

View File

@@ -355,7 +355,10 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider implem
if (changed) {
try {
itemService.clearMetadata(c, item, MD_SCHEMA, DOI_ELEMENT, DOI_QUALIFIER, Item.ANY);
itemService.addMetadata(c, item, MD_SCHEMA, DOI_ELEMENT, DOI_QUALIFIER, null, newIdentifiers);
// Checks if Array newIdentifiers is empty to avoid adding null values to the metadata field.
if (!newIdentifiers.isEmpty()) {
itemService.addMetadata(c, item, MD_SCHEMA, DOI_ELEMENT, DOI_QUALIFIER, null, newIdentifiers);
}
itemService.update(c, item);
} catch (SQLException ex) {
throw new RuntimeException("A problem with the database connection occurred.", ex);

View File

@@ -13,7 +13,6 @@ import java.io.PrintStream;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.UUID;
@@ -227,8 +226,16 @@ public class DOIOrganiser {
}
for (DOI doi : dois) {
organiser.reserve(doi);
context.uncacheEntity(doi);
doi = context.reloadEntity(doi);
try {
organiser.reserve(doi);
context.commit();
} catch (RuntimeException e) {
System.err.format("DOI %s for object %s reservation failed, skipping: %s%n",
doi.getDoi(),
doi.getDSpaceObject().getID().toString(), e.getMessage());
context.rollback();
}
}
} catch (SQLException ex) {
System.err.println("Error in database connection:" + ex.getMessage());
@@ -245,14 +252,22 @@ public class DOIOrganiser {
+ "that could be registered.");
}
for (DOI doi : dois) {
organiser.register(doi);
context.uncacheEntity(doi);
doi = context.reloadEntity(doi);
try {
organiser.register(doi);
context.commit();
} catch (SQLException e) {
System.err.format("DOI %s for object %s registration failed, skipping: %s%n",
doi.getDoi(),
doi.getDSpaceObject().getID().toString(), e.getMessage());
context.rollback();
}
}
} catch (SQLException ex) {
System.err.println("Error in database connection:" + ex.getMessage());
System.err.format("Error in database connection: %s%n", ex.getMessage());
ex.printStackTrace(System.err);
} catch (DOIIdentifierException ex) {
System.err.println("Error registering DOI identifier:" + ex.getMessage());
} catch (RuntimeException ex) {
System.err.format("Error registering DOI identifier: %s%n", ex.getMessage());
}
}
@@ -268,8 +283,9 @@ public class DOIOrganiser {
}
for (DOI doi : dois) {
doi = context.reloadEntity(doi);
organiser.update(doi);
context.uncacheEntity(doi);
context.commit();
}
} catch (SQLException ex) {
System.err.println("Error in database connection:" + ex.getMessage());
@@ -286,12 +302,17 @@ public class DOIOrganiser {
+ "that could be deleted.");
}
Iterator<DOI> iterator = dois.iterator();
while (iterator.hasNext()) {
DOI doi = iterator.next();
iterator.remove();
organiser.delete(doi.getDoi());
context.uncacheEntity(doi);
for (DOI doi : dois) {
doi = context.reloadEntity(doi);
try {
organiser.delete(doi.getDoi());
context.commit();
} catch (SQLException e) {
System.err.format("DOI %s for object %s deletion failed, skipping: %s%n",
doi.getDoi(),
doi.getDSpaceObject().getID().toString(), e.getMessage());
context.rollback();
}
}
} catch (SQLException ex) {
System.err.println("Error in database connection:" + ex.getMessage());
@@ -401,12 +422,18 @@ public class DOIOrganiser {
/**
* Register DOI with the provider
* @param doiRow - doi to register
* @param filter - logical item filter to override
* @throws SQLException
* @throws DOIIdentifierException
* @param doiRow DOI to register
* @param filter logical item filter to override
* @throws IllegalArgumentException
* if {@link doiRow} does not name an Item.
* @throws IllegalStateException
* on invalid DOI.
* @throws RuntimeException
* on database error.
*/
public void register(DOI doiRow, Filter filter) throws SQLException, DOIIdentifierException {
public void register(DOI doiRow, Filter filter)
throws IllegalArgumentException, IllegalStateException,
RuntimeException {
DSpaceObject dso = doiRow.getDSpaceObject();
if (Constants.ITEM != dso.getType()) {
throw new IllegalArgumentException("Currently DSpace supports DOIs for Items only.");
@@ -473,30 +500,33 @@ public class DOIOrganiser {
}
/**
* Register DOI with the provider
* Register DOI with the provider.
* @param doiRow - doi to register
* @throws SQLException
* @throws DOIIdentifierException
* @param doiRow DOI to register
* @throws IllegalArgumentException passed through.
* @throws IllegalStateException passed through.
* @throws RuntimeException passed through.
*/
public void register(DOI doiRow) throws SQLException, DOIIdentifierException {
public void register(DOI doiRow)
throws IllegalStateException, IllegalArgumentException,
RuntimeException {
register(doiRow, this.filter);
}
/**
* Reserve DOI with the provider,
* @param doiRow - doi to reserve
* @throws SQLException
* @throws DOIIdentifierException
*/
public void reserve(DOI doiRow) {
reserve(doiRow, this.filter);
}
/**
* Reserve DOI with the provider
* Reserve DOI with the provider.
* @param doiRow - doi to reserve
* @throws SQLException
* @throws DOIIdentifierException
* @param filter - Logical item filter to determine whether this
* identifier should be reserved online.
* @throws IllegalStateException on invalid DOI.
* @throws RuntimeException on database error.
*/
public void reserve(DOI doiRow, Filter filter) {
DSpaceObject dso = doiRow.getDSpaceObject();
@@ -577,7 +607,8 @@ public class DOIOrganiser {
}
} catch (IdentifierException ex) {
if (!(ex instanceof DOIIdentifierException)) {
LOG.error("It wasn't possible to register the identifier online. ", ex);
LOG.error("Registering DOI {} for object {}: the registrar returned an error.",
doiRow.getDoi(), dso.getID(), ex);
}
DOIIdentifierException doiIdentifierException = (DOIIdentifierException) ex;
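The reserve, register, update and delete loops earlier in this file all converge on the same per-row transaction shape: reload the entity into the current session, attempt the operation, commit on success, and roll back and continue on failure so that one bad DOI cannot abort the whole batch. A generic sketch of that shape; Tx and RemoteOp are hypothetical stand-ins for the DSpace Context and the organiser calls:

import java.util.List;

public class PerRowCommitSketch {
    interface Tx { void commit(); void rollback(); }            // stand-in for Context
    interface RemoteOp { void apply(String row) throws Exception; }

    static void processAll(List<String> rows, Tx tx, RemoteOp op) {
        for (String row : rows) {
            try {
                op.apply(row);
                tx.commit();   // persist this row's outcome immediately
            } catch (Exception e) {
                System.err.format("row %s failed, skipping: %s%n", row, e.getMessage());
                tx.rollback(); // discard only this row's changes
            }
        }
    }
}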

View File

@@ -461,6 +461,10 @@ public class DataCiteConnector
log.warn("While reserving the DOI {}, we got a http status code "
+ "{} and the message \"{}\".",
doi, Integer.toString(resp.statusCode), resp.getContent());
Format format = Format.getCompactFormat();
format.setEncoding("UTF-8");
XMLOutputter xout = new XMLOutputter(format);
log.info("We send the following XML:\n{}", xout.outputString(root));
throw new DOIIdentifierException("Unable to parse an answer from "
+ "DataCite API. Please have a look into DSpace logs.",
DOIIdentifierException.BAD_ANSWER);
@@ -632,6 +636,14 @@ public class DataCiteConnector
return sendHttpRequest(httpget, doi);
}
/**
* Send a DataCite metadata document to the registrar.
*
* @param doi identify the object.
* @param metadataRoot describe the object. The root element of the document.
* @return the registrar's response.
* @throws DOIIdentifierException passed through.
*/
protected DataCiteResponse sendMetadataPostRequest(String doi, Element metadataRoot)
throws DOIIdentifierException {
Format format = Format.getCompactFormat();
@@ -640,6 +652,14 @@ public class DataCiteConnector
return sendMetadataPostRequest(doi, xout.outputString(new Document(metadataRoot)));
}
/**
* Send a DataCite metadata document to the registrar.
*
* @param doi identify the object.
* @param metadata describe the object.
* @return the registrar's response.
* @throws DOIIdentifierException passed through.
*/
protected DataCiteResponse sendMetadataPostRequest(String doi, String metadata)
throws DOIIdentifierException {
// post mds/metadata/
@@ -687,7 +707,7 @@ public class DataCiteConnector
* properties such as request URI and method type.
* @param doi DOI string to operate on
* @return response from DataCite
* @throws DOIIdentifierException if DOI error
* @throws DOIIdentifierException if registrar returns an error.
*/
protected DataCiteResponse sendHttpRequest(HttpUriRequest req, String doi)
throws DOIIdentifierException {

View File

@@ -6,17 +6,14 @@
* http://www.dspace.org/license/
*/
/**
* Make requests to the DOI registration angencies, f.e.to
* <a href='http://n2t.net/ezid/'>EZID</a> DOI service, and analyze the responses.
*
* <p>
* Use {@link org.dspace.identifier.ezid.EZIDRequestFactory#getInstance} to
* configure an {@link org.dspace.identifier.ezid.EZIDRequest}
* with your authority number and credentials. {@code EZIDRequest} encapsulates
* EZID's operations (lookup, create/mint, modify, delete...).
* An operation returns an {@link org.dspace.identifier.ezid.EZIDResponse} which
* gives easy access to EZID's status code and value, status of the underlying
* HTTP request, and key/value pairs found in the response body (if any).
* </p>
* Make requests to the DOI registration agencies and analyze the responses.
*
* <p>
* {@link DOIOrganiser} is a tool for managing DOI registrations.
*
* <p>
* Classes specific to the <a href='https://datacite.org/'>DataCite</a>
* registrar are here. See {@link org.dspace.identifier.ezid} for the
* <a href='https://ezid.cdlib.org'>EZID</a> registrar.
*/
package org.dspace.identifier.doi;

View File

@@ -0,0 +1,21 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
/**
* DOI classes specific to the EZID registrar.
*
* <p>
* Use {@link org.dspace.identifier.ezid.EZIDRequestFactory#getInstance} to
* configure an {@link org.dspace.identifier.ezid.EZIDRequest}
* with your authority number and credentials. {@code EZIDRequest} encapsulates
* EZID's operations (lookup, create/mint, modify, delete...).
* An operation returns an {@link org.dspace.identifier.ezid.EZIDResponse} which
* gives easy access to EZID's status code and value, status of the underlying
* HTTP request, and key/value pairs found in the response body (if any).
* </p>
*/
package org.dspace.identifier.ezid;

View File

@@ -19,6 +19,7 @@ import org.dspace.identifier.Identifier;
import org.dspace.identifier.IdentifierException;
import org.dspace.identifier.IdentifierNotFoundException;
import org.dspace.identifier.IdentifierNotResolvableException;
import org.dspace.identifier.IdentifierProvider;
/**
* @author Fabio Bolognesi (fabio at atmire dot com)
@@ -194,4 +195,9 @@ public interface IdentifierService {
void delete(Context context, DSpaceObject dso, String identifier)
throws AuthorizeException, SQLException, IdentifierException;
/**
* Get List of currently enabled IdentifierProviders
* @return List of enabled IdentifierProvider objects.
*/
List<IdentifierProvider> getProviders();
}

View File

@@ -53,6 +53,16 @@ public class DataCiteImportMetadataSourceServiceImpl
@Autowired
private ConfigurationService configurationService;
private String entityFilterQuery;
public String getEntityFilterQuery() {
return entityFilterQuery;
}
public void setEntityFilterQuery(String entityFilterQuery) {
this.entityFilterQuery = entityFilterQuery;
}
@Override
public String getImportSource() {
return "datacite";
@@ -80,6 +90,9 @@ public class DataCiteImportMetadataSourceServiceImpl
if (StringUtils.isBlank(id)) {
id = query;
}
if (StringUtils.isNotBlank(getEntityFilterQuery())) {
id = id + " " + getEntityFilterQuery();
}
uriParameters.put("query", id);
uriParameters.put("page[size]", "1");
int timeoutMs = configurationService.getIntProperty("datacite.timeout", 180000);
@@ -118,6 +131,9 @@ public class DataCiteImportMetadataSourceServiceImpl
if (StringUtils.isBlank(id)) {
id = query;
}
if (StringUtils.isNotBlank(getEntityFilterQuery())) {
id = id + " " + getEntityFilterQuery();
}
uriParameters.put("query", id);
// start = current dspace page / datacite page number starting with 1
// dspace rounds up/down to the next configured pagination settings.

View File

@@ -0,0 +1,38 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.datacite;
import java.util.Map;
import jakarta.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
* An implementation of {@link AbstractMetadataFieldMapping}
* Responsible for defining the mapping of the DataCite metadata fields onto the DSpace metadata fields
*
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
* @author Florian Gantner (florian.gantner@uni-bamberg.de)
*/
public class DataCiteProjectFieldMapping extends AbstractMetadataFieldMapping {
/**
* Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it
* only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
* what metadatafield is generated.
*
* @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to
* the item.
*/
@Override
@Resource(name = "dataciteProjectMetadataFieldMap")
public void setMetadataFieldMap(Map metadataFieldMap) {
super.setMetadataFieldMap(metadataFieldMap);
}
}

View File

@@ -0,0 +1,24 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.datacite;
/**
* Implements a data source for querying DataCite specifically for Project resourceTypes.
* This inherits the methods of DataCiteImportMetadataSourceServiceImpl
*
* @author Florian Gantner (florian.gantner@uni-bamberg.de)
*
*/
public class DataCiteProjectImportMetadataSourceServiceImpl
extends DataCiteImportMetadataSourceServiceImpl {
@Override
public String getImportSource() {
return "dataciteProject";
}
}

View File

@@ -26,7 +26,7 @@ import org.jdom2.xpath.XPathFactory;
* This contributor is able to concatenate multiple values.
* Given a certain path, if it contains several nodes,
* the values of nodes will be concatenated into a single one.
* The concrete example we can see in the file wos-responce.xml in the <abstract_text> node,
* The concrete example we can see in the file wos-response.xml in the <abstract_text> node,
* which may contain several <p> paragraphs,
* this Contributor allows concatenating all <p> paragraphs to obtain a single one.
*

View File

@@ -0,0 +1,86 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.transform;
import static java.util.Optional.ofNullable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Optional;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.JsonNodeType;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.contributor.JsonPathMetadataProcessor;
import org.dspace.util.SimpleMapConverter;
/**
* This class is a Metadata processor from a structured JSON Metadata result
* and uses a SimpleMapConverter, with a mapping properties file
* to map to a single string value based on mapped keys.<br/>
* Like:<br/>
* <code>journal-article = Article</code>
*
* @author paulo-graca
*
*/
public class StringJsonValueMappingMetadataProcessorService implements JsonPathMetadataProcessor {
private final static Logger log = LogManager.getLogger();
/**
* The value map converter.
* a list of values to map from
*/
private SimpleMapConverter valueMapConverter;
private String path;
@Override
public Collection<String> processMetadata(String json) {
JsonNode rootNode = convertStringJsonToJsonNode(json);
Optional<JsonNode> abstractNode = Optional.of(rootNode.at(path));
Collection<String> values = new ArrayList<>();
if (abstractNode.isPresent() && abstractNode.get().getNodeType().equals(JsonNodeType.STRING)) {
String stringValue = abstractNode.get().asText();
values.add(ofNullable(stringValue)
.map(value -> valueMapConverter != null ? valueMapConverter.getValue(value) : value)
.orElse(valueMapConverter.getValue(null)));
}
return values;
}
private JsonNode convertStringJsonToJsonNode(String json) {
ObjectMapper mapper = new ObjectMapper();
JsonNode body = null;
try {
body = mapper.readTree(json);
} catch (JsonProcessingException e) {
log.error("Unable to process json response.", e);
}
return body;
}
/* Getters and Setters */
public String convertType(String type) {
return valueMapConverter != null ? valueMapConverter.getValue(type) : type;
}
public void setValueMapConverter(SimpleMapConverter valueMapConverter) {
this.valueMapConverter = valueMapConverter;
}
public void setPath(String path) {
this.path = path;
}
}
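The path handed to rootNode.at(...) in the new processor above is a Jackson JSON Pointer, and the SimpleMapConverter then maps the pointed-at string onto its final value. A minimal demonstration using plain Jackson; the /types/resourceType pointer and the journal-article-to-Article mapping are illustrative only:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonPointerSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode root = mapper.readTree("{\"types\": {\"resourceType\": \"journal-article\"}}");
        // JsonNode.at(...) takes a JSON Pointer; a missing path yields a MissingNode,
        // which is why the processor above checks the node type before reading it.
        String raw = root.at("/types/resourceType").asText(); // "journal-article"
        String mapped = "journal-article".equals(raw) ? "Article" : raw;
        System.out.println(mapped);
    }
}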

View File

@@ -10,6 +10,7 @@ package org.dspace.orcid.client;
import java.util.List;
import java.util.Optional;
import org.dspace.orcid.OrcidToken;
import org.dspace.orcid.exception.OrcidClientException;
import org.dspace.orcid.model.OrcidTokenResponseDTO;
import org.orcid.jaxb.model.v3.release.record.Person;
@@ -161,4 +162,11 @@ public interface OrcidClient {
*/
OrcidResponse deleteByPutCode(String accessToken, String orcid, String putCode, String path);
/**
* Revokes the given {@code orcidToken} with a POST method.
* @param orcidToken the access token to revoke
* @throws OrcidClientException if some error occurs during the revocation
*/
void revokeToken(OrcidToken orcidToken);
}

View File

@@ -42,6 +42,7 @@ import org.apache.http.client.methods.RequestBuilder;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.message.BasicNameValuePair;
import org.dspace.orcid.OrcidToken;
import org.dspace.orcid.exception.OrcidClientException;
import org.dspace.orcid.model.OrcidEntityType;
import org.dspace.orcid.model.OrcidProfileSectionType;
@@ -178,6 +179,16 @@ public class OrcidClientImpl implements OrcidClient {
return execute(buildDeleteUriRequest(accessToken, "/" + orcid + path + "/" + putCode), true);
}
@Override
public void revokeToken(OrcidToken orcidToken) {
List<NameValuePair> params = new ArrayList<>();
params.add(new BasicNameValuePair("client_id", orcidConfiguration.getClientId()));
params.add(new BasicNameValuePair("client_secret", orcidConfiguration.getClientSecret()));
params.add(new BasicNameValuePair("token", orcidToken.getAccessToken()));
executeSuccessful(buildPostForRevokeToken(new UrlEncodedFormEntity(params, Charset.defaultCharset())));
}
@Override
public OrcidTokenResponseDTO getReadPublicAccessToken() {
return getClientCredentialsAccessToken("/read-public");
@@ -220,6 +231,14 @@ public class OrcidClientImpl implements OrcidClient {
.build();
}
private HttpUriRequest buildPostForRevokeToken(HttpEntity entity) {
return post(orcidConfiguration.getRevokeUrl())
.addHeader("Accept", "application/json")
.addHeader("Content-Type", "application/x-www-form-urlencoded")
.setEntity(entity)
.build();
}
private HttpUriRequest buildPutUriRequest(String accessToken, String relativePath, Object object) {
return put(orcidConfiguration.getApiUrl() + relativePath.trim())
.addHeader("Content-Type", "application/vnd.orcid+xml")
@@ -234,6 +253,24 @@ public class OrcidClientImpl implements OrcidClient {
.build(); .build();
} }
private void executeSuccessful(HttpUriRequest httpUriRequest) {
try {
HttpClient client = HttpClientBuilder.create().build();
HttpResponse response = client.execute(httpUriRequest);
if (isNotSuccessfull(response)) {
throw new OrcidClientException(
getStatusCode(response),
"Operation " + httpUriRequest.getMethod() + " for the resource " + httpUriRequest.getURI() +
" was not successful: " + new String(response.getEntity().getContent().readAllBytes(),
StandardCharsets.UTF_8)
);
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private <T> T executeAndParseJson(HttpUriRequest httpUriRequest, Class<T> clazz) { private <T> T executeAndParseJson(HttpUriRequest httpUriRequest, Class<T> clazz) {
HttpClient client = HttpClientBuilder.create().build(); HttpClient client = HttpClientBuilder.create().build();
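
Editor's note: the request built above is ORCID's standard OAuth 2.0 token-revocation call. A minimal standalone sketch of the same request, assuming the configured revoke URL is ORCID's public endpoint; clientId/clientSecret/accessToken are hypothetical placeholders, not names from the patch:

    // Illustrative only: same shape as buildPostForRevokeToken() + revokeToken().
    private HttpUriRequest buildRevokeRequestSketch(String clientId, String clientSecret, String accessToken) {
        return RequestBuilder.post("https://orcid.org/oauth/revoke")
            .addHeader("Accept", "application/json")
            .addHeader("Content-Type", "application/x-www-form-urlencoded")
            .setEntity(new UrlEncodedFormEntity(List.of(
                new BasicNameValuePair("client_id", clientId),
                new BasicNameValuePair("client_secret", clientSecret),
                new BasicNameValuePair("token", accessToken)), Charset.defaultCharset()))
            .build();
    }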

View File

@@ -35,6 +35,8 @@ public final class OrcidConfiguration {
     private String scopes;

+    private String revokeUrl;
+
     public String getApiUrl() {
         return apiUrl;
     }
@@ -111,4 +113,11 @@ public final class OrcidConfiguration {
         return !StringUtils.isAnyBlank(clientId, clientSecret);
     }

+    public String getRevokeUrl() {
+        return revokeUrl;
+    }
+
+    public void setRevokeUrl(String revokeUrl) {
+        this.revokeUrl = revokeUrl;
+    }
 }
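
Editor's note: the new revokeUrl field is only populated via its setter, so deployments presumably wire it through the existing Spring/configuration plumbing. A minimal wiring sketch; the endpoint value is ORCID's documented revoke URL, but the call site is illustrative, not part of the patch:

    // Illustrative wiring only; in practice the value comes from DSpace configuration.
    OrcidConfiguration orcidConfiguration = new OrcidConfiguration();
    orcidConfiguration.setRevokeUrl("https://orcid.org/oauth/revoke");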

View File

@@ -14,9 +14,10 @@ import static java.util.Comparator.nullsFirst;
 import static org.apache.commons.collections.CollectionUtils.isNotEmpty;

 import java.sql.SQLException;
-import java.util.ArrayList;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Optional;
+import java.util.Set;
 import java.util.UUID;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
@@ -82,7 +83,7 @@ public class OrcidQueueConsumer implements Consumer {

     private RelationshipService relationshipService;

-    private final List<UUID> alreadyConsumedItems = new ArrayList<>();
+    private final Set<UUID> itemsToConsume = new HashSet<>();

     @Override
     public void initialize() throws Exception {
@@ -117,17 +118,26 @@ public class OrcidQueueConsumer implements Consumer {
             return;
         }

-        if (alreadyConsumedItems.contains(item.getID())) {
-            return;
-        }
-
-        context.turnOffAuthorisationSystem();
-        try {
-            consumeItem(context, item);
-        } finally {
-            context.restoreAuthSystemState();
+        itemsToConsume.add(item.getID());
+    }
+
+    @Override
+    public void end(Context context) throws Exception {
+
+        for (UUID itemId : itemsToConsume) {
+
+            Item item = itemService.find(context, itemId);
+
+            context.turnOffAuthorisationSystem();
+            try {
+                consumeItem(context, item);
+            } finally {
+                context.restoreAuthSystemState();
+            }
         }

+        itemsToConsume.clear();
     }

     /**
@@ -146,7 +156,7 @@ public class OrcidQueueConsumer implements Consumer {
             consumeProfile(context, item);
         }

-        alreadyConsumedItems.add(item.getID());
+        itemsToConsume.add(item.getID());
     }

@@ -169,6 +179,10 @@ public class OrcidQueueConsumer implements Consumer {
                 continue;
             }

+            if (isNotLatestVersion(context, entity)) {
+                continue;
+            }
+
             orcidQueueService.create(context, relatedItem, entity);
         }

@@ -329,6 +343,14 @@ public class OrcidQueueConsumer implements Consumer {
         return !getProfileType().equals(itemService.getEntityTypeLabel(profileItemItem));
     }

+    private boolean isNotLatestVersion(Context context, Item entity) {
+        try {
+            return !itemService.isLatestVersion(context, entity);
+        } catch (SQLException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
     private String getMetadataValue(Item item, String metadataField) {
         return itemService.getMetadataFirstValue(item, new MetadataFieldName(metadataField), Item.ANY);
     }

@@ -345,11 +367,6 @@ public class OrcidQueueConsumer implements Consumer {
         return !configurationService.getBooleanProperty("orcid.synchronization-enabled", true);
     }

-    @Override
-    public void end(Context context) throws Exception {
-        alreadyConsumedItems.clear();
-    }
-
     @Override
     public void finish(Context context) throws Exception {
         // nothing to do
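
Editor's note: the refactor above moves the actual work out of consume() and into end(), the usual collect-then-process idiom for DSpace event consumers. In miniature (hypothetical consumer, for illustration only):

    // Dedupe item IDs with a Set during consume(); act once per event cycle in end().
    private final Set<UUID> itemsToConsume = new HashSet<>();

    public void consume(Context context, Event event) {
        itemsToConsume.add(event.getSubjectID());  // cheap; no DB writes here
    }

    public void end(Context context) throws Exception {
        for (UUID itemId : itemsToConsume) {
            Item item = itemService.find(context, itemId);
            // ... process the item exactly once per commit ...
        }
        itemsToConsume.clear();  // reset state for the next event cycle
    }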

View File

@@ -74,6 +74,16 @@ public interface OrcidQueueDAO extends GenericDAO<OrcidQueue> {
      */
     public List<OrcidQueue> findByProfileItemOrEntity(Context context, Item item) throws SQLException;

+    /**
+     * Get the OrcidQueue records where the given item is the entity.
+     *
+     * @param context DSpace context object
+     * @param item    the item to search for
+     * @return        the found OrcidQueue entities
+     * @throws SQLException if database error
+     */
+    public List<OrcidQueue> findByEntity(Context context, Item item) throws SQLException;
+
     /**
      * Find all the OrcidQueue records with the given entity and record type.
      *

View File

@@ -63,6 +63,13 @@ public class OrcidQueueDAOImpl extends AbstractHibernateDAO<OrcidQueue> implemen
         return query.getResultList();
     }

+    @Override
+    public List<OrcidQueue> findByEntity(Context context, Item item) throws SQLException {
+        Query query = createQuery(context, "FROM OrcidQueue WHERE entity.id = :itemId");
+        query.setParameter("itemId", item.getID());
+        return query.getResultList();
+    }
+
     @Override
     public List<OrcidQueue> findByEntityAndRecordType(Context context, Item entity, String type) throws SQLException {
         Query query = createQuery(context, "FROM OrcidQueue WHERE entity = :entity AND recordType = :type");

View File

@@ -164,6 +164,16 @@ public interface OrcidQueueService {
      */
     public List<OrcidQueue> findByProfileItemOrEntity(Context context, Item item) throws SQLException;

+    /**
+     * Get the OrcidQueue records where the given item is the entity.
+     *
+     * @param context DSpace context object
+     * @param item    the item to search for
+     * @return        the found OrcidQueue records
+     * @throws SQLException if database error
+     */
+    public List<OrcidQueue> findByEntity(Context context, Item item) throws SQLException;
+
     /**
      * Get all the OrcidQueue records with attempts less than the given attempts.
      *

View File

@@ -70,6 +70,11 @@ public class OrcidQueueServiceImpl implements OrcidQueueService {
         return orcidQueueDAO.findByProfileItemOrEntity(context, item);
     }

+    @Override
+    public List<OrcidQueue> findByEntity(Context context, Item item) throws SQLException {
+        return orcidQueueDAO.findByEntity(context, item);
+    }
+
     @Override
     public long countByProfileItemId(Context context, UUID profileItemId) throws SQLException {
         return orcidQueueDAO.countByProfileItemId(context, profileItemId);

View File

@@ -38,6 +38,7 @@ import org.dspace.eperson.EPerson;
 import org.dspace.eperson.service.EPersonService;
 import org.dspace.orcid.OrcidQueue;
 import org.dspace.orcid.OrcidToken;
+import org.dspace.orcid.client.OrcidClient;
 import org.dspace.orcid.model.OrcidEntityType;
 import org.dspace.orcid.model.OrcidTokenResponseDTO;
 import org.dspace.orcid.service.OrcidQueueService;
@@ -49,6 +50,8 @@ import org.dspace.profile.OrcidProfileSyncPreference;
 import org.dspace.profile.OrcidSynchronizationMode;
 import org.dspace.profile.service.ResearcherProfileService;
 import org.dspace.services.ConfigurationService;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;

 /**
@@ -59,6 +62,8 @@ import org.springframework.beans.factory.annotation.Autowired;
  */
 public class OrcidSynchronizationServiceImpl implements OrcidSynchronizationService {

+    private static final Logger log = LoggerFactory.getLogger(OrcidSynchronizationServiceImpl.class);
+
     @Autowired
     private ItemService itemService;
@@ -80,6 +85,9 @@ public class OrcidSynchronizationServiceImpl implements OrcidSynchronizationServ
     @Autowired
     private ResearcherProfileService researcherProfileService;

+    @Autowired
+    private OrcidClient orcidClient;
+
     @Override
     public void linkProfile(Context context, Item profile, OrcidTokenResponseDTO token) throws SQLException {
@@ -118,24 +126,11 @@ public class OrcidSynchronizationServiceImpl implements OrcidSynchronizationServ
     @Override
     public void unlinkProfile(Context context, Item profile) throws SQLException {

-        String orcid = itemService.getMetadataFirstValue(profile, "person", "identifier", "orcid", Item.ANY);
-
-        itemService.clearMetadata(context, profile, "person", "identifier", "orcid", Item.ANY);
-        itemService.clearMetadata(context, profile, "dspace", "orcid", "scope", Item.ANY);
-        itemService.clearMetadata(context, profile, "dspace", "orcid", "authenticated", Item.ANY);
-
-        if (!configurationService.getBooleanProperty("orcid.disconnection.remain-sync", false)) {
-            clearSynchronizationSettings(context, profile);
-        }
-
-        EPerson eperson = ePersonService.findByNetid(context, orcid);
-        if (eperson != null ) {
-            eperson.setNetid(null);
-            updateEPerson(context, eperson);
-        }
-
-        orcidTokenService.deleteByProfileItem(context, profile);
+        clearOrcidProfileMetadata(context, profile);
+        clearSynchronizationSettings(context, profile);
+        clearOrcidToken(context, profile);

         updateItem(context, profile);
@@ -146,6 +141,23 @@ public class OrcidSynchronizationServiceImpl implements OrcidSynchronizationServ
     }

+    private void clearOrcidToken(Context context, Item profile) {
+        OrcidToken profileToken = orcidTokenService.findByProfileItem(context, profile);
+        if (profileToken == null) {
+            log.warn("Cannot find any token related to the user profile: {}", profile.getID());
+            return;
+        }
+
+        orcidTokenService.deleteByProfileItem(context, profile);
+        orcidClient.revokeToken(profileToken);
+    }
+
+    private void clearOrcidProfileMetadata(Context context, Item profile) throws SQLException {
+        itemService.clearMetadata(context, profile, "person", "identifier", "orcid", Item.ANY);
+        itemService.clearMetadata(context, profile, "dspace", "orcid", "scope", Item.ANY);
+        itemService.clearMetadata(context, profile, "dspace", "orcid", "authenticated", Item.ANY);
+    }
+
     @Override
     public boolean setEntityPreference(Context context, Item profile, OrcidEntityType type,
         OrcidEntitySyncPreference value) throws SQLException {
@@ -291,6 +303,11 @@ public class OrcidSynchronizationServiceImpl implements OrcidSynchronizationServ
     private void clearSynchronizationSettings(Context context, Item profile)
         throws SQLException {
+
+        if (configurationService.getBooleanProperty("orcid.disconnection.remain-sync", false)) {
+            return;
+        }
+
         itemService.clearMetadata(context, profile, "dspace", "orcid", "sync-mode", Item.ANY);
         itemService.clearMetadata(context, profile, "dspace", "orcid", "sync-profile", Item.ANY);
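
Editor's note: summarizing the refactored disconnect flow as a hedged sketch (the call shown is illustrative; the behaviour described is what the diff implements):

    // Illustrative call, not in the patch:
    orcidSynchronizationService.unlinkProfile(context, profileItem);
    // -> clears the orcid person/scope/authenticated metadata,
    //    clears sync settings unless orcid.disconnection.remain-sync = true,
    //    deletes the stored OrcidToken row, and finally revokes the token
    //    at ORCID via OrcidClient.revokeToken(...).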

View File

@@ -243,7 +243,7 @@ public class RDFConsumer implements Consumer {
                 DSOIdentifier id = new DSOIdentifier(dso, ctx);
                 // If an item gets withdrawn, a MODIFY event is fired. We have to
                 // delete the item from the triple store instead of converting it.
-                // we don't have to take care for reinstantions of items as they can
+                // we don't have to take care for reinstate events on items as they can
                 // be processed as normal modify events.
                 if (dso instanceof Item
                     && event.getDetail() != null

View File

@@ -45,8 +45,8 @@ import org.dspace.core.Context;
 import org.dspace.core.LogHelper;
 import org.dspace.eperson.EPerson;
 import org.dspace.eperson.Group;
-import org.dspace.eperson.service.EPersonService;
 import org.dspace.scripts.service.ProcessService;
+import org.dspace.services.ConfigurationService;
 import org.springframework.beans.factory.annotation.Autowired;

 /**
@@ -72,7 +72,7 @@ public class ProcessServiceImpl implements ProcessService {
     private MetadataFieldService metadataFieldService;

     @Autowired
-    private EPersonService ePersonService;
+    private ConfigurationService configurationService;

     @Override
     public Process create(Context context, EPerson ePerson, String scriptName,
@@ -293,8 +293,8 @@ public class ProcessServiceImpl implements ProcessService {
     @Override
     public void appendLog(int processId, String scriptName, String output, ProcessLogLevel processLogLevel)
         throws IOException {
-        File tmpDir = FileUtils.getTempDirectory();
-        File tempFile = new File(tmpDir, scriptName + processId + ".log");
+        File logsDir = getLogsDirectory();
+        File tempFile = new File(logsDir, processId + "-" + scriptName + ".log");
         FileWriter out = new FileWriter(tempFile, true);
         try {
             try (BufferedWriter writer = new BufferedWriter(out)) {
@@ -309,12 +309,15 @@ public class ProcessServiceImpl implements ProcessService {
     @Override
     public void createLogBitstream(Context context, Process process)
         throws IOException, SQLException, AuthorizeException {
-        File tmpDir = FileUtils.getTempDirectory();
-        File tempFile = new File(tmpDir, process.getName() + process.getID() + ".log");
-        FileInputStream inputStream = FileUtils.openInputStream(tempFile);
-        appendFile(context, process, inputStream, Process.OUTPUT_TYPE, process.getName() + process.getID() + ".log");
-        inputStream.close();
-        tempFile.delete();
+        File logsDir = getLogsDirectory();
+        File tempFile = new File(logsDir, process.getID() + "-" + process.getName() + ".log");
+        if (tempFile.exists()) {
+            FileInputStream inputStream = FileUtils.openInputStream(tempFile);
+            appendFile(context, process, inputStream, Process.OUTPUT_TYPE,
+                process.getID() + "-" + process.getName() + ".log");
+            inputStream.close();
+            tempFile.delete();
+        }
     }

     @Override
@@ -328,6 +331,23 @@ public class ProcessServiceImpl implements ProcessService {
         return processDAO.countByUser(context, user);
     }

+    @Override
+    public void failRunningProcesses(Context context) throws SQLException, IOException, AuthorizeException {
+        List<Process> processesToBeFailed = findByStatusAndCreationTimeOlderThan(
+            context, List.of(ProcessStatus.RUNNING, ProcessStatus.SCHEDULED), new Date());
+        for (Process process : processesToBeFailed) {
+            context.setCurrentUser(process.getEPerson());
+            // Fail the process.
+            log.info("Process with ID {} did not complete before tomcat shutdown, failing it now.", process.getID());
+            fail(context, process);
+            // But still attach its log to the process.
+            appendLog(process.getID(), process.getName(),
+                "Process did not complete before tomcat shutdown.",
+                ProcessLogLevel.ERROR);
+            createLogBitstream(context, process);
+        }
+    }
+
     private String formatLogLine(int processId, String scriptName, String output, ProcessLogLevel processLogLevel) {
         SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
         StringBuilder sb = new StringBuilder();
@@ -343,4 +363,15 @@ public class ProcessServiceImpl implements ProcessService {
         return sb.toString();
     }

+    private File getLogsDirectory() {
+        String pathStr = configurationService.getProperty("dspace.dir")
+            + File.separator + "log" + File.separator + "processes";
+        File logsDir = new File(pathStr);
+        if (!logsDir.exists()) {
+            if (!logsDir.mkdirs()) {
+                throw new RuntimeException("Couldn't create [dspace.dir]/log/processes/ directory.");
+            }
+        }
+        return logsDir;
+    }
 }
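
Editor's note: the call site for failRunningProcesses() is not part of this diff; presumably it runs once at webapp startup so that processes orphaned by an unclean shutdown are failed before new ones start. A hedged sketch under that assumption:

    // Assumed startup hook; the factory lookup matches existing DSpace patterns,
    // but this exact call site is an assumption, not shown in the diff.
    Context context = new Context();
    ProcessService processService = ScriptServiceFactory.getInstance().getProcessService();
    processService.failRunningProcesses(context);
    context.complete();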

View File

@@ -277,4 +277,14 @@ public interface ProcessService {
      * @throws SQLException If something goes wrong
      */
     int countByUser(Context context, EPerson user) throws SQLException;

+    /**
+     * Cleans up running processes by failing them and attaching their logs to the process objects.
+     *
+     * @param context The DSpace context
+     * @throws SQLException If something goes wrong
+     * @throws IOException If something goes wrong
+     * @throws AuthorizeException If something goes wrong
+     */
+    void failRunningProcesses(Context context) throws SQLException, IOException, AuthorizeException;
 }

View File

@@ -357,7 +357,7 @@ public class StatisticsImporter {
             SolrInputDocument sid = new SolrInputDocument();
             sid.addField("ip", ip);
             sid.addField("type", dso.getType());
-            sid.addField("id", dso.getID());
+            sid.addField("id", dso.getID().toString());
             sid.addField("time", DateFormatUtils.format(date, SolrLoggerServiceImpl.DATE_FORMAT_8601));
             sid.addField("continent", continent);
             sid.addField("country", country);
@@ -471,13 +471,13 @@ public class StatisticsImporter {
         boolean verbose = line.hasOption('v');

         // Find our solr server
-        String sserver = configurationService.getProperty("solr-statistics", "server");
+        String sserver = configurationService.getProperty("solr-statistics.server");
         if (verbose) {
             System.out.println("Writing to solr server at: " + sserver);
         }
         solr = new HttpSolrClient.Builder(sserver).build();

-        String dbPath = configurationService.getProperty("usage-statistics", "dbfile");
+        String dbPath = configurationService.getProperty("usage-statistics.dbfile");
         try {
             File dbFile = new File(dbPath);
             geoipLookup = new DatabaseReader.Builder(dbFile).build();
@@ -492,6 +492,11 @@ public class StatisticsImporter {
                 "Unable to load GeoLite Database file (" + dbPath + ")! You may need to reinstall it. See the DSpace " +
                 "installation instructions for more details.",
                 e);
+        } catch (NullPointerException e) {
+            log.error(
+                "The value of the property usage-statistics.dbfile is null. You may need to install the GeoLite " +
+                "Database file and/or uncomment the property in the config file!",
+                e);
         }

View File

@@ -25,7 +25,7 @@
  * {@code EventService}, as with the stock listeners.
  * </p>
  *
- * @see org.dspace.google.GoogleRecorderEventListener
+ * @see org.dspace.google.GoogleAsyncEventListener
  * @see org.dspace.statistics.SolrLoggerUsageEventListener
  */

View File

@@ -33,6 +33,11 @@ import org.dspace.core.Context;
 import org.dspace.discovery.IndexEventConsumer;
 import org.dspace.event.Consumer;
 import org.dspace.event.Event;
+import org.dspace.orcid.OrcidHistory;
+import org.dspace.orcid.OrcidQueue;
+import org.dspace.orcid.factory.OrcidServiceFactory;
+import org.dspace.orcid.service.OrcidHistoryService;
+import org.dspace.orcid.service.OrcidQueueService;
 import org.dspace.versioning.factory.VersionServiceFactory;
 import org.dspace.versioning.service.VersionHistoryService;
 import org.dspace.versioning.utils.RelationshipVersioningUtils;
@@ -58,6 +63,8 @@ public class VersioningConsumer implements Consumer {
     private RelationshipTypeService relationshipTypeService;
     private RelationshipService relationshipService;
     private RelationshipVersioningUtils relationshipVersioningUtils;
+    private OrcidQueueService orcidQueueService;
+    private OrcidHistoryService orcidHistoryService;

     @Override
     public void initialize() throws Exception {
@@ -67,6 +74,8 @@ public class VersioningConsumer implements Consumer {
         relationshipTypeService = ContentServiceFactory.getInstance().getRelationshipTypeService();
         relationshipService = ContentServiceFactory.getInstance().getRelationshipService();
         relationshipVersioningUtils = VersionServiceFactory.getInstance().getRelationshipVersioningUtils();
+        this.orcidQueueService = OrcidServiceFactory.getInstance().getOrcidQueueService();
+        this.orcidHistoryService = OrcidServiceFactory.getInstance().getOrcidHistoryService();
     }

     @Override
@@ -132,7 +141,8 @@ public class VersioningConsumer implements Consumer {
         // unarchive previous item
         unarchiveItem(ctx, previousItem);

+        // handles versions for ORCID publications waiting to be shipped, or already published (history-queue).
+        handleOrcidSynchronization(ctx, previousItem, latestItem);
+
         // update relationships
         updateRelationships(ctx, latestItem, previousItem);
     }
@@ -148,6 +158,29 @@ public class VersioningConsumer implements Consumer {
         ));
     }

+    private void handleOrcidSynchronization(Context ctx, Item previousItem, Item latestItem) {
+        try {
+            replaceOrcidHistoryEntities(ctx, previousItem, latestItem);
+            removeOrcidQueueEntries(ctx, previousItem);
+        } catch (SQLException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    private void removeOrcidQueueEntries(Context ctx, Item previousItem) throws SQLException {
+        List<OrcidQueue> queueEntries = orcidQueueService.findByEntity(ctx, previousItem);
+        for (OrcidQueue queueEntry : queueEntries) {
+            orcidQueueService.delete(ctx, queueEntry);
+        }
+    }
+
+    private void replaceOrcidHistoryEntities(Context ctx, Item previousItem, Item latestItem) throws SQLException {
+        List<OrcidHistory> entries = orcidHistoryService.findByEntity(ctx, previousItem);
+        for (OrcidHistory entry : entries) {
+            entry.setEntity(latestItem);
+        }
+    }
+
     /**
      * Update {@link Relationship#latestVersionStatus} of the relationships of both the old version and the new version
      * of the item.

View File

@@ -8,6 +8,7 @@
 package org.dspace.xmlworkflow.state.actions.processingaction;

 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
@@ -20,6 +21,8 @@ import org.dspace.app.util.Util;
 import org.dspace.authorize.AuthorizeException;
 import org.dspace.content.MetadataFieldName;
 import org.dspace.core.Context;
+import org.dspace.services.ConfigurationService;
+import org.dspace.services.factory.DSpaceServicesFactory;
 import org.dspace.xmlworkflow.service.WorkflowRequirementsService;
 import org.dspace.xmlworkflow.state.Step;
 import org.dspace.xmlworkflow.state.actions.ActionAdvancedInfo;
@@ -34,6 +37,9 @@ import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
 public class ScoreReviewAction extends ProcessingAction {
     private static final Logger log = LogManager.getLogger(ScoreReviewAction.class);

+    private final ConfigurationService configurationService
+        = DSpaceServicesFactory.getInstance().getConfigurationService();
+
     // Option(s)
     public static final String SUBMIT_SCORE = "submit_score";
@@ -114,7 +120,14 @@ public class ScoreReviewAction extends ProcessingAction {
     @Override
     public List<String> getOptions() {
-        return List.of(SUBMIT_SCORE, RETURN_TO_POOL);
+        List<String> options = new ArrayList<>();
+        options.add(SUBMIT_SCORE);
+        if (configurationService.getBooleanProperty("workflow.reviewer.file-edit", false)) {
+            options.add(SUBMIT_EDIT_METADATA);
+        }
+        options.add(RETURN_TO_POOL);
+        return options;
     }

     @Override
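
Editor's note: this action and SingleUserReviewAction (next file) gate the new option on the same property. A minimal sketch of the toggle exactly as both actions read it; the property name comes from the diff, and the default of false preserves the previous option list:

    ConfigurationService configurationService =
        DSpaceServicesFactory.getInstance().getConfigurationService();
    // false by default, so sites must opt in explicitly, e.g. by setting
    // workflow.reviewer.file-edit = true in local.cfg (assumed location).
    boolean reviewerFileEdit =
        configurationService.getBooleanProperty("workflow.reviewer.file-edit", false);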

View File

@@ -21,6 +21,8 @@ import org.dspace.content.WorkspaceItem;
 import org.dspace.content.factory.ContentServiceFactory;
 import org.dspace.core.Context;
 import org.dspace.eperson.EPerson;
+import org.dspace.services.ConfigurationService;
+import org.dspace.services.factory.DSpaceServicesFactory;
 import org.dspace.workflow.WorkflowException;
 import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory;
 import org.dspace.xmlworkflow.state.Step;
@@ -40,6 +42,9 @@ import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
 public class SingleUserReviewAction extends ProcessingAction {
     private static final Logger log = LogManager.getLogger(SingleUserReviewAction.class);

+    private final ConfigurationService configurationService
+        = DSpaceServicesFactory.getInstance().getConfigurationService();
+
     public static final int OUTCOME_REJECT = 1;

     protected static final String SUBMIT_DECLINE_TASK = "submit_decline_task";
@@ -95,6 +100,9 @@ public class SingleUserReviewAction extends ProcessingAction {
     public List<String> getOptions() {
         List<String> options = new ArrayList<>();
         options.add(SUBMIT_APPROVE);
+        if (configurationService.getBooleanProperty("workflow.reviewer.file-edit", false)) {
+            options.add(SUBMIT_EDIT_METADATA);
+        }
         options.add(SUBMIT_REJECT);
         options.add(SUBMIT_DECLINE_TASK);
         return options;

View File

@@ -13,6 +13,7 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Optional;
 import java.util.Set;

 import org.apache.commons.collections4.CollectionUtils;
@@ -100,12 +101,17 @@ public class PoolTaskServiceImpl implements PoolTaskService {
                 //If the user does not have a claimedtask yet, see whether one of the groups of the user has pooltasks
                 //for this workflow item
                 Set<Group> groups = groupService.allMemberGroupsSet(context, ePerson);
-                for (Group group : groups) {
-                    poolTask = poolTaskDAO.findByWorkflowItemAndGroup(context, group, workflowItem);
-                    if (poolTask != null) {
-                        return poolTask;
-                    }
+                List<PoolTask> generalTasks = poolTaskDAO.findByWorkflowItem(context, workflowItem);
+
+                Optional<PoolTask> firstClaimedTask = groups.stream()
+                    .flatMap(group -> generalTasks.stream()
+                        .filter(f -> f.getGroup().getID().equals(group.getID()))
+                        .findFirst()
+                        .stream())
+                    .findFirst();
+
+                if (firstClaimedTask.isPresent()) {
+                    return firstClaimedTask.get();
                 }
             }
         }
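
Editor's note: the stream chain replaces one DAO query per group with a single findByWorkflowItem query plus in-memory filtering. It is equivalent to this plain loop over the pre-fetched list (sketch, same semantics):

    // Return the first pool task whose group matches any of the user's groups,
    // in group iteration order, using only the single pre-fetched list.
    for (Group group : groups) {
        for (PoolTask task : generalTasks) {
            if (task.getGroup().getID().equals(group.getID())) {
                return task;
            }
        }
    }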

View File

@@ -0,0 +1,21 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-- In the workspaceitem table, if there are multiple rows referring to the same item ID, keep only the first of them.
DELETE FROM workspaceitem WHERE EXISTS (
SELECT item_id
FROM workspaceitem
GROUP BY item_id
HAVING COUNT(workspace_item_id) > 1
) AND workspaceitem.workspace_item_id NOT IN (
SELECT MIN(workspace_item_id) AS workspace_item_id
FROM workspaceitem
GROUP BY item_id
);
-- Enforce uniqueness of item_id in workspaceitem table.
ALTER TABLE workspaceitem ADD CONSTRAINT unique_item_id UNIQUE(item_id);

View File

@@ -0,0 +1,21 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-- In the workspaceitem table, if there are multiple rows referring to the same item ID, keep only the first of them.
WITH dedup AS (
SELECT item_id, MIN(workspace_item_id) AS workspace_item_id
FROM workspaceitem
GROUP BY item_id
HAVING COUNT(workspace_item_id) > 1
)
DELETE FROM workspaceitem
USING dedup
WHERE workspaceitem.item_id = dedup.item_id AND workspaceitem.workspace_item_id <> dedup.workspace_item_id;
-- Enforce uniqueness of item_id in workspaceitem table.
ALTER TABLE workspaceitem ADD CONSTRAINT unique_item_id UNIQUE(item_id);

View File

@@ -51,11 +51,21 @@
     <bean id="DataCiteImportService"
           class="org.dspace.importer.external.datacite.DataCiteImportMetadataSourceServiceImpl" scope="singleton">
         <property name="metadataFieldMapping" ref="DataCiteMetadataFieldMapping"/>
+        <property name="entityFilterQuery" value="${datacite.publicationimport.entityfilterquery}" />
     </bean>

     <bean id="DataCiteMetadataFieldMapping"
           class="org.dspace.importer.external.datacite.DataCiteFieldMapping">
     </bean>

+    <bean id="DataCiteProjectImportService"
+          class="org.dspace.importer.external.datacite.DataCiteProjectImportMetadataSourceServiceImpl" scope="singleton">
+        <property name="metadataFieldMapping" ref="DataCiteProjectMetadataFieldMapping"/>
+        <property name="entityFilterQuery" value="${datacite.projectimport.entityfilterquery}" />
+    </bean>
+
+    <bean id="DataCiteProjectMetadataFieldMapping"
+          class="org.dspace.importer.external.datacite.DataCiteProjectFieldMapping">
+    </bean>
+
     <bean id="ArXivImportService"
           class="org.dspace.importer.external.arxiv.service.ArXivImportMetadataSourceServiceImpl" scope="singleton">
         <property name="metadataFieldMapping" ref="ArXivMetadataFieldMapping"/>

View File

@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<node id='Countries' label='Countries'>
    <isComposedBy>
        <node id='Africa' label='Africa'>
            <isComposedBy>
                <node id='DZA' label='Algeria'/>
            </isComposedBy>
        </node>
    </isComposedBy>
</node>

View File

@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<node id='Countries' label='Länder'>
    <isComposedBy>
        <node id='Africa' label='Afrika'>
            <isComposedBy>
                <node id='DZA' label='Algerien'/>
            </isComposedBy>
        </node>
    </isComposedBy>
</node>

View File

@@ -156,8 +156,6 @@ useProxies = true
 proxies.trusted.ipranges = 7.7.7.7
 proxies.trusted.include_ui_ip = true

-csvexport.dir = dspace-server-webapp/src/test/data/dspaceFolder/exports
-
 # For the tests we have to disable this health indicator because there isn't a mock server and the calculated status was DOWN
 management.health.solrOai.enabled = false
@@ -175,6 +173,9 @@ authority.controlled.dspace.object.owner = true
 webui.browse.link.1 = author:dc.contributor.*
 webui.browse.link.2 = subject:dc.subject.*

+# Configuration required for testing the controlled vocabulary functionality, which is configured using properties
+vocabulary.plugin.countries.hierarchy.store=false
+vocabulary.plugin.countries.storeIDs=true
+
 # Enable duplicate detection for tests
 duplicate.enable = true

View File

@@ -104,5 +104,16 @@
             </list>
         </property>
     </bean>

+    <bean id="dataciteProjectLiveImportDataProvider" class="org.dspace.external.provider.impl.LiveImportDataProvider">
+        <property name="metadataSource" ref="DataCiteProjectImportService"/>
+        <property name="sourceIdentifier" value="dataciteProject"/>
+        <property name="recordIdMetadata" value="dc.identifier"/>
+        <property name="supportedEntityTypes">
+            <list>
+                <value>Project</value>
+            </list>
+        </property>
+    </bean>
 </beans>

View File

@@ -21,8 +21,12 @@ import org.dspace.builder.AbstractBuilder;
 import org.dspace.discovery.SearchUtils;
 import org.dspace.servicemanager.DSpaceKernelImpl;
 import org.dspace.servicemanager.DSpaceKernelInit;
+import org.junit.After;
 import org.junit.AfterClass;
+import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.rules.TestName;

 /**
  * Abstract Test class copied from DSpace API
@@ -46,6 +50,12 @@ public class AbstractDSpaceIntegrationTest {
      */
     protected static DSpaceKernelImpl kernelImpl;

+    /**
+     * Obtain the TestName from JUnit, so that we can print it out in the test logs (see below)
+     */
+    @Rule
+    public TestName testName = new TestName();
+
     /**
      * Default constructor
      */
@@ -90,6 +100,20 @@ public class AbstractDSpaceIntegrationTest {
         }
     }

+    @Before
+    public void printTestMethodBefore() {
+        // Log the test method being executed. Put lines around it to make it stand out.
+        log.info("---");
+        log.info("Starting execution of test method: {}()", testName.getMethodName());
+        log.info("---");
+    }
+
+    @After
+    public void printTestMethodAfter() {
+        // Log the test method just completed.
+        log.info("Finished execution of test method: {}()", testName.getMethodName());
+    }
+
     /**
      * This method will be run after all tests finish as per @AfterClass. It
      * will clean resources initialized by the @BeforeClass methods.

View File

@@ -18,9 +18,13 @@ import java.util.TimeZone;
 import org.apache.logging.log4j.Logger;
 import org.dspace.servicemanager.DSpaceKernelImpl;
 import org.dspace.servicemanager.DSpaceKernelInit;
+import org.junit.After;
 import org.junit.AfterClass;
+import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.rules.TestName;
 import org.junit.runner.RunWith;
 import org.mockito.junit.MockitoJUnitRunner;

@@ -62,6 +66,12 @@ public class AbstractDSpaceTest {
      */
     protected static DSpaceKernelImpl kernelImpl;

+    /**
+     * Obtain the TestName from JUnit, so that we can print it out in the test logs (see below)
+     */
+    @Rule
+    public TestName testName = new TestName();
+
     /**
      * This method will be run before the first test as per @BeforeClass. It will
      * initialize shared resources required for all tests of this class.
@@ -94,6 +104,19 @@ public class AbstractDSpaceTest {
         }
     }

+    @Before
+    public void printTestMethodBefore() {
+        // Log the test method being executed. Put lines around it to make it stand out.
+        log.info("---");
+        log.info("Starting execution of test method: {}()", testName.getMethodName());
+        log.info("---");
+    }
+
+    @After
+    public void printTestMethodAfter() {
+        // Log the test method just completed.
+        log.info("Finished execution of test method: {}()", testName.getMethodName());
+    }
+
     /**
      * This method will be run after all tests finish as per @AfterClass. It

View File

@@ -20,8 +20,8 @@ import org.dspace.app.launcher.ScriptLauncher;
 import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
 import org.dspace.authority.AuthoritySearchService;
 import org.dspace.authority.MockAuthoritySolrServiceImpl;
-import org.dspace.authorize.AuthorizeException;
 import org.dspace.builder.AbstractBuilder;
+import org.dspace.builder.EPersonBuilder;
 import org.dspace.content.Community;
 import org.dspace.core.Context;
 import org.dspace.core.I18nUtil;
@@ -127,19 +127,16 @@ public class AbstractIntegrationTestWithDatabase extends AbstractDSpaceIntegrati
             EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
             eperson = ePersonService.findByEmail(context, "test@email.com");
             if (eperson == null) {
-                // This EPerson creation should only happen once (i.e. for first test run)
-                log.info("Creating initial EPerson (email=test@email.com) for Unit Tests");
-                eperson = ePersonService.create(context);
-                eperson.setFirstName(context, "first");
-                eperson.setLastName(context, "last");
-                eperson.setEmail("test@email.com");
-                eperson.setCanLogIn(true);
-                eperson.setLanguage(context, I18nUtil.getDefaultLocale().getLanguage());
-                ePersonService.setPassword(eperson, password);
-                // actually save the eperson to unit testing DB
-                ePersonService.update(context, eperson);
+                // Create test EPerson for usage in all tests
+                log.info("Creating Test EPerson (email=test@email.com) for Integration Tests");
+                eperson = EPersonBuilder.createEPerson(context)
+                    .withNameInMetadata("first", "last")
+                    .withEmail("test@email.com")
+                    .withCanLogin(true)
+                    .withLanguage(I18nUtil.getDefaultLocale().getLanguage())
+                    .withPassword(password)
+                    .build();
             }

             // Set our global test EPerson as the current user in DSpace
             context.setCurrentUser(eperson);
@@ -148,26 +145,23 @@ public class AbstractIntegrationTestWithDatabase extends AbstractDSpaceIntegrati
             admin = ePersonService.findByEmail(context, "admin@email.com");
             if (admin == null) {
-                // This EPerson creation should only happen once (i.e. for first test run)
-                log.info("Creating initial EPerson (email=admin@email.com) for Unit Tests");
-                admin = ePersonService.create(context);
-                admin.setFirstName(context, "first (admin)");
-                admin.setLastName(context, "last (admin)");
-                admin.setEmail("admin@email.com");
-                admin.setCanLogIn(true);
-                admin.setLanguage(context, I18nUtil.getDefaultLocale().getLanguage());
-                ePersonService.setPassword(admin, password);
-                // actually save the eperson to unit testing DB
-                ePersonService.update(context, admin);
+                // Create test Administrator for usage in all tests
+                log.info("Creating Test Admin EPerson (email=admin@email.com) for Integration Tests");
+                admin = EPersonBuilder.createEPerson(context)
+                    .withNameInMetadata("first (admin)", "last (admin)")
+                    .withEmail("admin@email.com")
+                    .withCanLogin(true)
+                    .withLanguage(I18nUtil.getDefaultLocale().getLanguage())
+                    .withPassword(password)
+                    .build();
+
+                // Add Test Administrator to the ADMIN group in test database
                 GroupService groupService = EPersonServiceFactory.getInstance().getGroupService();
                 Group adminGroup = groupService.findByName(context, Group.ADMIN);
                 groupService.addMember(context, adminGroup, admin);
             }
             context.restoreAuthSystemState();
-        } catch (AuthorizeException ex) {
-            log.error("Error creating initial eperson or default groups", ex);
-            fail("Error creating initial eperson or default groups in AbstractUnitTest init()");
         } catch (SQLException ex) {
             log.error(ex.getMessage(), ex);
             fail("SQL Error on AbstractUnitTest init()");

View File

@@ -23,7 +23,8 @@ import java.util.List;
 import com.google.common.io.Files;
 import com.opencsv.CSVReader;
 import com.opencsv.exceptions.CsvException;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.dspace.AbstractIntegrationTestWithDatabase;
 import org.dspace.app.launcher.ScriptLauncher;
 import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
@@ -51,7 +52,7 @@ public class MetadataExportSearchIT extends AbstractIntegrationTestWithDatabase
     private Item[] itemsSubject2 = new Item[numberItemsSubject2];
     private String filename;
     private Collection collection;
-    private Logger logger = Logger.getLogger(MetadataExportSearchIT.class);
+    private Logger logger = LogManager.getLogger(MetadataExportSearchIT.class);
     private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
     private SearchService searchService;

View File

@@ -0,0 +1,511 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authenticate;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.util.List;
import jakarta.servlet.http.HttpServletRequest;
import org.dspace.AbstractUnitTest;
import org.dspace.builder.AbstractBuilder;
import org.dspace.builder.EPersonBuilder;
import org.dspace.content.MetadataValue;
import org.dspace.eperson.EPerson;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.springframework.mock.web.MockHttpServletRequest;
public class SamlAuthenticationTest extends AbstractUnitTest {
private static ConfigurationService configurationService;
private HttpServletRequest request;
private SamlAuthentication samlAuth;
private EPerson testUser;
@BeforeClass
public static void beforeAll() {
configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
AbstractBuilder.init(); // AbstractUnitTest doesn't do this for us.
}
@Before
public void beforeEach() throws Exception {
configurationService.setProperty("authentication-saml.autoregister", true);
configurationService.setProperty("authentication-saml.eperson.metadata.autocreate", true);
request = new MockHttpServletRequest();
samlAuth = new SamlAuthentication();
testUser = null;
}
@After
public void afterEach() throws Exception {
if (testUser != null) {
EPersonBuilder.deleteEPerson(testUser.getID());
}
}
@AfterClass
public static void afterAll() {
AbstractBuilder.destroy(); // AbstractUnitTest doesn't do this for us.
}
@Test
public void testAuthenticateExistingUserByEmail() throws Exception {
context.setCurrentUser(null);
context.turnOffAuthorisationSystem();
testUser = EPersonBuilder.createEPerson(context)
.withEmail("alyssa@dspace.org")
.withNameInMetadata("Alyssa", "Hacker")
.withCanLogin(true)
.build();
context.restoreAuthSystemState();
request.setAttribute("org.dspace.saml.EMAIL", List.of("alyssa@dspace.org"));
int result = samlAuth.authenticate(context, null, null, null, request);
assertEquals(AuthenticationMethod.SUCCESS, result);
EPerson user = context.getCurrentUser();
assertNotNull(user);
assertEquals("alyssa@dspace.org", user.getEmail());
assertNull(user.getNetid());
assertEquals("Alyssa", user.getFirstName());
assertEquals("Hacker", user.getLastName());
}
@Test
public void testAuthenticateExistingUserByNetId() throws Exception {
context.setCurrentUser(null);
context.turnOffAuthorisationSystem();
testUser = EPersonBuilder.createEPerson(context)
.withEmail("alyssa@dspace.org")
.withNetId("001")
.withNameInMetadata("Alyssa", "Hacker")
.withCanLogin(true)
.build();
context.restoreAuthSystemState();
request.setAttribute("org.dspace.saml.NAME_ID", "001");
int result = samlAuth.authenticate(context, null, null, null, request);
assertEquals(AuthenticationMethod.SUCCESS, result);
EPerson user = context.getCurrentUser();
assertNotNull(user);
assertEquals("alyssa@dspace.org", user.getEmail());
assertEquals("001", user.getNetid());
assertEquals("Alyssa", user.getFirstName());
assertEquals("Hacker", user.getLastName());
}
@Test
public void testAuthenticateExistingUserByEmailWithUnexpectedNetId() throws Exception {
EPerson originalUser = context.getCurrentUser();
context.turnOffAuthorisationSystem();
testUser = EPersonBuilder.createEPerson(context)
.withEmail("ben@dspace.org")
.withNetId("002")
.withNameInMetadata("Ben", "Bitdiddle")
.withCanLogin(true)
.build();
context.restoreAuthSystemState();
request.setAttribute("org.dspace.saml.EMAIL", List.of("ben@dspace.org"));
request.setAttribute("org.dspace.saml.NAME_ID", "oh-no-its-different-than-the-stored-netid");
int result = samlAuth.authenticate(context, null, null, null, request);
assertEquals(AuthenticationMethod.NO_SUCH_USER, result);
assertEquals(originalUser, context.getCurrentUser());
}
@Test
public void testAuthenticateExistingUserByEmailUpdatesNullNetId() throws Exception {
context.setCurrentUser(null);
context.turnOffAuthorisationSystem();
testUser = EPersonBuilder.createEPerson(context)
.withEmail("carrie@dspace.org")
.withNameInMetadata("Carrie", "Pragma")
.withCanLogin(true)
.build();
context.restoreAuthSystemState();
request.setAttribute("org.dspace.saml.EMAIL", List.of("carrie@dspace.org"));
request.setAttribute("org.dspace.saml.NAME_ID", "netid-from-idp");
int result = samlAuth.authenticate(context, null, null, null, request);
assertEquals(AuthenticationMethod.SUCCESS, result);
EPerson user = context.getCurrentUser();
assertNotNull(user);
assertEquals("carrie@dspace.org", user.getEmail());
assertEquals("netid-from-idp", user.getNetid());
assertEquals("Carrie", user.getFirstName());
assertEquals("Pragma", user.getLastName());
}
@Test
public void testAuthenticateExistingUserByNetIdUpdatesEmail() throws Exception {
context.setCurrentUser(null);
context.turnOffAuthorisationSystem();
testUser = EPersonBuilder.createEPerson(context)
.withEmail("alyssa@dspace.org")
.withNetId("001")
.withNameInMetadata("Alyssa", "Hacker")
.withCanLogin(true)
.build();
context.restoreAuthSystemState();
request.setAttribute("org.dspace.saml.NAME_ID", "001");
request.setAttribute("org.dspace.saml.EMAIL", List.of("aphacker@dspace.org"));
int result = samlAuth.authenticate(context, null, null, null, request);
assertEquals(AuthenticationMethod.SUCCESS, result);
EPerson user = context.getCurrentUser();
assertNotNull(user);
assertEquals("aphacker@dspace.org", user.getEmail());
assertEquals("001", user.getNetid());
assertEquals("Alyssa", user.getFirstName());
assertEquals("Hacker", user.getLastName());
}
@Test
public void testAuthenticateExistingUserUpdatesName() throws Exception {
context.setCurrentUser(null);
context.turnOffAuthorisationSystem();
testUser = EPersonBuilder.createEPerson(context)
.withEmail("alyssa@dspace.org")
.withNetId("001")
.withNameInMetadata("Alyssa", "Hacker")
.withCanLogin(true)
.build();
context.restoreAuthSystemState();
request.setAttribute("org.dspace.saml.NAME_ID", "001");
request.setAttribute("org.dspace.saml.GIVEN_NAME", "Liz");
request.setAttribute("org.dspace.saml.SURNAME", "Hacker-Bitdiddle");
int result = samlAuth.authenticate(context, null, null, null, request);
assertEquals(AuthenticationMethod.SUCCESS, result);
EPerson user = context.getCurrentUser();
assertNotNull(user);
assertEquals("alyssa@dspace.org", user.getEmail());
assertEquals("001", user.getNetid());
assertEquals("Liz", user.getFirstName());
assertEquals("Hacker-Bitdiddle", user.getLastName());
}
@Test
public void testAuthenticateExistingUserAdditionalMetadata() throws Exception {
configurationService.setProperty("authentication-saml.eperson.metadata",
"org.dspace.saml.PHONE => phone," +
"org.dspace.saml.NICKNAME => nickname");
context.setCurrentUser(null);
context.turnOffAuthorisationSystem();
testUser = EPersonBuilder.createEPerson(context)
.withEmail("alyssa@dspace.org")
.withNetId("001")
.withNameInMetadata("Alyssa", "Hacker")
.withCanLogin(true)
.build();
context.restoreAuthSystemState();
request.setAttribute("org.dspace.saml.NAME_ID", "001");
request.setAttribute("org.dspace.saml.PHONE", "123-456-7890");
request.setAttribute("org.dspace.saml.NICKNAME", "Liz");
int result = samlAuth.authenticate(context, null, null, null, request);
assertEquals(AuthenticationMethod.SUCCESS, result);
EPerson user = context.getCurrentUser();
assertNotNull(user);
assertEquals("alyssa@dspace.org", user.getEmail());
assertEquals("001", user.getNetid());
assertEquals("Alyssa", user.getFirstName());
assertEquals("Hacker", user.getLastName());
List<MetadataValue> metadata = user.getMetadata();
assertEquals(4, metadata.size());
assertEquals("eperson_phone", metadata.get(2).getMetadataField().toString());
assertEquals("123-456-7890", metadata.get(2).getValue());
assertEquals("eperson_nickname", metadata.get(3).getMetadataField().toString());
assertEquals("Liz", metadata.get(3).getValue());
}
@Test
public void testInvalidAdditionalMetadataMappingsAreIgnored() throws Exception {
configurationService.setProperty("authentication-saml.eperson.metadata",
"oops this is bad," +
"org.dspace.saml.NICKNAME => nickname");
context.setCurrentUser(null);
context.turnOffAuthorisationSystem();
testUser = EPersonBuilder.createEPerson(context)
.withEmail("alyssa@dspace.org")
.withNetId("001")
.withNameInMetadata("Alyssa", "Hacker")
.withCanLogin(true)
.build();
context.restoreAuthSystemState();
request.setAttribute("org.dspace.saml.NAME_ID", "001");
request.setAttribute("org.dspace.saml.PHONE", "123-456-7890");
request.setAttribute("org.dspace.saml.NICKNAME", "Liz");
int result = samlAuth.authenticate(context, null, null, null, request);
assertEquals(AuthenticationMethod.SUCCESS, result);
EPerson user = context.getCurrentUser();
assertNotNull(user);
assertEquals("alyssa@dspace.org", user.getEmail());
assertEquals("001", user.getNetid());
assertEquals("Alyssa", user.getFirstName());
assertEquals("Hacker", user.getLastName());
List<MetadataValue> metadata = user.getMetadata();
assertEquals(3, metadata.size());
assertEquals("eperson_nickname", metadata.get(2).getMetadataField().toString());
assertEquals("Liz", metadata.get(2).getValue());
}
@Test
public void testAuthenticateExistingUserAdditionalMetadataAutocreateDisabled() throws Exception {
configurationService.setProperty("authentication-saml.eperson.metadata.autocreate", false);
configurationService.setProperty("authentication-saml.eperson.metadata",
"org.dspace.saml.PHONE => phone," +
"org.dspace.saml.DEPARTMENT => department");
context.setCurrentUser(null);
context.turnOffAuthorisationSystem();
testUser = EPersonBuilder.createEPerson(context)
.withEmail("alyssa@dspace.org")
.withNetId("001")
.withNameInMetadata("Alyssa", "Hacker")
.withCanLogin(true)
.build();
context.restoreAuthSystemState();
request.setAttribute("org.dspace.saml.NAME_ID", "001");
request.setAttribute("org.dspace.saml.PHONE", "123-456-7890");
request.setAttribute("org.dspace.saml.DEPARTMENT", "Library");
int result = samlAuth.authenticate(context, null, null, null, request);
assertEquals(AuthenticationMethod.SUCCESS, result);
EPerson user = context.getCurrentUser();
assertNotNull(user);
assertEquals("alyssa@dspace.org", user.getEmail());
assertEquals("001", user.getNetid());
assertEquals("Alyssa", user.getFirstName());
assertEquals("Hacker", user.getLastName());
List<MetadataValue> metadata = user.getMetadata();
assertEquals(3, metadata.size());
assertEquals("eperson_phone", metadata.get(2).getMetadataField().toString());
assertEquals("123-456-7890", metadata.get(2).getValue());
}
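
/**
 * A mapped field whose name contains characters invalid in a metadata field
 * name should not be autocreated; the remaining valid mapping still applies.
 */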
@Test
public void testAdditionalMetadataWithInvalidNameNotAutocreated() throws Exception {
configurationService.setProperty("authentication-saml.eperson.metadata",
"org.dspace.saml.PHONE => phone," +
"org.dspace.saml.DEPARTMENT => (department)"); // parens not allowed
context.setCurrentUser(null);
context.turnOffAuthorisationSystem();
testUser = EPersonBuilder.createEPerson(context)
.withEmail("alyssa@dspace.org")
.withNetId("001")
.withNameInMetadata("Alyssa", "Hacker")
.withCanLogin(true)
.build();
context.restoreAuthSystemState();
request.setAttribute("org.dspace.saml.NAME_ID", "001");
request.setAttribute("org.dspace.saml.PHONE", "123-456-7890");
request.setAttribute("org.dspace.saml.DEPARTMENT", "Library");
int result = samlAuth.authenticate(context, null, null, null, request);
assertEquals(AuthenticationMethod.SUCCESS, result);
EPerson user = context.getCurrentUser();
assertNotNull(user);
assertEquals("alyssa@dspace.org", user.getEmail());
assertEquals("001", user.getNetid());
assertEquals("Alyssa", user.getFirstName());
assertEquals("Hacker", user.getLastName());
List<MetadataValue> metadata = user.getMetadata();
assertEquals(3, metadata.size());
assertEquals("eperson_phone", metadata.get(2).getMetadataField().toString());
assertEquals("123-456-7890", metadata.get(2).getValue());
}
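
/**
 * A matched EPerson whose account has login disabled should be rejected,
 * leaving the context's current user unchanged.
 */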
@Test
public void testExistingUserLoginDisabled() throws Exception {
EPerson originalUser = context.getCurrentUser();
context.turnOffAuthorisationSystem();
testUser = EPersonBuilder.createEPerson(context)
.withEmail("alyssa@dspace.org")
.withNameInMetadata("Alyssa", "Hacker")
.withCanLogin(false)
.build();
context.restoreAuthSystemState();
request.setAttribute("org.dspace.saml.EMAIL", List.of("alyssa@dspace.org"));
int result = samlAuth.authenticate(context, null, null, null, request);
assertEquals(AuthenticationMethod.BAD_ARGS, result);
assertEquals(originalUser, context.getCurrentUser());
}
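
/**
 * An unknown NetID with no email attribute cannot be auto-registered, so
 * authentication should report NO_SUCH_USER.
 */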
@Test
public void testNonExistentUserWithoutEmail() throws Exception {
EPerson originalUser = context.getCurrentUser();
request.setAttribute("org.dspace.saml.NAME_ID", "non-existent-netid");
int result = samlAuth.authenticate(context, null, null, null, request);
assertEquals(AuthenticationMethod.NO_SUCH_USER, result);
assertEquals(originalUser, context.getCurrentUser());
}
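
/**
 * An unknown user presenting an email should be auto-registered as a
 * self-registered, login-enabled EPerson populated from the SAML attributes.
 */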
@Test
public void testNonExistentUserWithEmailAutoregisterEnabled() throws Exception {
context.setCurrentUser(null);
request.setAttribute("org.dspace.saml.NAME_ID", "non-existent-netid");
request.setAttribute("org.dspace.saml.EMAIL", List.of("ben@dspace.org"));
request.setAttribute("org.dspace.saml.GIVEN_NAME", "Ben");
request.setAttribute("org.dspace.saml.SURNAME", "Bitdiddle");
int result = samlAuth.authenticate(context, null, null, null, request);
assertEquals(AuthenticationMethod.SUCCESS, result);
EPerson user = context.getCurrentUser();
assertNotNull(user);
assertEquals("ben@dspace.org", user.getEmail());
assertEquals("non-existent-netid", user.getNetid());
assertEquals("Ben", user.getFirstName());
assertEquals("Bitdiddle", user.getLastName());
assertTrue(user.canLogIn());
assertTrue(user.getSelfRegistered());
testUser = user; // Make sure the autoregistered user gets deleted.
}
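
/**
 * With authentication-saml.autoregister disabled, an unknown user should not
 * be created even when an email attribute is present.
 */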
@Test
public void testNonExistentUserWithEmailAutoregisterDisabled() throws Exception {
configurationService.setProperty("authentication-saml.autoregister", false);
EPerson originalUser = context.getCurrentUser();
request.setAttribute("org.dspace.saml.NAME_ID", "non-existent-netid");
request.setAttribute("org.dspace.saml.EMAIL", List.of("ben@dspace.org"));
request.setAttribute("org.dspace.saml.GIVEN_NAME", "Ben");
request.setAttribute("org.dspace.saml.SURNAME", "Bitdiddle");
int result = samlAuth.authenticate(context, null, null, null, request);
assertEquals(AuthenticationMethod.NO_SUCH_USER, result);
assertEquals(originalUser, context.getCurrentUser());
}
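
/**
 * A request carrying neither a NameID nor an email cannot be matched to any
 * EPerson.
 */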
@Test
public void testNoEmailOrNameIdInRequest() throws Exception {
context.setCurrentUser(null);
context.turnOffAuthorisationSystem();
testUser = EPersonBuilder.createEPerson(context)
.withEmail("alyssa@dspace.org")
.withNameInMetadata("Alyssa", "Hacker")
.withCanLogin(true)
.build();
context.restoreAuthSystemState();
int result = samlAuth.authenticate(context, null, null, null, request);
assertEquals(AuthenticationMethod.NO_SUCH_USER, result);
}
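
/**
 * A null request is invalid input and should fail fast with BAD_ARGS.
 */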
@Test
public void testRequestIsNull() throws Exception {
EPerson originalUser = context.getCurrentUser();
int result = samlAuth.authenticate(context, null, null, null, null);
assertEquals(AuthenticationMethod.BAD_ARGS, result);
assertEquals(originalUser, context.getCurrentUser());
}
}

View File

@@ -11,7 +11,8 @@ import java.io.IOException;
 import java.sql.SQLException;
 import java.util.Date;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.dspace.content.Item;
 import org.dspace.core.Context;
 import org.dspace.orcid.OrcidHistory;
@@ -24,7 +25,7 @@ import org.dspace.orcid.service.OrcidHistoryService;
  */
 public class OrcidHistoryBuilder extends AbstractBuilder<OrcidHistory, OrcidHistoryService> {
 
-    private static final Logger log = Logger.getLogger(OrcidHistoryBuilder.class);
+    private static final Logger log = LogManager.getLogger(OrcidHistoryBuilder.class);
 
     private OrcidHistory orcidHistory;

View File

@@ -59,7 +59,7 @@ import org.dspace.content.virtual.Collected;
 import org.dspace.content.virtual.VirtualMetadataConfiguration;
 import org.dspace.content.virtual.VirtualMetadataPopulator;
 import org.dspace.core.Constants;
-import org.dspace.discovery.SolrSearchCore;
+import org.dspace.discovery.MockSolrSearchCore;
 import org.dspace.kernel.ServiceManager;
 import org.dspace.services.factory.DSpaceServicesFactory;
 import org.dspace.versioning.Version;
@@ -79,8 +79,9 @@ public class VersioningWithRelationshipsIT extends AbstractIntegrationTestWithDatabase {
             ContentServiceFactory.getInstance().getInstallItemService();
     private final ItemService itemService =
             ContentServiceFactory.getInstance().getItemService();
-    private final SolrSearchCore solrSearchCore =
-            DSpaceServicesFactory.getInstance().getServiceManager().getServicesByType(SolrSearchCore.class).get(0);
+    private final MockSolrSearchCore solrSearchCore =
+            DSpaceServicesFactory.getInstance().getServiceManager().getServiceByName(null, MockSolrSearchCore.class);
 
     protected Community community;
     protected Collection collection;
     protected EntityType publicationEntityType;

View File

@@ -12,6 +12,7 @@ import static org.hamcrest.CoreMatchers.notNullValue;
 import static org.hamcrest.CoreMatchers.nullValue;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertThrows;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 import static org.mockito.ArgumentMatchers.any;
@@ -39,6 +40,7 @@ import org.dspace.core.Context;
 import org.dspace.eperson.EPerson;
 import org.dspace.eperson.factory.EPersonServiceFactory;
 import org.dspace.eperson.service.EPersonService;
+import org.dspace.workflow.MockWorkflowItem;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -468,4 +470,14 @@ public class WorkspaceItemTest extends AbstractUnitTest {
         assertTrue("testSetPublishedBefore 0", wi.isPublishedBefore());
     }
 
+    @Test
+    public void testDuplicateItemID() throws Exception {
+        context.turnOffAuthorisationSystem();
+        Item item = wi.getItem();
+        MockWorkflowItem wfItem = new MockWorkflowItem();
+        wfItem.item = item;
+        wfItem.collection = collection;
+        assertThrows(IllegalArgumentException.class, () -> workspaceItemService.create(context, wfItem));
+        context.restoreAuthSystemState();
+    }
 }

View File

@@ -89,6 +89,145 @@ public class DSpaceControlledVocabularyTest extends AbstractDSpaceTest {
         assertEquals("north 40", result.values[0].value);
     }
 
+    /**
+     * Test of getMatches method of class
+     * DSpaceControlledVocabulary using a localized controlled vocabulary with no locale (fallback to default)
+     * @throws java.lang.ClassNotFoundException passed through.
+     */
+    @Test
+    public void testGetMatchesNoLocale() throws ClassNotFoundException {
+        final String PLUGIN_INTERFACE = "org.dspace.content.authority.ChoiceAuthority";
+        String idValue = "DZA";
+        String labelPart = "Alge";
+        int start = 0;
+        int limit = 10;
+        // This "countries" Controlled Vocab is included in TestEnvironment data
+        // (under /src/test/data/dspaceFolder/) and it should be auto-loaded
+        // by test configs in /src/test/data/dspaceFolder/config/local.cfg
+        DSpaceControlledVocabulary instance = (DSpaceControlledVocabulary)
+            CoreServiceFactory.getInstance().getPluginService().getNamedPlugin(Class.forName(PLUGIN_INTERFACE),
+                "countries");
+        assertNotNull(instance);
+        Choices result = instance.getMatches(labelPart, start, limit, null);
+        assertEquals(idValue, result.values[0].value);
+        assertEquals("Algeria", result.values[0].label);
+    }
+
+    /**
+     * Test of getBestMatch method of class
+     * DSpaceControlledVocabulary using a localized controlled vocabulary with no locale (fallback to default)
+     * @throws java.lang.ClassNotFoundException passed through.
+     */
+    @Test
+    public void testGetBestMatchIdValueNoLocale() throws ClassNotFoundException {
+        final String PLUGIN_INTERFACE = "org.dspace.content.authority.ChoiceAuthority";
+        String idValue = "DZA";
+        // This "countries" Controlled Vocab is included in TestEnvironment data
+        // (under /src/test/data/dspaceFolder/) and it should be auto-loaded
+        // by test configs in /src/test/data/dspaceFolder/config/local.cfg
+        DSpaceControlledVocabulary instance = (DSpaceControlledVocabulary)
+            CoreServiceFactory.getInstance().getPluginService().getNamedPlugin(Class.forName(PLUGIN_INTERFACE),
+                "countries");
+        assertNotNull(instance);
+        Choices result = instance.getBestMatch(idValue, null);
+        assertEquals(idValue, result.values[0].value);
+        assertEquals("Algeria", result.values[0].label);
+    }
+
+    /**
+     * Test of getMatches method of class
+     * DSpaceControlledVocabulary using a localized controlled vocabulary with valid locale parameter (localized
+     * label returned)
+     */
+    @Test
+    public void testGetMatchesGermanLocale() throws ClassNotFoundException {
+        final String PLUGIN_INTERFACE = "org.dspace.content.authority.ChoiceAuthority";
+        String idValue = "DZA";
+        String labelPart = "Alge";
+        int start = 0;
+        int limit = 10;
+        // This "countries" Controlled Vocab is included in TestEnvironment data
+        // (under /src/test/data/dspaceFolder/) and it should be auto-loaded
+        // by test configs in /src/test/data/dspaceFolder/config/local.cfg
+        DSpaceControlledVocabulary instance = (DSpaceControlledVocabulary)
+            CoreServiceFactory.getInstance().getPluginService().getNamedPlugin(Class.forName(PLUGIN_INTERFACE),
+                "countries");
+        assertNotNull(instance);
+        Choices result = instance.getMatches(labelPart, start, limit, "de");
+        assertEquals(idValue, result.values[0].value);
+        assertEquals("Algerien", result.values[0].label);
+    }
+
+    /**
+     * Test of getBestMatch method of class
+     * DSpaceControlledVocabulary using a localized controlled vocabulary with valid locale parameter (localized
+     * label returned)
+     */
+    @Test
+    public void testGetBestMatchIdValueGermanLocale() throws ClassNotFoundException {
+        final String PLUGIN_INTERFACE = "org.dspace.content.authority.ChoiceAuthority";
+        String idValue = "DZA";
+        // This "countries" Controlled Vocab is included in TestEnvironment data
+        // (under /src/test/data/dspaceFolder/) and it should be auto-loaded
+        // by test configs in /src/test/data/dspaceFolder/config/local.cfg
+        DSpaceControlledVocabulary instance = (DSpaceControlledVocabulary)
+            CoreServiceFactory.getInstance().getPluginService().getNamedPlugin(Class.forName(PLUGIN_INTERFACE),
+                "countries");
+        assertNotNull(instance);
+        Choices result = instance.getBestMatch(idValue, "de");
+        assertEquals(idValue, result.values[0].value);
+        assertEquals("Algerien", result.values[0].label);
+    }
+
+    /**
+     * Test of getChoice method of class
+     * DSpaceControlledVocabulary using a localized controlled vocabulary with no locale (fallback to default)
+     * @throws java.lang.ClassNotFoundException passed through.
+     */
+    @Test
+    public void testGetChoiceNoLocale() throws ClassNotFoundException {
+        final String PLUGIN_INTERFACE = "org.dspace.content.authority.ChoiceAuthority";
+        String idValue = "DZA";
+        // This "countries" Controlled Vocab is included in TestEnvironment data
+        // (under /src/test/data/dspaceFolder/) and it should be auto-loaded
+        // by test configs in /src/test/data/dspaceFolder/config/local.cfg
+        DSpaceControlledVocabulary instance = (DSpaceControlledVocabulary)
+            CoreServiceFactory.getInstance().getPluginService().getNamedPlugin(Class.forName(PLUGIN_INTERFACE),
+                "countries");
+        assertNotNull(instance);
+        Choice result = instance.getChoice(idValue, null);
+        assertEquals(idValue, result.value);
+        assertEquals("Algeria", result.label);
+    }
+
+    /**
+     * Test of getChoice method of class
+     * DSpaceControlledVocabulary using a localized controlled vocabulary with valid locale parameter (localized
+     * label returned)
+     * @throws java.lang.ClassNotFoundException passed through.
+     */
+    @Test
+    public void testGetChoiceGermanLocale() throws ClassNotFoundException {
+        final String PLUGIN_INTERFACE = "org.dspace.content.authority.ChoiceAuthority";
+        String idValue = "DZA";
+        // This "countries" Controlled Vocab is included in TestEnvironment data
+        // (under /src/test/data/dspaceFolder/) and it should be auto-loaded
+        // by test configs in /src/test/data/dspaceFolder/config/local.cfg
+        DSpaceControlledVocabulary instance = (DSpaceControlledVocabulary)
+            CoreServiceFactory.getInstance().getPluginService().getNamedPlugin(Class.forName(PLUGIN_INTERFACE),
+                "countries");
+        assertNotNull(instance);
+        Choice result = instance.getChoice(idValue, "de");
+        assertEquals(idValue, result.value);
+        assertEquals("Algerien", result.label);
+    }
+
     /**
      * Test of getBestMatch method, of class DSpaceControlledVocabulary.
      */

View File

@@ -11,6 +11,7 @@ import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.hasSize;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
@@ -990,4 +991,38 @@ public class ItemServiceIT extends AbstractIntegrationTestWithDatabase {
         context.restoreAuthSystemState();
     }
 
+    @Test
+    public void testIsLatestVersion() throws Exception {
+        assertTrue("Original should be the latest version", this.itemService.isLatestVersion(context, item));
+
+        context.turnOffAuthorisationSystem();
+        Version firstVersion = versioningService.createNewVersion(context, item);
+        Item firstPublication = firstVersion.getItem();
+        WorkspaceItem firstPublicationWSI = workspaceItemService.findByItem(context, firstPublication);
+        installItemService.installItem(context, firstPublicationWSI);
+        context.commit();
+        context.restoreAuthSystemState();
+
+        assertTrue("First version should be valid", this.itemService.isLatestVersion(context, firstPublication));
+        assertFalse("Original version should not be valid", this.itemService.isLatestVersion(context, item));
+
+        context.turnOffAuthorisationSystem();
+        Version secondVersion = versioningService.createNewVersion(context, item);
+        Item secondPublication = secondVersion.getItem();
+        WorkspaceItem secondPublicationWSI = workspaceItemService.findByItem(context, secondPublication);
+        installItemService.installItem(context, secondPublicationWSI);
+        context.commit();
+        context.restoreAuthSystemState();
+
+        assertTrue("Second version should be valid", this.itemService.isLatestVersion(context, secondPublication));
+        assertFalse("First version should not be valid", this.itemService.isLatestVersion(context, firstPublication));
+        assertFalse("Original version should not be valid", this.itemService.isLatestVersion(context, item));
+
+        context.turnOffAuthorisationSystem();
+    }
 }

View File

@@ -558,4 +558,29 @@ public class ContextTest extends AbstractUnitTest {
         cleanupContext(instance);
     }
 
+    @Test
+    public void testUncacheEntities() throws Throwable {
+        // To set up the test, ensure the cache contains more than the current user entity
+        groupService.findByName(context, Group.ANONYMOUS);
+        assertTrue("Cache size should be greater than one", context.getDBConnection().getCacheSize() > 1);
+
+        context.uncacheEntities();
+        assertThat("Cache size should be one (current user)", context.getDBConnection().getCacheSize(), equalTo(1L));
+
+        context.reloadEntity(context.getCurrentUser());
+        assertThat("Cache should only contain the current user", context.getDBConnection().getCacheSize(), equalTo(1L));
+    }
+
+    @Test
+    public void testUncacheEntity() throws Throwable {
+        // Remember the cache size after loading an entity
+        Group group = groupService.findByName(context, Group.ANONYMOUS);
+        long oldCacheSize = context.getDBConnection().getCacheSize();
+
+        // Uncache the entity
+        context.uncacheEntity(group);
+        long newCacheSize = context.getDBConnection().getCacheSize();
+        assertThat("Cache size should be reduced by one", newCacheSize, equalTo(oldCacheSize - 1));
+    }
 }

View File

@@ -205,6 +205,28 @@ public class HibernateDBConnectionTest extends AbstractUnitTest {
                                                                 .contains(person));
     }
 
+    /**
+     * Test of uncacheEntities method
+     */
+    @Test
+    public void testUncacheEntities() throws SQLException {
+        // Get DBConnection associated with DSpace Context
+        HibernateDBConnection dbConnection = (HibernateDBConnection) context.getDBConnection();
+        EPerson person = context.getCurrentUser();
+        assertTrue("Current user should be cached in session", dbConnection.getSession()
+                                                                           .contains(person));
+
+        dbConnection.uncacheEntities();
+        assertFalse("Current user should be gone from cache", dbConnection.getSession()
+                                                                          .contains(person));
+
+        // Test ability to reload an uncached entity
+        person = dbConnection.reloadEntity(person);
+        assertTrue("Current user should be cached back in session", dbConnection.getSession()
+                                                                                .contains(person));
+    }
+
     /**
      * Test of uncacheEntity method
      */

Some files were not shown because too many files have changed in this diff.