Mirror of https://github.com/DSpace/DSpace.git, synced 2025-10-07 01:54:22 +00:00
Merge branch 'main' into main-seperate-logs-for-cli-jobs-fixing-corruption-of-log-file
.github/dependabot.yml (vendored, new file, 118 lines)
@@ -0,0 +1,118 @@
#-------------------
# DSpace's dependabot rules. Enables maven updates for all dependencies on a weekly basis
# for main and any maintenance branches. Security updates only apply to main.
#-------------------
version: 2
updates:
  - package-ecosystem: "maven"
    directory: "/"
    schedule:
      interval: "weekly"
    # Allow up to 10 open PRs for dependencies
    open-pull-requests-limit: 10
    # Group together some upgrades in a single PR
    groups:
      # Group together all Build Tools in a single PR
      build-tools:
        applies-to: version-updates
        patterns:
          - "org.apache.maven.plugins:*"
          - "*:*-maven-plugin"
          - "*:maven-*-plugin"
          - "com.github.spotbugs:spotbugs"
          - "com.google.code.findbugs:*"
          - "com.google.errorprone:*"
          - "com.puppycrawl.tools:checkstyle"
          - "org.sonatype.plugins:*"
        exclude-patterns:
          # Exclude anything from Spring, as that is in a separate group
          - "org.springframework.*:*"
        update-types:
          - "minor"
          - "patch"
      test-tools:
        applies-to: version-updates
        patterns:
          - "junit:*"
          - "com.github.stefanbirkner:system-rules"
          - "com.h2database:*"
          - "io.findify:s3mock*"
          - "io.netty:*"
          - "org.hamcrest:*"
          - "org.mock-server:*"
          - "org.mockito:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all Apache Commons deps in a single PR
      apache-commons:
        applies-to: version-updates
        patterns:
          - "org.apache.commons:*"
          - "commons-*:commons-*"
        update-types:
          - "minor"
          - "patch"
      # Group together all fasterxml deps in a single PR
      fasterxml:
        applies-to: version-updates
        patterns:
          - "com.fasterxml:*"
          - "com.fasterxml.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all Hibernate deps in a single PR
      hibernate:
        applies-to: version-updates
        patterns:
          - "org.hibernate.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all Jakarta deps in a single PR
      jakarta:
        applies-to: version-updates
        patterns:
          - "jakarta.*:*"
          - "org.eclipse.angus:jakarta.mail"
          - "org.glassfish.jaxb:jaxb-runtime"
        update-types:
          - "minor"
          - "patch"
      # Group together all Google deps in a single PR
      google-apis:
        applies-to: version-updates
        patterns:
          - "com.google.apis:*"
          - "com.google.api-client:*"
          - "com.google.http-client:*"
          - "com.google.oauth-client:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all Spring deps in a single PR
      spring:
        applies-to: version-updates
        patterns:
          - "org.springframework:*"
          - "org.springframework.*:*"
        update-types:
          - "minor"
          - "patch"
      # Group together all WebJARs deps in a single PR
      webjars:
        applies-to: version-updates
        patterns:
          - "org.webjars:*"
          - "org.webjars.*:*"
        update-types:
          - "minor"
          - "patch"
    ignore:
      # Don't try to auto-update any DSpace dependencies
      - dependency-name: "org.dspace:*"
      - dependency-name: "org.dspace.*:*"
      # Ignore all major version updates for all dependencies. We'll only automate minor/patch updates.
      - dependency-name: "*"
        update-types: ["version-update:semver-major"]
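A quick way to sanity-check the YAML above locally (a sketch only; assumes Python 3 with PyYAML installed):

    python3 -c "import yaml; yaml.safe_load(open('.github/dependabot.yml')); print('valid YAML')"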
.github/workflows/docker.yml (vendored, 101 lines changed)
@@ -15,6 +15,7 @@ on:

permissions:
  contents: read  # to fetch code (actions/checkout)
  packages: write  # to write images to GitHub Container Registry (GHCR)

jobs:
  ####################################################
@@ -147,4 +148,102 @@ jobs:
      tags_flavor: suffix=-loadsql
    secrets:
      DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}
  #################################################################################
  # Test Deployment via Docker to ensure newly built images are working properly
  #################################################################################
  docker-deploy:
    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
    if: github.repository == 'dspace/dspace'
    runs-on: ubuntu-latest
    # Must run after all major images are built
    needs: [dspace, dspace-test, dspace-cli, dspace-postgres-pgcrypto, dspace-solr]
    env:
      # Override default dspace.server.url because backend starts at http://127.0.0.1:8080
      dspace__P__server__P__url: http://127.0.0.1:8080/server
      # Enable all optional modules / controllers for this test deployment.
      # This helps check for errors in deploying these modules via Spring Boot
      iiif__P__enabled: true
      ldn__P__enabled: true
      oai__P__enabled: true
      rdf__P__enabled: true
      signposting__P__enabled: true
      sword__D__server__P__enabled: true
      swordv2__D__server__P__enabled: true
      # If this is a PR against main (default branch), use "latest".
      # Else if this is a PR against a different branch, use the base branch name.
      # Else if this is a commit on main (default branch), use the "latest" tag.
      # Else, just use the branch name.
      # NOTE: DSPACE_VER is used because our docker compose scripts default to using the "-test" image.
      DSPACE_VER: ${{ (github.event_name == 'pull_request' && github.event.pull_request.base.ref == github.event.repository.default_branch && 'latest') || (github.event_name == 'pull_request' && github.event.pull_request.base.ref) || (github.ref_name == github.event.repository.default_branch && 'latest') || github.ref_name }}
      # Docker Registry to use for Docker compose scripts below.
      # We use GitHub's Container Registry to avoid aggressive rate limits at DockerHub.
      DOCKER_REGISTRY: ghcr.io
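The double-underscore names above follow DSpace's convention for passing configuration through environment variables: "__P__" stands in for a period and "__D__" for a dash. A small illustration of the mapping (a sketch, not part of the workflow):

    # "__P__" -> "." and "__D__" -> "-"
    echo 'dspace__P__server__P__url'      | sed -e 's/__P__/./g' -e 's/__D__/-/g'   # dspace.server.url
    echo 'swordv2__D__server__P__enabled' | sed -e 's/__P__/./g' -e 's/__D__/-/g'   # swordv2-server.enabled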
    steps:
      # Checkout our codebase (to get access to Docker Compose scripts)
      - name: Checkout codebase
        uses: actions/checkout@v4
      # Download Docker image artifacts (which were just built by reusable-docker-build.yml)
      - name: Download Docker image artifacts
        uses: actions/download-artifact@v4
        with:
          # Download all amd64 Docker images (TAR files) into the /tmp/docker directory
          pattern: docker-image-*-linux-amd64
          path: /tmp/docker
          merge-multiple: true
      # Load each of the images into Docker by calling "docker image load" for each.
      # This ensures we are using the images just built & not any prior versions on DockerHub
      - name: Load all downloaded Docker images
        run: |
          find /tmp/docker -type f -name "*.tar" -exec docker image load --input "{}" \;
          docker image ls -a
      # Start backend using our compose script in the codebase.
      - name: Start backend in Docker
        run: |
          docker compose -f docker-compose.yml up -d
          sleep 10
          docker container ls
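The fixed "sleep 10" is simple but can race on slow runners; one more robust alternative would be to poll the REST root until it answers (a sketch only, not part of the workflow):

    # Wait up to ~150s for the backend to respond before moving on
    for i in $(seq 1 30); do
      wget -q -O /dev/null http://127.0.0.1:8080/server/api && break
      sleep 5
    done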
      # Create a test admin account. Load test data from a simple set of AIPs as defined in cli.ingest.yml
      - name: Load test data into Backend
        run: |
          docker compose -f docker-compose-cli.yml run --rm dspace-cli create-administrator -e test@test.edu -f admin -l user -p admin -c en
          docker compose -f docker-compose-cli.yml -f dspace/src/main/docker-compose/cli.ingest.yml run --rm dspace-cli
      # Verify backend started successfully.
      # 1. Make sure root endpoint is responding (check for dspace.name defined in docker-compose.yml)
      # 2. Also check /collections endpoint to ensure the test data loaded properly (check for a collection name in AIPs)
      - name: Verify backend is responding properly
        run: |
          result=$(wget -O- -q http://127.0.0.1:8080/server/api)
          echo "$result"
          echo "$result" | grep -oE "\"DSpace Started with Docker Compose\","
          result=$(wget -O- -q http://127.0.0.1:8080/server/api/core/collections)
          echo "$result"
          echo "$result" | grep -oE "\"Dog in Yard\","
      # Verify Handle Server can be started and is working properly
      # 1. First generate the "[dspace]/handle-server" folder with the sitebndl.zip
      # 2. Start the Handle Server (and wait 20 seconds to let it start up)
      # 3. Verify logs do NOT include "Exception" in the text (as that means an error occurred)
      # 4. Check that Handle Proxy HTML page is responding on default port (8000)
      - name: Verify Handle Server is working properly
        run: |
          docker exec -i dspace /dspace/bin/make-handle-config
          echo "Starting Handle Server..."
          docker exec -i dspace /dspace/bin/start-handle-server
          sleep 20
          echo "Checking for errors in error.log"
          result=$(docker exec -i dspace sh -c "cat /dspace/handle-server/logs/error.log* || echo ''")
          echo "$result"
          echo "$result" | grep -vqz "Exception"
          echo "Checking for errors in handle-server.log..."
          result=$(docker exec -i dspace cat /dspace/log/handle-server.log)
          echo "$result"
          echo "$result" | grep -vqz "Exception"
          echo "Checking to see if Handle Proxy webpage is available..."
          result=$(wget -O- -q http://127.0.0.1:8000/)
          echo "$result"
          echo "$result" | grep -oE "Handle Proxy"
      # Shutdown our containers
      - name: Shutdown Docker containers
        run: |
          docker compose -f docker-compose.yml down
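A note on the 'grep -vqz "Exception"' checks in the Handle Server step: -z makes grep treat the whole input as one NUL-delimited record, so -v only succeeds when "Exception" appears nowhere in the entire log, and -q suppresses output, leaving just the exit code. Behavior sketch:

    printf 'all clear\n'           | grep -vqz "Exception" && echo "log is clean"   # exits 0
    printf 'java.lang.Exception\n' | grep -vqz "Exception" || echo "errors found"   # exits 1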
.github/workflows/reusable-docker-build.yml (vendored, 225 lines changed)
@@ -54,10 +54,13 @@ env:
  # For a new commit on default branch (main), use the literal tag 'latest' on Docker image.
  # For a new commit on other branches, use the branch name as the tag for Docker image.
  # For a new tag, copy that tag name as the tag for Docker image.
  # For a pull request, use the name of the base branch that the PR was created against or "latest" (for main).
  # e.g. a PR against 'main' will use "latest"; a PR against 'dspace-7_x' will use 'dspace-7_x'.
  IMAGE_TAGS: |
    type=raw,value=latest,enable=${{ github.ref_name == github.event.repository.default_branch }}
    type=ref,event=branch,enable=${{ github.ref_name != github.event.repository.default_branch }}
    type=ref,event=tag
    type=raw,value=${{ (github.event.pull_request.base.ref == github.event.repository.default_branch && 'latest') || github.event.pull_request.base.ref }},enable=${{ github.event_name == 'pull_request' }}
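The last IMAGE_TAGS entry uses the usual GitHub Actions substitute for a ternary operator: workflow expressions have no "?:", so "cond && A || B" picks A when cond is true (A must be truthy) and B otherwise. For instance:

    # ${{ (base_ref == default_branch && 'latest') || base_ref }}
    #   base_ref == 'main'       -> 'latest'
    #   base_ref == 'dspace-7_x' -> 'dspace-7_x'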
  # Define default tag "flavor" for docker/metadata-action per
  # https://github.com/docker/metadata-action#flavor-input
  # We manage the 'latest' tag ourselves to the 'main' branch (see settings above)
@@ -72,6 +75,9 @@ env:
  DEPLOY_DEMO_BRANCH: 'dspace-8_x'
  DEPLOY_SANDBOX_BRANCH: 'main'
  DEPLOY_ARCH: 'linux/amd64'
  # Registry used during building of Docker images. (All images are later copied to docker.io registry)
  # We use GitHub's Container Registry to avoid aggressive rate limits at DockerHub.
  DOCKER_BUILD_REGISTRY: ghcr.io

jobs:
  docker-build:
@@ -96,6 +102,7 @@ jobs:
      # This step converts the slashes in the "arch" matrix values above into dashes & saves to env.ARCH_NAME
      # E.g. "linux/amd64" becomes "linux-amd64"
      # This is necessary because all upload artifacts CANNOT have special chars (like slashes)
      # NOTE: The regex-like syntax below is Bash Parameter Substitution
      - name: Prepare
        run: |
          platform=${{ matrix.arch }}
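The hunk is cut off here, but the Bash Parameter Substitution the NOTE refers to presumably resembles the following (a hypothetical reconstruction; the actual line is not shown in this diff):

    # Replace every "/" with "-" and export for later steps
    echo "ARCH_NAME=${platform//\//-}" >> $GITHUB_ENV   # "linux/amd64" -> "linux-amd64"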
@@ -105,35 +112,45 @@ jobs:
      - name: Checkout codebase
        uses: actions/checkout@v4

      # https://github.com/docker/setup-buildx-action
      - name: Setup Docker Buildx
        uses: docker/setup-buildx-action@v3
      # https://github.com/docker/login-action
      # NOTE: This login occurs for BOTH non-PRs or PRs. PRs *must* also login to access private images from GHCR
      # during the build process
      - name: Login to ${{ env.DOCKER_BUILD_REGISTRY }}
        uses: docker/login-action@v3
        with:
          registry: ${{ env.DOCKER_BUILD_REGISTRY }}
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      # https://github.com/docker/setup-qemu-action
      - name: Set up QEMU emulation to build for multiple architectures
        uses: docker/setup-qemu-action@v3

      # https://github.com/docker/login-action
      - name: Login to DockerHub
        # Only login if not a PR, as PRs only trigger a Docker build and not a push
        if: ${{ ! matrix.isPr }}
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
      # https://github.com/docker/setup-buildx-action
      - name: Setup Docker Buildx
        uses: docker/setup-buildx-action@v3

      # https://github.com/docker/metadata-action
      # Get Metadata for docker_build_deps step below
      - name: Sync metadata (tags, labels) from GitHub to Docker for image
      # Extract metadata used for Docker images in all build steps below
      - name: Extract metadata (tags, labels) from GitHub for Docker image
        id: meta_build
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.IMAGE_NAME }}
          images: ${{ env.DOCKER_BUILD_REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: ${{ env.IMAGE_TAGS }}
          flavor: ${{ env.TAGS_FLAVOR }}

      #--------------------------------------------------------------------
      # First, for all branch commits (non-PRs) we build the image & upload
      # to GitHub Container Registry (GHCR). After uploading the image
      # to GHCR, we store the image digest in an artifact, so we can
      # create a merged manifest later (see 'docker-build_manifest' job).
      #
      # NOTE: We use GHCR in order to avoid aggressive rate limits at DockerHub.
      #--------------------------------------------------------------------
      # https://github.com/docker/build-push-action
      - name: Build and push image
      - name: Build and push image to ${{ env.DOCKER_BUILD_REGISTRY }}
        if: ${{ ! matrix.isPr }}
        id: docker_build
        uses: docker/build-push-action@v5
        with:
@@ -141,15 +158,20 @@ jobs:
            ${{ inputs.dockerfile_additional_contexts }}
          context: ${{ inputs.dockerfile_context }}
          file: ${{ inputs.dockerfile_path }}
          # Tell DSpace's Docker files to use the build registry instead of DockerHub
          build-args:
            DOCKER_REGISTRY=${{ env.DOCKER_BUILD_REGISTRY }}
          platforms: ${{ matrix.arch }}
          # For pull requests, we run the Docker build (to ensure no PR changes break the build),
          # but we ONLY do an image push to DockerHub if it's NOT a PR
          push: ${{ ! matrix.isPr }}
          push: true
          # Use tags / labels provided by 'docker/metadata-action' above
          tags: ${{ steps.meta_build.outputs.tags }}
          labels: ${{ steps.meta_build.outputs.labels }}
          # Use GitHub cache to load cached Docker images and cache the results of this build
          # This decreases the number of images we need to fetch from DockerHub
          cache-from: type=gha,scope=${{ inputs.build_id }}
          cache-to: type=gha,scope=${{ inputs.build_id }},mode=max

      # Export the digest of Docker build locally (for non PRs only)
      # Export the digest of Docker build locally
      - name: Export Docker build digest
        if: ${{ ! matrix.isPr }}
        run: |
@@ -157,7 +179,8 @@ jobs:
          digest="${{ steps.docker_build.outputs.digest }}"
          touch "/tmp/digests/${digest#sha256:}"

      # Upload digest to an artifact, so that it can be used in manifest below
      # Upload digest to an artifact, so that it can be used in combined manifest below
      # (The purpose of the combined manifest is to list both amd64 and arm64 builds under same tag)
      - name: Upload Docker build digest to artifact
        if: ${{ ! matrix.isPr }}
        uses: actions/upload-artifact@v4
@@ -167,33 +190,60 @@ jobs:
          if-no-files-found: error
          retention-days: 1

      # If this build is NOT a PR and passed in a REDEPLOY_SANDBOX_URL secret,
      # Then redeploy https://sandbox.dspace.org if this build is for our deployment architecture and 'main' branch.
      - name: Redeploy sandbox.dspace.org (based on main branch)
        if: |
          !matrix.isPR &&
          env.REDEPLOY_SANDBOX_URL != '' &&
          matrix.arch == env.DEPLOY_ARCH &&
          github.ref_name == env.DEPLOY_SANDBOX_BRANCH
        run: |
          curl -X POST $REDEPLOY_SANDBOX_URL
      #------------------------------------------------------------------------------
      # Second, we build the image again in order to store it in a local TAR file.
      # This TAR of the image is cached/saved as an artifact, so that it can be used
      # by later jobs to install the brand-new images for automated testing.
      # This TAR build is performed BOTH for PRs and for branch commits (non-PRs).
      #
      # (This approach has the advantage of avoiding having to download the newly built
      # image from DockerHub or GHCR during automated testing.)
      #
      # See the 'docker-deploy' job in docker.yml as an example of where this TAR is used.
      #-------------------------------------------------------------------------------
      # Build local image (again) and store in a TAR file in /tmp directory
      # This step is only done for AMD64, as that's the only image we use in our automated testing (at this time).
      # NOTE: This step cannot be combined with the build above as it's a different type of output.
      - name: Build and push image to local TAR file
        if: ${{ matrix.arch == 'linux/amd64'}}
        uses: docker/build-push-action@v5
        with:
          build-contexts: |
            ${{ inputs.dockerfile_additional_contexts }}
          context: ${{ inputs.dockerfile_context }}
          file: ${{ inputs.dockerfile_path }}
          # Tell DSpace's Docker files to use the build registry instead of DockerHub
          build-args:
            DOCKER_REGISTRY=${{ env.DOCKER_BUILD_REGISTRY }}
          platforms: ${{ matrix.arch }}
          tags: ${{ steps.meta_build.outputs.tags }}
          labels: ${{ steps.meta_build.outputs.labels }}
          # Use GitHub cache to load cached Docker images and cache the results of this build
          # This decreases the number of images we need to fetch from DockerHub
          cache-from: type=gha,scope=${{ inputs.build_id }}
          cache-to: type=gha,scope=${{ inputs.build_id }},mode=max
          # Export image to a local TAR file
          outputs: type=docker,dest=/tmp/${{ inputs.build_id }}.tar
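This TAR is what the docker-deploy job in docker.yml later loads, along the lines of (illustrative; the build_id placeholder is the workflow input, e.g. "dspace"):

    docker image load --input /tmp/<build_id>.tar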

      # If this build is NOT a PR and passed in a REDEPLOY_DEMO_URL secret,
      # Then redeploy https://demo.dspace.org if this build is for our deployment architecture and demo branch.
      - name: Redeploy demo.dspace.org (based on maintenance branch)
        if: |
          !matrix.isPR &&
          env.REDEPLOY_DEMO_URL != '' &&
          matrix.arch == env.DEPLOY_ARCH &&
          github.ref_name == env.DEPLOY_DEMO_BRANCH
        run: |
          curl -X POST $REDEPLOY_DEMO_URL
      # Upload the local docker image (in TAR file) to a build Artifact
      # This step is only done for AMD64, as that's the only image we use in our automated testing (at this time).
      - name: Upload local image TAR to artifact
        if: ${{ matrix.arch == 'linux/amd64'}}
        uses: actions/upload-artifact@v4
        with:
          name: docker-image-${{ inputs.build_id }}-${{ env.ARCH_NAME }}
          path: /tmp/${{ inputs.build_id }}.tar
          if-no-files-found: error
          retention-days: 1

  # Merge Docker digests (from various architectures) into a manifest.
  # This runs after all Docker builds complete above, and it tells hub.docker.com
  # that these builds should be all included in the manifest for this tag.
  # (e.g. AMD64 and ARM64 should be listed as options under the same tagged Docker image)
  ##########################################################################################
  # Merge Docker digests (from various architectures) into a single manifest.
  # This runs after all Docker builds complete above. The purpose is to include all builds
  # under a single manifest for this tag.
  # (e.g. both linux/amd64 and linux/arm64 should be listed under the same tagged Docker image)
  ##########################################################################################
  docker-build_manifest:
    # Only run if this is NOT a PR
    if: ${{ github.event_name != 'pull_request' }}
    runs-on: ubuntu-latest
    needs:
@@ -207,29 +257,102 @@
          pattern: digests-${{ inputs.build_id }}-*
          merge-multiple: true

      - name: Login to ${{ env.DOCKER_BUILD_REGISTRY }}
        uses: docker/login-action@v3
        with:
          registry: ${{ env.DOCKER_BUILD_REGISTRY }}
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Add Docker metadata for image
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.DOCKER_BUILD_REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: ${{ env.IMAGE_TAGS }}
          flavor: ${{ env.TAGS_FLAVOR }}

      - name: Create manifest list from digests and push to ${{ env.DOCKER_BUILD_REGISTRY }}
        working-directory: /tmp/digests
        run: |
          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf '${{ env.DOCKER_BUILD_REGISTRY }}/${{ env.IMAGE_NAME }}@sha256:%s ' *)
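How the jq invocation above works: docker/metadata-action exports DOCKER_METADATA_OUTPUT_JSON, whose "tags" array is turned into repeated -t flags for "docker buildx imagetools create". For example:

    echo '{"tags":["ghcr.io/owner/img:latest","ghcr.io/owner/img:main"]}' \
      | jq -cr '.tags | map("-t " + .) | join(" ")'
    # -> -t ghcr.io/owner/img:latest -t ghcr.io/owner/img:main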

      - name: Inspect manifest in ${{ env.DOCKER_BUILD_REGISTRY }}
        run: |
          docker buildx imagetools inspect ${{ env.DOCKER_BUILD_REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.meta.outputs.version }}

  ##########################################################################################
  # Copy images / manifest to DockerHub.
  # This MUST run after *both* images (AMD64 and ARM64) are built and uploaded to GitHub
  # Container Registry (GHCR). Attempting to run this in parallel to GHCR builds can result
  # in a race condition...i.e. the copy to DockerHub may fail if GHCR image has been updated
  # at the moment when the copy occurs.
  ##########################################################################################
  docker-copy_to_dockerhub:
    # Only run if this is NOT a PR
    if: ${{ github.event_name != 'pull_request' }}
    runs-on: ubuntu-latest
    needs:
      - docker-build_manifest

    steps:
      # 'regctl' is used to more easily copy the image to DockerHub and obtain the digest from DockerHub
      # See https://github.com/regclient/regclient/blob/main/docs/regctl.md
      - name: Install regctl for Docker registry tools
        uses: regclient/actions/regctl-installer@main
        with:
          release: 'v0.8.0'

      # This recreates Docker tags for DockerHub
      - name: Add Docker metadata for image
        id: meta_dockerhub
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.IMAGE_NAME }}
          tags: ${{ env.IMAGE_TAGS }}
          flavor: ${{ env.TAGS_FLAVOR }}

      - name: Login to Docker Hub
      # Login to source registry first, as this is where we are copying *from*
      - name: Login to ${{ env.DOCKER_BUILD_REGISTRY }}
        uses: docker/login-action@v3
        with:
          registry: ${{ env.DOCKER_BUILD_REGISTRY }}
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      # Login to DockerHub, since this is where we are copying *to*
      - name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_ACCESS_TOKEN }}

      - name: Create manifest list from digests and push
        working-directory: /tmp/digests
      # Copy the image from source to DockerHub
      - name: Copy image from ${{ env.DOCKER_BUILD_REGISTRY }} to docker.io
        run: |
          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf '${{ env.IMAGE_NAME }}@sha256:%s ' *)
          regctl image copy ${{ env.DOCKER_BUILD_REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.meta_dockerhub.outputs.version }} docker.io/${{ env.IMAGE_NAME }}:${{ steps.meta_dockerhub.outputs.version }}

      - name: Inspect image
      #--------------------------------------------------------------------
      # Finally, check whether demo.dspace.org or sandbox.dspace.org need
      # to be redeployed based on these new DockerHub images.
      #--------------------------------------------------------------------
      # If this build is for the branch that Sandbox uses and passed in a REDEPLOY_SANDBOX_URL secret,
      # Then redeploy https://sandbox.dspace.org
      - name: Redeploy sandbox.dspace.org (based on main branch)
        if: |
          env.REDEPLOY_SANDBOX_URL != '' &&
          github.ref_name == env.DEPLOY_SANDBOX_BRANCH
        run: |
          docker buildx imagetools inspect ${{ env.IMAGE_NAME }}:${{ steps.meta.outputs.version }}
          curl -X POST $REDEPLOY_SANDBOX_URL
      # If this build is for the branch that Demo uses and passed in a REDEPLOY_DEMO_URL secret,
      # Then redeploy https://demo.dspace.org
      - name: Redeploy demo.dspace.org (based on maintenance branch)
        if: |
          env.REDEPLOY_DEMO_URL != '' &&
          github.ref_name == env.DEPLOY_DEMO_BRANCH
        run: |
          curl -X POST $REDEPLOY_DEMO_URL
.gitignore (vendored, 1 line changed)
@@ -10,6 +10,7 @@ tags
.project
.classpath
.checkstyle
.factorypath

## Ignore project files created by IntelliJ IDEA
*.iml

Dockerfile (35 lines changed)
@@ -6,10 +6,14 @@
# This Dockerfile uses JDK17 by default.
# To build with other versions, use "--build-arg JDK_VERSION=[value]"
ARG JDK_VERSION=17
# The Docker version tag to build from
ARG DSPACE_VERSION=latest
# The Docker registry to use for DSpace images. Defaults to "docker.io"
# NOTE: non-DSpace images are hardcoded to use "docker.io" and are not impacted by this build argument
ARG DOCKER_REGISTRY=docker.io

# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:${DSPACE_VERSION} AS build
FROM ${DOCKER_REGISTRY}/dspace/dspace-dependencies:${DSPACE_VERSION} AS build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-installer directory will be written to /install
@@ -31,35 +35,38 @@ RUN mvn --no-transfer-progress package ${MAVEN_FLAGS} && \
RUN rm -rf /install/webapps/server/

# Step 2 - Run Ant Deploy
FROM eclipse-temurin:${JDK_VERSION} AS ant_build
FROM docker.io/eclipse-temurin:${JDK_VERSION} AS ant_build
ARG TARGET_DIR=dspace-installer
# COPY the /install directory from 'build' container to /dspace-src in this container
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.13
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
# Need wget to install ant
RUN apt-get update \
    && apt-get install -y --no-install-recommends wget \
    && apt-get purge -y --auto-remove \
    && rm -rf /var/lib/apt/lists/*
ENV ANT_VERSION=1.10.13
ENV ANT_HOME=/tmp/ant-$ANT_VERSION
ENV PATH=$ANT_HOME/bin:$PATH
# Download and install 'ant'
RUN mkdir $ANT_HOME && \
    wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
    curl --silent --show-error --location --fail --retry 5 --output /tmp/apache-ant.tar.gz \
      https://archive.apache.org/dist/ant/binaries/apache-ant-${ANT_VERSION}-bin.tar.gz && \
    tar -zx --strip-components=1 -f /tmp/apache-ant.tar.gz -C $ANT_HOME && \
    rm /tmp/apache-ant.tar.gz
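Design note on the change above: downloading to a file with "--fail --retry 5" and unpacking afterwards is more robust than piping wget straight into tar, because a truncated or failed download now fails the build instead of leaving a partial ANT_HOME behind. The same download can be exercised by hand (illustrative):

    curl --silent --show-error --location --fail --retry 5 --output /tmp/apache-ant.tar.gz \
      "https://archive.apache.org/dist/ant/binaries/apache-ant-1.10.13-bin.tar.gz" && echo "download ok"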
# Run necessary 'ant' deploy scripts
RUN ant init_installation update_configs update_code update_webapps

# Step 3 - Start up DSpace via Runnable JAR
FROM eclipse-temurin:${JDK_VERSION}
FROM docker.io/eclipse-temurin:${JDK_VERSION}
# NOTE: DSPACE_INSTALL must align with the "dspace.dir" default configuration.
ENV DSPACE_INSTALL=/dspace
# Copy the /dspace directory from 'ant_build' container to /dspace in this container
COPY --from=ant_build /dspace $DSPACE_INSTALL
WORKDIR $DSPACE_INSTALL
# Expose Tomcat port
EXPOSE 8080
# Need host command for "[dspace]/bin/make-handle-config"
RUN apt-get update \
    && apt-get install -y --no-install-recommends host \
    && apt-get purge -y --auto-remove \
    && rm -rf /var/lib/apt/lists/*
# Expose Tomcat port (8080) & Handle Server HTTP port (8000)
EXPOSE 8080 8000
# Give java extra memory (2GB)
ENV JAVA_OPTS=-Xmx2000m
# On startup, run DSpace Runnable JAR

@@ -6,10 +6,14 @@
# This Dockerfile uses JDK17 by default.
# To build with other versions, use "--build-arg JDK_VERSION=[value]"
ARG JDK_VERSION=17
# The Docker version tag to build from
ARG DSPACE_VERSION=latest
# The Docker registry to use for DSpace images. Defaults to "docker.io"
# NOTE: non-DSpace images are hardcoded to use "docker.io" and are not impacted by this build argument
ARG DOCKER_REGISTRY=docker.io

# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:${DSPACE_VERSION} AS build
FROM ${DOCKER_REGISTRY}/dspace/dspace-dependencies:${DSPACE_VERSION} AS build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-installer directory will be written to /install
@@ -25,28 +29,26 @@ RUN mvn --no-transfer-progress package && \
    mvn clean

# Step 2 - Run Ant Deploy
FROM eclipse-temurin:${JDK_VERSION} AS ant_build
FROM docker.io/eclipse-temurin:${JDK_VERSION} AS ant_build
ARG TARGET_DIR=dspace-installer
# COPY the /install directory from 'build' container to /dspace-src in this container
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.13
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
# Need wget to install ant
RUN apt-get update \
    && apt-get install -y --no-install-recommends wget \
    && apt-get purge -y --auto-remove \
    && rm -rf /var/lib/apt/lists/*
ENV ANT_VERSION=1.10.13
ENV ANT_HOME=/tmp/ant-$ANT_VERSION
ENV PATH=$ANT_HOME/bin:$PATH
# Download and install 'ant'
RUN mkdir $ANT_HOME && \
    wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
    curl --silent --show-error --location --fail --retry 5 --output /tmp/apache-ant.tar.gz \
      https://archive.apache.org/dist/ant/binaries/apache-ant-${ANT_VERSION}-bin.tar.gz && \
    tar -zx --strip-components=1 -f /tmp/apache-ant.tar.gz -C $ANT_HOME && \
    rm /tmp/apache-ant.tar.gz
# Run necessary 'ant' deploy scripts
RUN ant init_installation update_configs update_code

# Step 3 - Run jdk
FROM eclipse-temurin:${JDK_VERSION}
FROM docker.io/eclipse-temurin:${JDK_VERSION}
# NOTE: DSPACE_INSTALL must align with the "dspace.dir" default configuration.
ENV DSPACE_INSTALL=/dspace
# Copy the /dspace directory from 'ant_build' container to /dspace in this container

@@ -6,8 +6,8 @@
# To build with other versions, use "--build-arg JDK_VERSION=[value]"
ARG JDK_VERSION=17

# Step 1 - Run Maven Build
FROM maven:3-eclipse-temurin-${JDK_VERSION} AS build
# Step 1 - Download all Dependencies
FROM docker.io/maven:3-eclipse-temurin-${JDK_VERSION} AS build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# Create the 'dspace' user account & home directory
@@ -19,16 +19,60 @@ RUN chown -Rv dspace: /app
# Switch to dspace user & run below commands as that user
USER dspace

# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
# This next part may look odd, but it speeds up the build of this image *significantly*.
# Copy ONLY the POMs to this image (from local machine). This will allow us to download all dependencies *without*
# performing any code compilation steps.

# Parent POM
ADD --chown=dspace pom.xml /app/
RUN mkdir -p /app/dspace

# 'dspace' module POM. Includes 'additions' ONLY, as it's the only submodule that is required to exist.
ADD --chown=dspace dspace/pom.xml /app/dspace/
RUN mkdir -p /app/dspace/modules/
ADD --chown=dspace dspace/modules/pom.xml /app/dspace/modules/
RUN mkdir -p /app/dspace/modules/additions
ADD --chown=dspace dspace/modules/additions/pom.xml /app/dspace/modules/additions/

# 'dspace-api' module POM
RUN mkdir -p /app/dspace-api
ADD --chown=dspace dspace-api/pom.xml /app/dspace-api/

# 'dspace-iiif' module POM
RUN mkdir -p /app/dspace-iiif
ADD --chown=dspace dspace-iiif/pom.xml /app/dspace-iiif/

# 'dspace-oai' module POM
RUN mkdir -p /app/dspace-oai
ADD --chown=dspace dspace-oai/pom.xml /app/dspace-oai/

# 'dspace-rdf' module POM
RUN mkdir -p /app/dspace-rdf
ADD --chown=dspace dspace-rdf/pom.xml /app/dspace-rdf/

# 'dspace-server-webapp' module POM
RUN mkdir -p /app/dspace-server-webapp
ADD --chown=dspace dspace-server-webapp/pom.xml /app/dspace-server-webapp/

# 'dspace-services' module POM
RUN mkdir -p /app/dspace-services
ADD --chown=dspace dspace-services/pom.xml /app/dspace-services/

# 'dspace-sword' module POM
RUN mkdir -p /app/dspace-sword
ADD --chown=dspace dspace-sword/pom.xml /app/dspace-sword/

# 'dspace-swordv2' module POM
RUN mkdir -p /app/dspace-swordv2
ADD --chown=dspace dspace-swordv2/pom.xml /app/dspace-swordv2/

# Trigger the installation of all maven dependencies (hide download progress messages)
# Maven flags here ensure that we skip final assembly, skip building test environment and skip all code verification checks.
# These flags speed up this installation as much as reasonably possible.
ENV MAVEN_FLAGS="-P-assembly -P-test-environment -Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxml.skip=true"
RUN mvn --no-transfer-progress install ${MAVEN_FLAGS}
# These flags speed up this installation and skip tasks we cannot perform as we don't have the full source code.
ENV MAVEN_FLAGS="-P-assembly -P-test-environment -Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxjc.skip=true -Dxml.skip=true"
RUN mvn --no-transfer-progress verify ${MAVEN_FLAGS}

# Clear the contents of the /app directory (including all maven builds), so no artifacts remain.
# Clear the contents of the /app directory (including all maven target folders), so no artifacts remain.
# This ensures when dspace:dspace is built, it will use the Maven local cache (~/.m2) for dependencies
USER root
RUN rm -rf /app/*
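Why copying the POMs first pays off: Docker caches each layer, so the expensive "RUN mvn ... verify" dependency download is only invalidated when a POM changes, not on every source edit. A quick way to observe the effect (illustrative; assumes this file is built as the dependencies image):

    docker build -f Dockerfile.dependencies -t dspace/dspace-dependencies:latest .
    # change any .java file and rebuild: the mvn layer is served from cache
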
@@ -8,10 +8,14 @@
# This Dockerfile uses JDK17 by default.
# To build with other versions, use "--build-arg JDK_VERSION=[value]"
ARG JDK_VERSION=17
# The Docker version tag to build from
ARG DSPACE_VERSION=latest
# The Docker registry to use for DSpace images. Defaults to "docker.io"
# NOTE: non-DSpace images are hardcoded to use "docker.io" and are not impacted by this build argument
ARG DOCKER_REGISTRY=docker.io

# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:${DSPACE_VERSION} AS build
FROM ${DOCKER_REGISTRY}/dspace/dspace-dependencies:${DSPACE_VERSION} AS build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-installer directory will be written to /install
@@ -30,33 +34,36 @@ RUN mvn --no-transfer-progress package && \
RUN rm -rf /install/webapps/server/

# Step 2 - Run Ant Deploy
FROM eclipse-temurin:${JDK_VERSION} AS ant_build
FROM docker.io/eclipse-temurin:${JDK_VERSION} AS ant_build
ARG TARGET_DIR=dspace-installer
# COPY the /install directory from 'build' container to /dspace-src in this container
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.12
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
# Need wget to install ant
RUN apt-get update \
    && apt-get install -y --no-install-recommends wget \
    && apt-get purge -y --auto-remove \
    && rm -rf /var/lib/apt/lists/*
ENV ANT_VERSION=1.10.12
ENV ANT_HOME=/tmp/ant-$ANT_VERSION
ENV PATH=$ANT_HOME/bin:$PATH
# Download and install 'ant'
RUN mkdir $ANT_HOME && \
    wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
    curl --silent --show-error --location --fail --retry 5 --output /tmp/apache-ant.tar.gz \
      https://archive.apache.org/dist/ant/binaries/apache-ant-${ANT_VERSION}-bin.tar.gz && \
    tar -zx --strip-components=1 -f /tmp/apache-ant.tar.gz -C $ANT_HOME && \
    rm /tmp/apache-ant.tar.gz
# Run necessary 'ant' deploy scripts
RUN ant init_installation update_configs update_code update_webapps

# Step 3 - Start up DSpace via Runnable JAR
FROM eclipse-temurin:${JDK_VERSION}
FROM docker.io/eclipse-temurin:${JDK_VERSION}
# NOTE: DSPACE_INSTALL must align with the "dspace.dir" default configuration.
ENV DSPACE_INSTALL=/dspace
# Copy the /dspace directory from 'ant_build' container to /dspace in this container
COPY --from=ant_build /dspace $DSPACE_INSTALL
WORKDIR $DSPACE_INSTALL
# Need host command for "[dspace]/bin/make-handle-config"
RUN apt-get update \
    && apt-get install -y --no-install-recommends host \
    && apt-get purge -y --auto-remove \
    && rm -rf /var/lib/apt/lists/*
# Expose Tomcat port and debugging port
EXPOSE 8080 8000
# Give java extra memory (2GB)

@@ -92,7 +92,7 @@ For more information on CheckStyle configurations below, see: http://checkstyle.
        <!-- Requirements for Javadocs for methods -->
        <module name="JavadocMethod">
            <!-- All public methods MUST HAVE Javadocs -->
            <property name="scope" value="public"/>
            <property name="accessModifiers" value="public"/>
            <!-- Allow params, throws and return tags to be optional -->
            <property name="allowMissingParamTags" value="true"/>
            <property name="allowMissingReturnTag" value="true"/>

@@ -6,7 +6,7 @@ networks:
    external: true
services:
  dspace-cli:
    image: "${DOCKER_OWNER:-dspace}/dspace-cli:${DSPACE_VER:-latest}"
    image: "${DOCKER_REGISTRY:-docker.io}/${DOCKER_OWNER:-dspace}/dspace-cli:${DSPACE_VER:-latest}"
    container_name: dspace-cli
    build:
      context: .

@@ -28,7 +28,7 @@ services:
      # from the host machine. This IP range MUST correspond to the 'dspacenet' subnet defined above.
      proxies__P__trusted__P__ipranges: '172.23.0'
      LOGGING_CONFIG: /dspace/config/log4j2-container.xml
    image: "${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-latest-test}"
    image: "${DOCKER_REGISTRY:-docker.io}/${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-latest-test}"
    build:
      context: .
      dockerfile: Dockerfile.test
@@ -64,7 +64,7 @@ services:
  dspacedb:
    container_name: dspacedb
    # Uses a custom Postgres image with pgcrypto installed
    image: "${DOCKER_OWNER:-dspace}/dspace-postgres-pgcrypto:${DSPACE_VER:-latest}"
    image: "${DOCKER_REGISTRY:-docker.io}/${DOCKER_OWNER:-dspace}/dspace-postgres-pgcrypto:${DSPACE_VER:-latest}"
    build:
      # Must build out of subdirectory to have access to install script for pgcrypto
      context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/
@@ -84,7 +84,7 @@ services:
  # DSpace Solr container
  dspacesolr:
    container_name: dspacesolr
    image: "${DOCKER_OWNER:-dspace}/dspace-solr:${DSPACE_VER:-latest}"
    image: "${DOCKER_REGISTRY:-docker.io}/${DOCKER_OWNER:-dspace}/dspace-solr:${DSPACE_VER:-latest}"
    build:
      context: ./dspace/src/main/docker/dspace-solr/
      # Provide path to Solr configs necessary to build Docker image
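With the DOCKER_REGISTRY variable introduced above, the compose files can pull from an alternate registry, for example (illustrative):

    # Pull backend images from GHCR instead of DockerHub
    DOCKER_REGISTRY=ghcr.io DSPACE_VER=latest docker compose -f docker-compose.yml up -d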
@@ -102,7 +102,7 @@
            <plugin>
                <groupId>org.codehaus.mojo</groupId>
                <artifactId>build-helper-maven-plugin</artifactId>
                <version>3.4.0</version>
                <version>3.6.0</version>
                <executions>
                    <execution>
                        <phase>validate</phase>
@@ -116,7 +116,7 @@
            <plugin>
                <groupId>org.codehaus.mojo</groupId>
                <artifactId>buildnumber-maven-plugin</artifactId>
                <version>3.2.0</version>
                <version>3.2.1</version>
                <configuration>
                    <revisionOnScmFailure>UNKNOWN_REVISION</revisionOnScmFailure>
                </configuration>
@@ -177,7 +177,7 @@
            <plugin>
                <groupId>org.codehaus.mojo</groupId>
                <artifactId>jaxb2-maven-plugin</artifactId>
                <version>3.1.0</version>
                <version>3.2.0</version>
                <executions>
                    <execution>
                        <id>workflow-curation</id>
@@ -341,6 +341,14 @@
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-api</artifactId>
        </dependency>
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-core</artifactId>
        </dependency>
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-slf4j2-impl</artifactId>
        </dependency>
        <dependency>
            <groupId>org.hibernate.orm</groupId>
            <artifactId>hibernate-core</artifactId>
@@ -388,6 +396,13 @@
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-orm</artifactId>
            <exclusions>
                <!-- Spring JCL is unnecessary and conflicts with commons-logging when both are on classpath -->
                <exclusion>
                    <groupId>org.springframework</groupId>
                    <artifactId>spring-jcl</artifactId>
                </exclusion>
            </exclusions>
        </dependency>

        <dependency>
@@ -406,6 +421,16 @@
                <groupId>org.mortbay.jasper</groupId>
                <artifactId>apache-jsp</artifactId>
            </exclusion>
            <!-- Excluded BouncyCastle dependencies because we use a later version of BouncyCastle.
                 Having two versions of BouncyCastle in the classpath can cause Handle Server to throw errors. -->
            <exclusion>
                <groupId>org.bouncycastle</groupId>
                <artifactId>bcpkix-jdk15on</artifactId>
            </exclusion>
            <exclusion>
                <groupId>org.bouncycastle</groupId>
                <artifactId>bcprov-jdk15on</artifactId>
            </exclusion>
            </exclusions>
        </dependency>

@@ -623,7 +648,7 @@
        <dependency>
            <groupId>dnsjava</groupId>
            <artifactId>dnsjava</artifactId>
            <version>3.6.0</version>
            <version>3.6.2</version>
        </dependency>

        <dependency>
@@ -672,22 +697,6 @@
            <groupId>com.google.apis</groupId>
            <artifactId>google-api-services-analytics</artifactId>
        </dependency>
        <dependency>
            <groupId>com.google.api-client</groupId>
            <artifactId>google-api-client</artifactId>
        </dependency>
        <dependency>
            <groupId>com.google.http-client</groupId>
            <artifactId>google-http-client</artifactId>
        </dependency>
        <dependency>
            <groupId>com.google.http-client</groupId>
            <artifactId>google-http-client-jackson2</artifactId>
        </dependency>
        <dependency>
            <groupId>com.google.oauth-client</groupId>
            <artifactId>google-oauth-client</artifactId>
        </dependency>

        <!-- FindBugs -->
        <dependency>
@@ -702,7 +711,6 @@
        <dependency>
            <groupId>jakarta.inject</groupId>
            <artifactId>jakarta.inject-api</artifactId>
            <version>2.0.1</version>
        </dependency>

        <!-- JAXB API and implementation (no longer bundled as of Java 11) -->
@@ -733,7 +741,7 @@
        <dependency>
            <groupId>com.amazonaws</groupId>
            <artifactId>aws-java-sdk-s3</artifactId>
            <version>1.12.261</version>
            <version>1.12.779</version>
        </dependency>

        <!-- TODO: This may need to be replaced with the "orcid-model" artifact once this ticket is resolved:
@@ -776,18 +784,20 @@
        <dependency>
            <groupId>org.apache.velocity</groupId>
            <artifactId>velocity-engine-core</artifactId>
            <version>2.4.1</version>
        </dependency>

        <dependency>
            <groupId>org.xmlunit</groupId>
            <artifactId>xmlunit-core</artifactId>
            <version>2.10.0</version>
            <scope>test</scope>
        </dependency>

        <dependency>
            <groupId>org.apache.bcel</groupId>
            <artifactId>bcel</artifactId>
            <version>6.7.0</version>
            <version>6.10.0</version>
            <scope>test</scope>
        </dependency>

@@ -814,7 +824,7 @@
        <dependency>
            <groupId>org.mock-server</groupId>
            <artifactId>mockserver-junit-rule</artifactId>
            <version>5.11.2</version>
            <version>5.15.0</version>
            <scope>test</scope>
            <exclusions>
                <!-- Exclude snakeyaml to avoid conflicts with: spring-boot-starter-cache -->
@@ -856,75 +866,4 @@
            </exclusions>
        </dependency>
    </dependencies>

    <dependencyManagement>
        <dependencies>
            <!-- for mockserver -->
            <!-- Solve dependency convergence issues related to Solr and
                 'mockserver-junit-rule' by selecting the versions we want to use. -->
            <dependency>
                <groupId>io.netty</groupId>
                <artifactId>netty-buffer</artifactId>
                <version>4.1.106.Final</version>
            </dependency>
            <dependency>
                <groupId>io.netty</groupId>
                <artifactId>netty-transport</artifactId>
                <version>4.1.106.Final</version>
            </dependency>
            <dependency>
                <groupId>io.netty</groupId>
                <artifactId>netty-transport-native-unix-common</artifactId>
                <version>4.1.106.Final</version>
            </dependency>
            <dependency>
                <groupId>io.netty</groupId>
                <artifactId>netty-common</artifactId>
                <version>4.1.106.Final</version>
            </dependency>
            <dependency>
                <groupId>io.netty</groupId>
                <artifactId>netty-handler</artifactId>
                <version>4.1.106.Final</version>
            </dependency>
            <dependency>
                <groupId>io.netty</groupId>
                <artifactId>netty-codec</artifactId>
                <version>4.1.106.Final</version>
            </dependency>
            <dependency>
                <groupId>org.apache.velocity</groupId>
                <artifactId>velocity-engine-core</artifactId>
                <version>2.3</version>
            </dependency>
            <dependency>
                <groupId>org.xmlunit</groupId>
                <artifactId>xmlunit-core</artifactId>
                <version>2.10.0</version>
                <scope>test</scope>
            </dependency>
            <dependency>
                <groupId>com.github.java-json-tools</groupId>
                <artifactId>json-schema-validator</artifactId>
                <version>2.2.14</version>
            </dependency>
            <dependency>
                <groupId>jakarta.validation</groupId>
                <artifactId>jakarta.validation-api</artifactId>
                <version>3.0.2</version>
            </dependency>
            <dependency>
                <groupId>io.swagger</groupId>
                <artifactId>swagger-core</artifactId>
                <version>1.6.2</version>
            </dependency>
            <dependency>
                <groupId>org.scala-lang</groupId>
                <artifactId>scala-library</artifactId>
                <version>2.13.11</version>
                <scope>test</scope>
            </dependency>
        </dependencies>
    </dependencyManagement>

</project>

@@ -18,6 +18,7 @@ import java.nio.charset.StandardCharsets;
import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.poi.util.IOUtils;
import org.apache.tika.Tika;
import org.apache.tika.exception.TikaException;
import org.apache.tika.metadata.Metadata;
@@ -72,21 +73,23 @@ public class TikaTextExtractionFilter
            // Not using temporary file. We'll use Tika's default in-memory parsing.
            // Get maximum characters to extract. Default is 100,000 chars, which is also Tika's default setting.
            String extractedText;
            int maxChars = configurationService.getIntProperty("textextractor.max-chars", 100000);
            int maxChars = configurationService.getIntProperty("textextractor.max-chars", 100_000);
            try {
                // Use Tika to extract text from input. Tika will automatically detect the file type.
                Tika tika = new Tika();
                tika.setMaxStringLength(maxChars); // Tell Tika the maximum number of characters to extract
                IOUtils.setByteArrayMaxOverride(
                    configurationService.getIntProperty("textextractor.max-array", 100_000_000));
                extractedText = tika.parseToString(source);
            } catch (IOException e) {
                System.err.format("Unable to extract text from bitstream in Item %s%n", currentItem.getID().toString());
                e.printStackTrace();
                e.printStackTrace(System.err);
                log.error("Unable to extract text from bitstream in Item {}", currentItem.getID().toString(), e);
                throw e;
            } catch (OutOfMemoryError oe) {
                System.err.format("OutOfMemoryError occurred when extracting text from bitstream in Item %s. " +
                    "You may wish to enable 'textextractor.use-temp-file'.%n", currentItem.getID().toString());
                oe.printStackTrace();
                oe.printStackTrace(System.err);
                log.error("OutOfMemoryError occurred when extracting text from bitstream in Item {}. " +
                    "You may wish to enable 'textextractor.use-temp-file'.", currentItem.getID().toString(), oe);
                throw oe;

@@ -281,10 +281,14 @@ public class LogAnalyser {
     */
    private static String fileTemplate = "dspace\\.log.*";

    private static final ConfigurationService configurationService =
        DSpaceServicesFactory.getInstance().getConfigurationService();

    /**
     * the configuration file from which to configure the analyser
     */
    private static String configFile;
    private static String configFile = configurationService.getProperty("dspace.dir")
        + File.separator + "config" + File.separator + "dstat.cfg";

    /**
     * the output file to which to write aggregation data
@@ -616,8 +620,6 @@ public class LogAnalyser {
        }

        // now do the host name and url lookup
        ConfigurationService configurationService
            = DSpaceServicesFactory.getInstance().getConfigurationService();
        hostName = Utils.getHostName(configurationService.getProperty("dspace.ui.url"));
        name = configurationService.getProperty("dspace.name").trim();
        url = configurationService.getProperty("dspace.ui.url").trim();
@@ -658,8 +660,6 @@ public class LogAnalyser {
                                   String myConfigFile, String myOutFile,
                                   Date myStartDate, Date myEndDate,
                                   boolean myLookUp) {
        ConfigurationService configurationService
            = DSpaceServicesFactory.getInstance().getConfigurationService();

        if (myLogDir != null) {
            logDir = myLogDir;
@@ -673,9 +673,6 @@ public class LogAnalyser {

        if (myConfigFile != null) {
            configFile = myConfigFile;
        } else {
            configFile = configurationService.getProperty("dspace.dir")
                + File.separator + "config" + File.separator + "dstat.cfg";
        }

        if (myStartDate != null) {

@@ -46,8 +46,6 @@ Several "stock" implementations are provided.
<dd>writes event records to the Java logger.</dd>
<dt>{@link org.dspace.statistics.SolrLoggerUsageEventListener SolrLoggerUsageEventListener}</dt>
<dd>writes event records to Solr.</dd>
<dt>{@link org.dspace.google.GoogleRecorderEventListener GoogleRecorderEventListener}<.dt>
<dd>writes event records to Google Analytics.</dd>
</dl>
</body>
</html>

@@ -523,9 +523,9 @@ public class AuthorizeUtil {

        for (Collection coll : colls) {
            if (!AuthorizeConfiguration
                .canCollectionAdminPerformItemReinstatiate()) {
                .canCollectionAdminPerformItemReinstate()) {
                if (AuthorizeConfiguration
                    .canCommunityAdminPerformItemReinstatiate()
                    .canCommunityAdminPerformItemReinstate()
                    && authorizeService.authorizeActionBoolean(context,
                        coll.getCommunities().get(0), Constants.ADMIN)) {
                    // authorized

@@ -163,7 +163,7 @@ public class DCInput {
     * The scope of the input sets, this restricts hidden metadata fields from
     * view by the end user during submission.
     */
    public static final String SUBMISSION_SCOPE = "submit";
    public static final String SUBMISSION_SCOPE = "submission";

    /**
     * Class constructor for creating a DCInput object based on the contents of
@@ -262,7 +262,7 @@ public class DCInput {

    /**
     * Is this DCInput for display in the given scope? The scope should be
     * either "workflow" or "submit", as per the input forms definition. If the
     * either "workflow" or "submission", as per the input forms definition. If the
     * internal visibility is set to "null" then this will always return true.
     *
     * @param scope String identifying the scope that this input's visibility

@@ -14,7 +14,6 @@ import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

import com.rometools.modules.opensearch.OpenSearchModule;
import com.rometools.modules.opensearch.entity.OSQuery;
@@ -58,12 +57,12 @@ public class OpenSearchServiceImpl implements OpenSearchService {
    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(OpenSearchServiceImpl.class);

    // Namespaces used
    protected final String osNs = "http://a9.com/-/spec/opensearch/1.1/";
    protected final static String osNs = "http://a9.com/-/spec/opensearch/1.1/";

    @Autowired(required = true)
    @Autowired
    protected ConfigurationService configurationService;

    @Autowired(required = true)
    @Autowired
    protected HandleService handleService;

    protected OpenSearchServiceImpl() {
@@ -119,11 +118,10 @@ public class OpenSearchServiceImpl implements OpenSearchService {

    @Override
    public String getResultsString(Context context, String format, String query, int totalResults, int start,
                                   int pageSize,
                                   IndexableObject scope, List<IndexableObject> results,
                                   Map<String, String> labels) throws IOException {
                                   int pageSize, IndexableObject scope, List<IndexableObject> results)
        throws IOException {
        try {
            return getResults(context, format, query, totalResults, start, pageSize, scope, results, labels)
            return getResults(context, format, query, totalResults, start, pageSize, scope, results)
                .outputString();
        } catch (FeedException e) {
            log.error(e.toString(), e);
@@ -133,11 +131,10 @@ public class OpenSearchServiceImpl implements OpenSearchService {

    @Override
    public Document getResultsDoc(Context context, String format, String query, int totalResults, int start,
                                  int pageSize,
                                  IndexableObject scope, List<IndexableObject> results, Map<String, String> labels)
                                  int pageSize, IndexableObject scope, List<IndexableObject> results)
        throws IOException {
        try {
            return getResults(context, format, query, totalResults, start, pageSize, scope, results, labels)
            return getResults(context, format, query, totalResults, start, pageSize, scope, results)
                .outputW3CDom();
        } catch (FeedException e) {
            log.error(e.toString(), e);
@@ -146,8 +143,7 @@ public class OpenSearchServiceImpl implements OpenSearchService {
    }

    protected SyndicationFeed getResults(Context context, String format, String query, int totalResults, int start,
                                         int pageSize, IndexableObject scope,
                                         List<IndexableObject> results, Map<String, String> labels) {
                                         int pageSize, IndexableObject scope, List<IndexableObject> results) {
        // Encode results in requested format
        if ("rss".equals(format)) {
            format = "rss_2.0";
@@ -156,7 +152,7 @@ public class OpenSearchServiceImpl implements OpenSearchService {
        }

        SyndicationFeed feed = new SyndicationFeed();
        feed.populate(null, context, scope, results, labels);
        feed.populate(null, context, scope, results);
        feed.setType(format);
        feed.addModule(openSearchMarkup(query, totalResults, start, pageSize));
        return feed;

|
@@ -11,6 +11,7 @@ import java.io.IOException;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Date;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -135,8 +136,6 @@ public class SyndicationFeed {
     protected String[] podcastableMIMETypes =
         configurationService.getArrayProperty("webui.feed.podcast.mimetypes", new String[] {"audio/x-mpeg"});
 
-    // -------- Instance variables:
-
     // the feed object we are building
     protected SyndFeed feed = null;
 
@@ -146,9 +145,6 @@ public class SyndicationFeed {
     protected CommunityService communityService;
     protected ItemService itemService;
 
-    /**
-     * Constructor.
-     */
     public SyndicationFeed() {
         feed = new SyndFeedImpl();
         ContentServiceFactory contentServiceFactory = ContentServiceFactory.getInstance();
@@ -157,16 +153,6 @@ public class SyndicationFeed {
         communityService = contentServiceFactory.getCommunityService();
     }
 
-    /**
-     * Returns list of metadata selectors used to compose the description element
-     *
-     * @return selector list - format 'schema.element[.qualifier]'
-     */
-    public static String[] getDescriptionSelectors() {
-        return (String[]) ArrayUtils.clone(descriptionFields);
-    }
-
-
     /**
      * Fills in the feed and entry-level metadata from DSpace objects.
      *
@@ -174,15 +160,17 @@ public class SyndicationFeed {
      * @param context context
      * @param dso     the scope
      * @param items   array of objects
-     * @param labels  label map
      */
     public void populate(HttpServletRequest request, Context context, IndexableObject dso,
-                         List<IndexableObject> items, Map<String, String> labels) {
+                         List<IndexableObject> items) {
         String logoURL = null;
         String objectURL = null;
         String defaultTitle = null;
         boolean podcastFeed = false;
         this.request = request;
 
+        Map<String, String> labels = getLabels();
+
         // dso is null for the whole site, or a search without scope
         if (dso == null) {
             defaultTitle = configurationService.getProperty("dspace.name");
@@ -553,5 +541,19 @@ public class SyndicationFeed {
         List<MetadataValue> dcv = itemService.getMetadataByMetadataString(item, field);
         return (dcv.size() > 0) ? dcv.get(0).getValue() : null;
     }
+
+    /**
+     * Internal method to get labels for the returned document
+     */
+    private Map<String, String> getLabels() {
+        // TODO: get strings from translation file or configuration
+        Map<String, String> labelMap = new HashMap<>();
+        labelMap.put(SyndicationFeed.MSG_UNTITLED, "notitle");
+        labelMap.put(SyndicationFeed.MSG_LOGO_TITLE, "logo.title");
+        labelMap.put(SyndicationFeed.MSG_FEED_DESCRIPTION, "general-feed.description");
+        for (String selector : descriptionFields) {
+            labelMap.put("metadata." + selector, selector);
+        }
+        return labelMap;
+    }
 }
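A minimal sketch of the call-site change implied by the SyndicationFeed hunks above: callers no longer pass a labels map to populate(), because the feed now builds one internally via the new private getLabels(). Class and method names come from this diff; the context, scope and results variables are assumed caller state, and the serialization call mirrors the OpenSearchServiceImpl usage above.

    // Sketch only; assumes DSpace imports and caller-provided context/scope/results.
    SyndicationFeed feed = new SyndicationFeed();
    // Before this change: feed.populate(null, context, scope, results, labels);
    feed.populate(null, context, scope, results);   // labels now resolved internally
    feed.setType("rss_2.0");
    String xml = feed.outputString();               // as used by OpenSearchServiceImpl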
@@ -10,7 +10,6 @@ package org.dspace.app.util.service;
 
 import java.io.IOException;
 import java.sql.SQLException;
 import java.util.List;
-import java.util.Map;
 
 import org.dspace.content.DSpaceObject;
 import org.dspace.core.Context;
@@ -86,14 +85,12 @@ public interface OpenSearchService {
      * @param pageSize     - page size
      * @param scope        - search scope, null or the community/collection
      * @param results      the retrieved DSpace objects satisfying search
-     * @param labels       labels to apply - format specific
      * @return formatted search results
      * @throws IOException if IO error
      */
     public String getResultsString(Context context, String format, String query, int totalResults, int start,
-                                   int pageSize,
-                                   IndexableObject scope, List<IndexableObject> results,
-                                   Map<String, String> labels) throws IOException;
+                                   int pageSize, IndexableObject scope, List<IndexableObject> results)
+        throws IOException;
 
     /**
      * Returns a formatted set of search results as a document
@@ -106,13 +103,11 @@ public interface OpenSearchService {
      * @param pageSize     - page size
      * @param scope        - search scope, null or the community/collection
      * @param results      the retrieved DSpace objects satisfying search
-     * @param labels       labels to apply - format specific
      * @return formatted search results
     * @throws IOException if IO error
      */
     public Document getResultsDoc(Context context, String format, String query, int totalResults, int start,
-                                  int pageSize,
-                                  IndexableObject scope, List<IndexableObject> results, Map<String, String> labels)
+                                  int pageSize, IndexableObject scope, List<IndexableObject> results)
         throws IOException;
 
     public DSpaceObject resolveScope(Context context, String scope) throws SQLException;
@@ -17,6 +17,7 @@ import java.util.Collections;
 import java.util.Hashtable;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Optional;
 import javax.naming.NamingEnumeration;
 import javax.naming.NamingException;
 import javax.naming.directory.Attribute;
@@ -68,12 +69,8 @@ import org.dspace.services.factory.DSpaceServicesFactory;
  * @author Ivan Masár
  * @author Michael Plate
  */
-public class LDAPAuthentication
-    implements AuthenticationMethod {
+public class LDAPAuthentication implements AuthenticationMethod {
 
     /**
      * log4j category
     */
     private static final Logger log
         = org.apache.logging.log4j.LogManager.getLogger(LDAPAuthentication.class);
 
@@ -130,7 +127,7 @@ public class LDAPAuthentication
         return false;
     }
 
-    /*
+    /**
      * This is an explicit method.
     */
     @Override
@@ -138,7 +135,7 @@ public class LDAPAuthentication
         return false;
     }
 
-    /*
+    /**
     * Add authenticated users to the group defined in dspace.cfg by
     * the login.specialgroup key.
    */
@@ -177,7 +174,7 @@ public class LDAPAuthentication
         return Collections.EMPTY_LIST;
     }
 
-    /*
+    /**
     * Authenticate the given credentials.
     * This is the heart of the authentication method: test the
     * credentials for authenticity, and if accepted, attempt to match
@@ -187,7 +184,7 @@ public class LDAPAuthentication
    * @param context
    *  DSpace context, will be modified (ePerson set) upon success.
    *
-    * @param username
+    * @param netid
    *  Username (or email address) when method is explicit. Use null for
    *  implicit method.
    *
@@ -250,7 +247,7 @@ public class LDAPAuthentication
             }
 
             // Check a DN was found
-            if ((dn == null) || (dn.trim().equals(""))) {
+            if (StringUtils.isBlank(dn)) {
                 log.info(LogHelper
                     .getHeader(context, "failed_login", "no DN found for user " + netid));
                 return BAD_CREDENTIALS;
@@ -269,6 +266,18 @@ public class LDAPAuthentication
                 context.setCurrentUser(eperson);
                 request.setAttribute(LDAP_AUTHENTICATED, true);
 
+                // update eperson's attributes
+                context.turnOffAuthorisationSystem();
+                setEpersonAttributes(context, eperson, ldap, Optional.empty());
+                try {
+                    ePersonService.update(context, eperson);
+                    context.dispatchEvents();
+                } catch (AuthorizeException e) {
+                    log.warn("update of eperson " + eperson.getID() + " failed", e);
+                } finally {
+                    context.restoreAuthSystemState();
+                }
+
                 // assign user to groups based on ldap dn
                 assignGroups(dn, ldap.ldapGroup, context);
 
@@ -313,14 +322,13 @@ public class LDAPAuthentication
                     log.info(LogHelper.getHeader(context,
                         "type=ldap-login", "type=ldap_but_already_email"));
                     context.turnOffAuthorisationSystem();
-                    eperson.setNetid(netid.toLowerCase());
+                    setEpersonAttributes(context, eperson, ldap, Optional.of(netid));
                     ePersonService.update(context, eperson);
                     context.dispatchEvents();
                     context.restoreAuthSystemState();
                    context.setCurrentUser(eperson);
                     request.setAttribute(LDAP_AUTHENTICATED, true);
 
-
                     // assign user to groups based on ldap dn
                     assignGroups(dn, ldap.ldapGroup, context);
 
@@ -331,20 +339,7 @@ public class LDAPAuthentication
                 try {
                     context.turnOffAuthorisationSystem();
                     eperson = ePersonService.create(context);
-                    if (StringUtils.isNotEmpty(email)) {
-                        eperson.setEmail(email);
-                    }
-                    if (StringUtils.isNotEmpty(ldap.ldapGivenName)) {
-                        eperson.setFirstName(context, ldap.ldapGivenName);
-                    }
-                    if (StringUtils.isNotEmpty(ldap.ldapSurname)) {
-                        eperson.setLastName(context, ldap.ldapSurname);
-                    }
-                    if (StringUtils.isNotEmpty(ldap.ldapPhone)) {
-                        ePersonService.setMetadataSingleValue(context, eperson,
-                            MD_PHONE, ldap.ldapPhone, null);
-                    }
-                    eperson.setNetid(netid.toLowerCase());
+                    setEpersonAttributes(context, eperson, ldap, Optional.of(netid));
                     eperson.setCanLogIn(true);
                     authenticationService.initEPerson(context, request, eperson);
                     ePersonService.update(context, eperson);
@@ -382,6 +377,29 @@ public class LDAPAuthentication
             return BAD_ARGS;
         }
 
+    /**
+     * Update eperson's attributes
+     */
+    private void setEpersonAttributes(Context context, EPerson eperson, SpeakerToLDAP ldap, Optional<String> netid)
+        throws SQLException {
+
+        if (StringUtils.isNotEmpty(ldap.ldapEmail)) {
+            eperson.setEmail(ldap.ldapEmail);
+        }
+        if (StringUtils.isNotEmpty(ldap.ldapGivenName)) {
+            eperson.setFirstName(context, ldap.ldapGivenName);
+        }
+        if (StringUtils.isNotEmpty(ldap.ldapSurname)) {
+            eperson.setLastName(context, ldap.ldapSurname);
+        }
+        if (StringUtils.isNotEmpty(ldap.ldapPhone)) {
+            ePersonService.setMetadataSingleValue(context, eperson, MD_PHONE, ldap.ldapPhone, null);
+        }
+        if (netid.isPresent()) {
+            eperson.setNetid(netid.get().toLowerCase());
+        }
+    }
+
     /**
      * Internal class to manage LDAP query and results, mainly
      * because there are multiple values to return.
@@ -503,6 +521,7 @@ public class LDAPAuthentication
             } else {
                 searchName = ldap_provider_url + ldap_search_context;
             }
+            @SuppressWarnings("BanJNDI")
             NamingEnumeration<SearchResult> answer = ctx.search(
                 searchName,
                 "(&({0}={1}))", new Object[] {ldap_id_field,
@@ -553,7 +572,7 @@ public class LDAPAuthentication
                     att = atts.get(attlist[4]);
                     if (att != null) {
                         // loop through all groups returned by LDAP
-                        ldapGroup = new ArrayList<String>();
+                        ldapGroup = new ArrayList<>();
                         for (NamingEnumeration val = att.getAll(); val.hasMoreElements(); ) {
                             ldapGroup.add((String) val.next());
                         }
@@ -633,7 +652,8 @@ public class LDAPAuthentication
                 ctx.addToEnvironment(javax.naming.Context.AUTHORITATIVE, "true");
                 ctx.addToEnvironment(javax.naming.Context.REFERRAL, "follow");
                 // dummy operation to check if authentication has succeeded
-                ctx.getAttributes("");
+                @SuppressWarnings("BanJNDI")
+                Attributes trash = ctx.getAttributes("");
             } else if (!useTLS) {
                 // Authenticate
                 env.put(javax.naming.Context.SECURITY_AUTHENTICATION, "Simple");
@@ -671,7 +691,7 @@ public class LDAPAuthentication
             }
         }
 
-    /*
+    /**
     * Returns the URL of an external login page which is not applicable for this authn method.
     *
     * Note: Prior to DSpace 7, this method returned the page of the login servlet.
@@ -699,7 +719,7 @@ public class LDAPAuthentication
         return "ldap";
     }
 
-    /*
+    /**
    * Add authenticated users to the group defined in dspace.cfg by
    * the authentication-ldap.login.groupmap.* key.
    *
@@ -174,9 +174,9 @@ public class AuthorizeConfiguration {
      *
      * @return true/false
     */
-    public static boolean canCommunityAdminPerformItemReinstatiate() {
+    public static boolean canCommunityAdminPerformItemReinstate() {
         init();
-        return configurationService.getBooleanProperty("core.authorization.community-admin.item.reinstatiate", true);
+        return configurationService.getBooleanProperty("core.authorization.community-admin.item.reinstate", true);
     }
 
     /**
@@ -306,9 +306,9 @@ public class AuthorizeConfiguration {
      *
     * @return true/false
     */
-    public static boolean canCollectionAdminPerformItemReinstatiate() {
+    public static boolean canCollectionAdminPerformItemReinstate() {
         init();
-        return configurationService.getBooleanProperty("core.authorization.collection-admin.item.reinstatiate", true);
+        return configurationService.getBooleanProperty("core.authorization.collection-admin.item.reinstate", true);
     }
 
     /**
@@ -422,9 +422,6 @@ public class BrowseEngine {
             }
         }
 
-        // this is the total number of results in answer to the query
-        int total = getTotalResults(true);
-
         // set the ordering field (there is only one option)
         dao.setOrderField("sort_value");
 
@@ -444,6 +441,9 @@ public class BrowseEngine {
             dao.setOffset(offset);
             dao.setLimit(scope.getResultsPerPage());
 
+            // this is the total number of results in answer to the query
+            int total = getTotalResults(true);
+
             // Holder for the results
             List<String[]> results = null;
 
@@ -680,33 +680,9 @@ public class BrowseEngine {
         // tell the browse query whether we are distinct
         dao.setDistinct(distinct);
 
-        // ensure that the select is set to "*"
-        String[] select = {"*"};
-        dao.setCountValues(select);
-
-        // FIXME: it would be nice to have a good way of doing this in the DAO
-        // now reset all of the fields that we don't want to have constraining
-        // our count, storing them locally to reinstate later
-        String focusField = dao.getJumpToField();
-        String focusValue = dao.getJumpToValue();
-        int limit = dao.getLimit();
-        int offset = dao.getOffset();
-
-        dao.setJumpToField(null);
-        dao.setJumpToValue(null);
-        dao.setLimit(-1);
-        dao.setOffset(-1);
-
         // perform the query and get the result
         int count = dao.doCountQuery();
 
-        // now put back the values we removed for this method
-        dao.setJumpToField(focusField);
-        dao.setJumpToValue(focusValue);
-        dao.setLimit(limit);
-        dao.setOffset(offset);
-        dao.setCountValues(null);
-
         log.debug(LogHelper.getHeader(context, "get_total_results_return", "return=" + count));
 
         return count;
@@ -543,19 +543,6 @@ public class BrowseIndex {
         return getTableName(false, false, true, false);
     }
 
-    /**
-     * Get the name of the column that is used to store the default value column
-     *
-     * @return the name of the value column
-     */
-    public String getValueColumn() {
-        if (!isDate()) {
-            return "sort_text_value";
-        } else {
-            return "text_value";
-        }
-    }
-
     /**
     * Get the name of the primary key index column
      *
@@ -565,35 +552,6 @@ public class BrowseIndex {
         return "id";
     }
 
-    /**
-     * Is this browse index type for a title?
-     *
-     * @return true if title type, false if not
-     */
-    // public boolean isTitle()
-    // {
-    //     return "title".equals(getDataType());
-    // }
-
-    /**
-     * Is the browse index type for a date?
-     *
-     * @return true if date type, false if not
-     */
-    public boolean isDate() {
-        return "date".equals(getDataType());
-    }
-
-    /**
-     * Is the browse index type for a plain text type?
-     *
-     * @return true if plain text type, false if not
-     */
-    // public boolean isText()
-    // {
-    //     return "text".equals(getDataType());
-    // }
-
     /**
     * Is the browse index of display type single?
     *
@@ -13,6 +13,8 @@ import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
 
+import com.fasterxml.jackson.databind.node.JsonNodeFactory;
+import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.logging.log4j.Logger;
 import org.apache.solr.client.solrj.util.ClientUtils;
@@ -180,18 +182,33 @@ public class SolrBrowseDAO implements BrowseDAO {
         addDefaultFilterQueries(query);
         if (distinct) {
             DiscoverFacetField dff;
 
+            // To get the number of distinct values we use the next "json.facet" query param
+            // {"entries_count": {"type":"terms","field": "<fieldName>_filter", "limit":0, "numBuckets":true}}"
+            ObjectNode jsonFacet = JsonNodeFactory.instance.objectNode();
+            ObjectNode entriesCount = JsonNodeFactory.instance.objectNode();
+            entriesCount.put("type", "terms");
+            entriesCount.put("field", facetField + "_filter");
+            entriesCount.put("limit", 0);
+            entriesCount.put("numBuckets", true);
+            jsonFacet.set("entries_count", entriesCount);
+
             if (StringUtils.isNotBlank(startsWith)) {
                 dff = new DiscoverFacetField(facetField,
-                    DiscoveryConfigurationParameters.TYPE_TEXT, -1,
-                    DiscoveryConfigurationParameters.SORT.VALUE, startsWith);
+                    DiscoveryConfigurationParameters.TYPE_TEXT, limit,
+                    DiscoveryConfigurationParameters.SORT.VALUE, startsWith, offset);
+
+                // Add the prefix to the json facet query
+                entriesCount.put("prefix", startsWith);
             } else {
                 dff = new DiscoverFacetField(facetField,
-                    DiscoveryConfigurationParameters.TYPE_TEXT, -1,
-                    DiscoveryConfigurationParameters.SORT.VALUE);
+                    DiscoveryConfigurationParameters.TYPE_TEXT, limit,
+                    DiscoveryConfigurationParameters.SORT.VALUE, offset);
             }
             query.addFacetField(dff);
             query.setFacetMinCount(1);
             query.setMaxResults(0);
+            query.addProperty("json.facet", jsonFacet.toString());
         } else {
             query.setMaxResults(limit/* > 0 ? limit : 20*/);
             if (offset > 0) {
@@ -248,8 +265,7 @@ public class SolrBrowseDAO implements BrowseDAO {
         DiscoverResult resp = getSolrResponse();
         int count = 0;
         if (distinct) {
-            List<FacetResult> facetResults = resp.getFacetResult(facetField);
-            count = facetResults.size();
+            count = (int) resp.getTotalEntries();
         } else {
             // we need to cast to int to respect the BrowseDAO contract...
             count = (int) resp.getTotalSearchResults();
@@ -266,8 +282,8 @@ public class SolrBrowseDAO implements BrowseDAO {
         DiscoverResult resp = getSolrResponse();
         List<FacetResult> facet = resp.getFacetResult(facetField);
         int count = doCountQuery();
-        int start = offset > 0 ? offset : 0;
-        int max = limit > 0 ? limit : count; //if negative, return everything
+        int start = 0;
+        int max = facet.size();
         List<String[]> result = new ArrayList<>();
         if (ascending) {
             for (int i = start; i < (start + max) && i < count; i++) {
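To make the distinct-count change concrete, here is a standalone sketch of what the json.facet parameter assembled above serializes to. Only Jackson is required; the field name is an invented example, and the output matches the comment in the hunk.

    import com.fasterxml.jackson.databind.node.JsonNodeFactory;
    import com.fasterxml.jackson.databind.node.ObjectNode;

    public class JsonFacetSketch {
        public static void main(String[] args) {
            ObjectNode jsonFacet = JsonNodeFactory.instance.objectNode();
            ObjectNode entriesCount = JsonNodeFactory.instance.objectNode();
            entriesCount.put("type", "terms");
            entriesCount.put("field", "dc_contributor_author_filter"); // example field
            entriesCount.put("limit", 0);          // return no buckets...
            entriesCount.put("numBuckets", true);  // ...only the distinct bucket count
            jsonFacet.set("entries_count", entriesCount);
            // {"entries_count":{"type":"terms","field":"dc_contributor_author_filter","limit":0,"numBuckets":true}}
            System.out.println(jsonFacet);
        }
    }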
@@ -67,6 +67,7 @@ import org.dspace.event.Event;
 import org.dspace.harvest.HarvestedItem;
 import org.dspace.harvest.service.HarvestedItemService;
 import org.dspace.identifier.DOI;
+import org.dspace.identifier.DOIIdentifierProvider;
 import org.dspace.identifier.IdentifierException;
 import org.dspace.identifier.service.DOIService;
 import org.dspace.identifier.service.IdentifierService;
@@ -851,6 +852,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
         DOI doi = doiService.findDOIByDSpaceObject(context, item);
         if (doi != null) {
             doi.setDSpaceObject(null);
+            doi.setStatus(DOIIdentifierProvider.TO_BE_DELETED);
         }
 
         // remove version attached to the item
@@ -178,6 +178,14 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
 
     @Override
     public WorkspaceItem create(Context c, WorkflowItem workflowItem) throws SQLException, AuthorizeException {
+        WorkspaceItem potentialDuplicate = findByItem(c, workflowItem.getItem());
+        if (potentialDuplicate != null) {
+            throw new IllegalArgumentException(String.format(
+                "A workspace item referring to item %s already exists (%d)",
+                workflowItem.getItem().getID(),
+                potentialDuplicate.getID()
+            ));
+        }
         WorkspaceItem workspaceItem = workspaceItemDAO.create(c, new WorkspaceItem());
         workspaceItem.setItem(workflowItem.getItem());
         workspaceItem.setCollection(workflowItem.getCollection());
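A short usage sketch of the new guard (service and variable wiring assumed): restoring a workflow item whose underlying Item already has a workspace item now fails fast with IllegalArgumentException instead of creating a conflicting row.

    // Sketch; workspaceItemService, context and workflowItem are assumed to exist.
    try {
        WorkspaceItem restored = workspaceItemService.create(context, workflowItem);
    } catch (IllegalArgumentException e) {
        // e.g. "A workspace item referring to item <uuid> already exists (<id>)"
        log.warn("Item already has a workspace item; skipping restore", e);
    }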
@@ -8,6 +8,7 @@
 package org.dspace.content.authority;
 
 import java.io.File;
+import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -65,14 +66,17 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
     protected static String labelTemplate = "//node[@label = '%s']";
     protected static String idParentTemplate = "//node[@id = '%s']/parent::isComposedBy/parent::node";
     protected static String rootTemplate = "/node";
+    protected static String idAttribute = "id";
+    protected static String labelAttribute = "label";
     protected static String pluginNames[] = null;
 
     protected String vocabularyName = null;
     protected InputSource vocabulary = null;
     protected Boolean suggestHierarchy = false;
     protected Boolean storeHierarchy = true;
     protected String hierarchyDelimiter = "::";
     protected Integer preloadLevel = 1;
+    protected String valueAttribute = labelAttribute;
+    protected String valueTemplate = labelTemplate;
 
     public DSpaceControlledVocabulary() {
         super();
@@ -115,7 +119,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
         }
     }
 
-    protected void init() {
+    protected void init(String locale) {
         if (vocabulary == null) {
             ConfigurationService config = DSpaceServicesFactory.getInstance().getConfigurationService();
 
@@ -125,13 +129,25 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
                 File.separator + "controlled-vocabularies" + File.separator;
             String configurationPrefix = "vocabulary.plugin." + vocabularyName;
             storeHierarchy = config.getBooleanProperty(configurationPrefix + ".hierarchy.store", storeHierarchy);
+            boolean storeIDs = config.getBooleanProperty(configurationPrefix + ".storeIDs", false);
             suggestHierarchy = config.getBooleanProperty(configurationPrefix + ".hierarchy.suggest", suggestHierarchy);
             preloadLevel = config.getIntProperty(configurationPrefix + ".hierarchy.preloadLevel", preloadLevel);
             String configuredDelimiter = config.getProperty(configurationPrefix + ".delimiter");
             if (configuredDelimiter != null) {
                 hierarchyDelimiter = configuredDelimiter.replaceAll("(^\"|\"$)", "");
             }
+            if (storeIDs) {
+                valueAttribute = idAttribute;
+                valueTemplate = idTemplate;
+            }
+
             String filename = vocabulariesPath + vocabularyName + ".xml";
+            if (StringUtils.isNotEmpty(locale)) {
+                String localizedFilename = vocabulariesPath + vocabularyName + "_" + locale + ".xml";
+                if (Paths.get(localizedFilename).toFile().exists()) {
+                    filename = localizedFilename;
+                }
+            }
             log.info("Loading " + filename);
             vocabulary = new InputSource(filename);
         }
@@ -144,9 +160,9 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
             return ("");
         } else {
             String parentValue = buildString(node.getParentNode());
-            Node currentLabel = node.getAttributes().getNamedItem("label");
-            if (currentLabel != null) {
-                String currentValue = currentLabel.getNodeValue();
+            Node currentNodeValue = node.getAttributes().getNamedItem(valueAttribute);
+            if (currentNodeValue != null) {
+                String currentValue = currentNodeValue.getNodeValue();
                 if (parentValue.equals("")) {
                     return currentValue;
                 } else {
@@ -160,12 +176,13 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
 
     @Override
     public Choices getMatches(String text, int start, int limit, String locale) {
-        init();
+        init(locale);
         log.debug("Getting matches for '" + text + "'");
         String xpathExpression = "";
         String[] textHierarchy = text.split(hierarchyDelimiter, -1);
         for (int i = 0; i < textHierarchy.length; i++) {
-            xpathExpression += String.format(xpathTemplate, textHierarchy[i].replaceAll("'", "&apos;").toLowerCase());
+            xpathExpression +=
+                String.format(xpathTemplate, textHierarchy[i].replaceAll("'", "&apos;").toLowerCase());
         }
         XPath xpath = XPathFactory.newInstance().newXPath();
         int total = 0;
@@ -184,12 +201,13 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
 
     @Override
     public Choices getBestMatch(String text, String locale) {
-        init();
+        init(locale);
         log.debug("Getting best matches for '" + text + "'");
         String xpathExpression = "";
         String[] textHierarchy = text.split(hierarchyDelimiter, -1);
         for (int i = 0; i < textHierarchy.length; i++) {
-            xpathExpression += String.format(labelTemplate, textHierarchy[i].replaceAll("'", "&apos;"));
+            xpathExpression +=
+                String.format(valueTemplate, textHierarchy[i].replaceAll("'", "&apos;"));
         }
         XPath xpath = XPathFactory.newInstance().newXPath();
         List<Choice> choices = new ArrayList<Choice>();
@@ -205,19 +223,19 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
 
     @Override
     public String getLabel(String key, String locale) {
-        return getNodeLabel(key, this.suggestHierarchy);
+        return getNodeValue(key, locale, this.suggestHierarchy);
     }
 
     @Override
     public String getValue(String key, String locale) {
-        return getNodeLabel(key, this.storeHierarchy);
+        return getNodeValue(key, locale, this.storeHierarchy);
     }
 
     @Override
     public Choice getChoice(String authKey, String locale) {
         Node node;
         try {
-            node = getNode(authKey);
+            node = getNode(authKey, locale);
         } catch (XPathExpressionException e) {
             return null;
         }
@@ -226,27 +244,27 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
 
     @Override
     public boolean isHierarchical() {
-        init();
+        init(null);
         return true;
     }
 
     @Override
     public Choices getTopChoices(String authorityName, int start, int limit, String locale) {
-        init();
+        init(locale);
         String xpathExpression = rootTemplate;
         return getChoicesByXpath(xpathExpression, start, limit);
     }
 
     @Override
     public Choices getChoicesByParent(String authorityName, String parentId, int start, int limit, String locale) {
-        init();
+        init(locale);
         String xpathExpression = String.format(idTemplate, parentId);
         return getChoicesByXpath(xpathExpression, start, limit);
     }
 
     @Override
     public Choice getParentChoice(String authorityName, String childId, String locale) {
-        init();
+        init(locale);
         try {
             String xpathExpression = String.format(idParentTemplate, childId);
             Choice choice = createChoiceFromNode(getNodeFromXPath(xpathExpression));
@@ -259,7 +277,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
 
     @Override
     public Integer getPreloadLevel() {
-        init();
+        init(null);
         return preloadLevel;
     }
 
@@ -270,8 +288,8 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
         return false;
     }
 
-    private Node getNode(String key) throws XPathExpressionException {
-        init();
+    private Node getNode(String key, String locale) throws XPathExpressionException {
+        init(locale);
         String xpathExpression = String.format(idTemplate, key);
         Node node = getNodeFromXPath(xpathExpression);
         return node;
@@ -319,16 +337,16 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
         return extras;
     }
 
-    private String getNodeLabel(String key, boolean useHierarchy) {
+    private String getNodeValue(String key, String locale, boolean useHierarchy) {
         try {
-            Node node = getNode(key);
+            Node node = getNode(key, locale);
             if (Objects.isNull(node)) {
                 return null;
             }
             if (useHierarchy) {
                 return this.buildString(node);
             } else {
-                return node.getAttributes().getNamedItem("label").getNodeValue();
+                return node.getAttributes().getNamedItem(valueAttribute).getNodeValue();
             }
         } catch (XPathExpressionException e) {
             return ("");
@@ -349,7 +367,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
         if (this.storeHierarchy) {
             return hierarchy;
         } else {
-            return node.getAttributes().getNamedItem("label").getNodeValue();
+            return node.getAttributes().getNamedItem(valueAttribute).getNodeValue();
         }
     }
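For illustration, the localized-file resolution added to init(String locale), extracted into a runnable sketch. The vocabulary name "srsc", locale "uk" and base path are invented examples; the <name>_<locale>.xml naming convention and the vocabulary.plugin.<name>.storeIDs switch come from the hunks above.

    import java.nio.file.Paths;

    public class VocabularyFileSketch {
        public static void main(String[] args) {
            String vocabulariesPath = "/dspace/config/controlled-vocabularies/"; // assumed dspace.dir layout
            String vocabularyName = "srsc";   // example vocabulary
            String locale = "uk";             // example UI locale
            String filename = vocabulariesPath + vocabularyName + ".xml";
            String localizedFilename = vocabulariesPath + vocabularyName + "_" + locale + ".xml";
            if (Paths.get(localizedFilename).toFile().exists()) {
                filename = localizedFilename; // localized file wins when present
            }
            System.out.println("Loading " + filename);
        }
    }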
@@ -313,7 +313,7 @@ public abstract class AbstractHibernateDAO<T> implements GenericDAO<T> {
         org.hibernate.query.Query hquery = query.unwrap(org.hibernate.query.Query.class);
         Stream<T> stream = hquery.stream();
         Iterator<T> iter = stream.iterator();
-        return new AbstractIterator<T> () {
+        return new AbstractIterator<T>() {
             @Override
             protected T computeNext() {
                 return iter.hasNext() ? iter.next() : endOfData();
@@ -883,7 +883,19 @@ public class Context implements AutoCloseable {
     }
 
     /**
-     * Remove an entity from the cache. This is necessary when batch processing a large number of items.
+     * Remove all entities from the cache and reload the current user entity. This is useful when batch processing
+     * a large number of entities when the calling code requires the cache to be completely cleared before continuing.
+     *
+     * @throws SQLException if a database error occurs.
+     */
+    public void uncacheEntities() throws SQLException {
+        dbConnection.uncacheEntities();
+        reloadContextBoundEntities();
+    }
+
+    /**
+     * Remove an entity from the cache. This is useful when batch processing a large number of entities
+     * when the calling code needs to retain some items in the cache while removing others.
      *
      * @param entity The entity to reload
      * @param <E>    The class of the entity. The entity must implement the {@link ReloadableEntity} interface.
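A hedged sketch of the batch pattern the new uncacheEntities() targets (iterator, service and interval are assumed plumbing): commit first so pending changes are flushed, then drop the whole session cache, after which the method reloads the context-bound user internally.

    // Sketch; items/itemService are assumed DSpace plumbing, 100 is an arbitrary interval.
    int processed = 0;
    while (items.hasNext()) {
        Item item = items.next();
        itemService.update(context, item);
        if (++processed % 100 == 0) {
            context.commit();           // flush pending changes first
            context.uncacheEntities();  // then clear the Hibernate session cache wholesale
        }
    }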
@@ -124,28 +124,38 @@ public interface DBConnection<T> {
     public long getCacheSize() throws SQLException;
 
     /**
-     * Reload a DSpace object from the database. This will make sure the object
+     * Reload an entity from the database. This will make sure the object
      * is valid and stored in the cache. The returned object should be used
      * henceforth instead of the passed object.
      *
-     * @param <E>    type of {@link entity}
-     * @param entity The DSpace object to reload
+     * @param <E>    type of entity.
+     * @param entity The entity to reload.
      * @return the reloaded entity.
-     * @throws java.sql.SQLException passed through.
+     * @throws SQLException passed through.
     */
     public <E extends ReloadableEntity> E reloadEntity(E entity) throws SQLException;
 
     /**
-     * Remove a DSpace object from the session cache when batch processing a
-     * large number of objects.
+     * Remove all entities from the session cache.
      *
-     * <p>Objects removed from cache are not saved in any way. Therefore, if you
-     * have modified an object, you should be sure to {@link commit()} changes
+     * <p>Entities removed from cache are not saved in any way. Therefore, if you
+     * have modified any entities, you should be sure to {@link #commit()} changes
      * before calling this method.
      *
-     * @param <E>    Type of {@link entity}
-     * @param entity The DSpace object to decache.
-     * @throws java.sql.SQLException passed through.
+     * @throws SQLException passed through.
     */
+    public void uncacheEntities() throws SQLException;
+
+    /**
+     * Remove an entity from the session cache.
+     *
+     * <p>Entities removed from cache are not saved in any way. Therefore, if you
+     * have modified the entity, you should be sure to {@link #commit()} changes
+     * before calling this method.
+     *
+     * @param <E>    Type of entity.
+     * @param entity The entity to decache.
+     * @throws SQLException passed through.
+     */
     public <E extends ReloadableEntity> void uncacheEntity(E entity) throws SQLException;
 
@@ -242,6 +242,11 @@ public class HibernateDBConnection implements DBConnection<Session> {
         }
     }
 
+    @Override
+    public void uncacheEntities() throws SQLException {
+        getSession().clear();
+    }
+
     /**
      * Evict an entity from the hibernate cache.
      * <P>
@@ -19,6 +19,8 @@ import org.dspace.content.Item;
 import org.dspace.content.MetadataValue;
 import org.dspace.curate.AbstractCurationTask;
 import org.dspace.curate.Curator;
+import org.dspace.services.ConfigurationService;
+import org.dspace.services.factory.DSpaceServicesFactory;
 
 /**
  * A basic link checker that is designed to be extended. By default this link checker
@@ -42,6 +44,9 @@ public class BasicLinkChecker extends AbstractCurationTask {
     // The log4j logger for this class
     private static Logger log = org.apache.logging.log4j.LogManager.getLogger(BasicLinkChecker.class);
 
+    protected static final ConfigurationService configurationService
+        = DSpaceServicesFactory.getInstance().getConfigurationService();
+
     /**
      * Perform the link checking.
@@ -110,7 +115,8 @@ public class BasicLinkChecker extends AbstractCurationTask {
     */
     protected boolean checkURL(String url, StringBuilder results) {
         // Link check the URL
-        int httpStatus = getResponseStatus(url);
+        int redirects = 0;
+        int httpStatus = getResponseStatus(url, redirects);
 
         if ((httpStatus >= 200) && (httpStatus < 300)) {
             results.append(" - " + url + " = " + httpStatus + " - OK\n");
@@ -128,14 +134,24 @@ public class BasicLinkChecker extends AbstractCurationTask {
     * @param url The url to open
     * @return The HTTP response code (e.g. 200 / 301 / 404 / 500)
     */
-    protected int getResponseStatus(String url) {
+    protected int getResponseStatus(String url, int redirects) {
         try {
             URL theURL = new URL(url);
             HttpURLConnection connection = (HttpURLConnection) theURL.openConnection();
-            int code = connection.getResponseCode();
-            connection.disconnect();
+            connection.setInstanceFollowRedirects(true);
+            int statusCode = connection.getResponseCode();
+            int maxRedirect = configurationService.getIntProperty("curate.checklinks.max-redirect", 0);
+            if ((statusCode == HttpURLConnection.HTTP_MOVED_TEMP || statusCode == HttpURLConnection.HTTP_MOVED_PERM ||
+                statusCode == HttpURLConnection.HTTP_SEE_OTHER)) {
+                connection.disconnect();
+                String newUrl = connection.getHeaderField("Location");
+                if (newUrl != null && (maxRedirect >= redirects || maxRedirect == -1)) {
+                    redirects++;
+                    return getResponseStatus(newUrl, redirects);
+                }
+            }
 
-            return code;
+            return statusCode;
 
         } catch (IOException ioe) {
             // Must be a bad URL
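A condensed, standalone version of the redirect handling added above, for illustration: the ConfigurationService lookup is replaced by a plain maxRedirect argument (-1 is unlimited; the >= comparison is kept verbatim from the hunk).

    import java.io.IOException;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class RedirectSketch {
        static int status(String url, int redirects, int maxRedirect) throws IOException {
            HttpURLConnection c = (HttpURLConnection) new URL(url).openConnection();
            int code = c.getResponseCode();
            if (code == HttpURLConnection.HTTP_MOVED_TEMP
                    || code == HttpURLConnection.HTTP_MOVED_PERM
                    || code == HttpURLConnection.HTTP_SEE_OTHER) {
                String location = c.getHeaderField("Location");
                c.disconnect();
                if (location != null && (maxRedirect >= redirects || maxRedirect == -1)) {
                    return status(location, redirects + 1, maxRedirect); // follow the hop
                }
            }
            return code;
        }
    }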
@@ -10,6 +10,7 @@ package org.dspace.ctask.general;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
@@ -25,7 +26,6 @@ import org.dspace.identifier.IdentifierProvider;
|
||||
import org.dspace.identifier.VersionedHandleIdentifierProviderWithCanonicalHandles;
|
||||
import org.dspace.identifier.factory.IdentifierServiceFactory;
|
||||
import org.dspace.identifier.service.IdentifierService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/**
|
||||
* Ensure that an object has all of the identifiers that it should, minting them
|
||||
@@ -45,20 +45,6 @@ public class CreateMissingIdentifiers
|
||||
return Curator.CURATE_SKIP;
|
||||
}
|
||||
|
||||
// XXX Temporary escape when an incompatible provider is configured.
|
||||
// XXX Remove this when the provider is fixed.
|
||||
boolean compatible = DSpaceServicesFactory
|
||||
.getInstance()
|
||||
.getServiceManager()
|
||||
.getServiceByName(
|
||||
VersionedHandleIdentifierProviderWithCanonicalHandles.class.getCanonicalName(),
|
||||
IdentifierProvider.class) == null;
|
||||
if (!compatible) {
|
||||
setResult("This task is not compatible with VersionedHandleIdentifierProviderWithCanonicalHandles");
|
||||
return Curator.CURATE_ERROR;
|
||||
}
|
||||
// XXX End of escape
|
||||
|
||||
String typeText = Constants.typeText[dso.getType()];
|
||||
|
||||
// Get a Context
|
||||
@@ -75,6 +61,18 @@ public class CreateMissingIdentifiers
|
||||
.getInstance()
|
||||
.getIdentifierService();
|
||||
|
||||
// XXX Temporary escape when an incompatible provider is configured.
|
||||
// XXX Remove this when the provider is fixed.
|
||||
List<IdentifierProvider> providerList = identifierService.getProviders();
|
||||
boolean compatible =
|
||||
providerList.stream().noneMatch(p -> p instanceof VersionedHandleIdentifierProviderWithCanonicalHandles);
|
||||
|
||||
if (!compatible) {
|
||||
setResult("This task is not compatible with VersionedHandleIdentifierProviderWithCanonicalHandles");
|
||||
return Curator.CURATE_ERROR;
|
||||
}
|
||||
// XXX End of escape
|
||||
|
||||
// Register any missing identifiers.
|
||||
try {
|
||||
identifierService.register(context, dso);
|
||||
|
@@ -165,7 +165,7 @@ public class Curation extends DSpaceRunnable<CurationScriptConfiguration> {
      * End of curation script; logs script time if -v verbose is set
      *
      * @param timeRun Time script was started
-     * @throws SQLException If DSpace contextx can't complete
+     * @throws SQLException If DSpace context can't complete
     */
     private void endScript(long timeRun) throws SQLException {
         context.complete();
@@ -185,7 +185,7 @@ public class Curation extends DSpaceRunnable<CurationScriptConfiguration> {
         Curator curator = new Curator(handler);
         OutputStream reporterStream;
         if (null == this.reporter) {
-            reporterStream = new NullOutputStream();
+            reporterStream = NullOutputStream.NULL_OUTPUT_STREAM;
         } else if ("-".equals(this.reporter)) {
             reporterStream = System.out;
         } else {
@@ -300,9 +300,17 @@ public class Curation extends DSpaceRunnable<CurationScriptConfiguration> {
         // scope
         if (this.commandLine.getOptionValue('s') != null) {
             this.scope = this.commandLine.getOptionValue('s');
-            if (this.scope != null && Curator.TxScope.valueOf(this.scope.toUpperCase()) == null) {
-                this.handler.logError("Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " +
-                    "'open' recognized");
+            boolean knownScope;
+            try {
+                Curator.TxScope.valueOf(this.scope.toUpperCase());
+                knownScope = true;
+            } catch (IllegalArgumentException | NullPointerException e) {
+                knownScope = false;
+            }
+            if (!knownScope) {
+                this.handler.logError("Bad transaction scope '"
+                    + this.scope
+                    + "': only 'object', 'curation' or 'open' recognized");
                 throw new IllegalArgumentException(
                     "Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " +
                     "'open' recognized");
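The scope fix above matters because Enum.valueOf() never returns null; it throws IllegalArgumentException, so the old null-check could never fire. A runnable distillation of the new idiom, with a stand-in enum (the real values live in Curator.TxScope):

    public class EnumScopeSketch {
        enum TxScope { OBJECT, CURATION, OPEN }  // stand-in for Curator.TxScope

        static boolean isKnownScope(String scope) {
            try {
                TxScope.valueOf(scope.toUpperCase());
                return true;
            } catch (IllegalArgumentException | NullPointerException e) {
                return false;  // unknown value, or null scope
            }
        }

        public static void main(String[] args) {
            System.out.println(isKnownScope("curation")); // true
            System.out.println(isKnownScope("bogus"));    // false
        }
    }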
@@ -32,6 +32,9 @@ public class DiscoverResult {
     private List<IndexableObject> indexableObjects;
     private Map<String, List<FacetResult>> facetResults;
 
+    // Total count of facet entries calculated for a metadata browsing query
+    private long totalEntries;
+
     /**
      * A map that contains all the documents sought after, the key is a string representation of the Indexable Object
     */
@@ -64,6 +67,14 @@ public class DiscoverResult {
         this.totalSearchResults = totalSearchResults;
     }
 
+    public long getTotalEntries() {
+        return totalEntries;
+    }
+
+    public void setTotalEntries(long totalEntries) {
+        this.totalEntries = totalEntries;
+    }
+
     public int getStart() {
         return start;
     }
@@ -1055,6 +1055,8 @@ public class SolrServiceImpl implements SearchService, IndexingService {
             }
             //Resolve our facet field values
             resolveFacetFields(context, query, result, skipLoadingResponse, solrQueryResponse);
+            //Add total entries count for metadata browsing
+            resolveEntriesCount(result, solrQueryResponse);
         }
         // If any stale entries are found in the current page of results,
         // we remove those stale entries and rerun the same query again.
@@ -1080,7 +1082,39 @@ public class SolrServiceImpl implements SearchService, IndexingService {
         return result;
     }
 
+    /**
+     * Stores the total count of entries for metadata index browsing. The count is calculated by the
+     * <code>json.facet</code> parameter with the following value:
+     *
+     * <pre><code>
+     * {
+     *     "entries_count": {
+     *         "type": "terms",
+     *         "field": "facetNameField_filter",
+     *         "limit": 0,
+     *         "prefix": "prefix_value",
+     *         "numBuckets": true
+     *     }
+     * }
+     * </code></pre>
+     *
+     * This value is returned in the <code>facets</code> field of the Solr response.
+     *
+     * @param result            DiscoverResult object where the total entries count will be stored
+     * @param solrQueryResponse QueryResponse object containing the solr response
+     */
+    private void resolveEntriesCount(DiscoverResult result, QueryResponse solrQueryResponse) {
+
+        Object facetsObj = solrQueryResponse.getResponse().get("facets");
+        if (facetsObj instanceof NamedList) {
+            NamedList<Object> facets = (NamedList<Object>) facetsObj;
+            Object bucketsInfoObj = facets.get("entries_count");
+            if (bucketsInfoObj instanceof NamedList) {
+                NamedList<Object> bucketsInfo = (NamedList<Object>) bucketsInfoObj;
+                result.setTotalEntries((int) bucketsInfo.get("numBuckets"));
+            }
+        }
+    }
+
     private void resolveFacetFields(Context context, DiscoverQuery query, DiscoverResult result,
                                     boolean skipLoadingResponse, QueryResponse solrQueryResponse) throws SQLException {
@@ -1411,8 +1445,6 @@ public class SolrServiceImpl implements SearchService, IndexingService {
             } else {
                 return field + "_acid";
             }
-        } else if (facetFieldConfig.getType().equals(DiscoveryConfigurationParameters.TYPE_STANDARD)) {
-            return field;
         } else {
             return field;
         }
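For reference, a hedged sketch of the unwrapping resolveEntriesCount() performs on the SolrJ response: dig "facets" -> "entries_count" -> "numBuckets". NamedList is SolrJ's org.apache.solr.common.util.NamedList; the response shape follows Solr's JSON Facet API as described in the javadoc above.

    import org.apache.solr.common.util.NamedList;

    public class EntriesCountSketch {
        @SuppressWarnings("unchecked")
        static long totalEntries(NamedList<Object> response) {
            Object facetsObj = response.get("facets");
            if (facetsObj instanceof NamedList) {
                Object entries = ((NamedList<Object>) facetsObj).get("entries_count");
                if (entries instanceof NamedList) {
                    return (int) ((NamedList<Object>) entries).get("numBuckets");
                }
            }
            return 0L;  // no facet info in the response
        }
    }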
@@ -118,20 +118,10 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
         ParseContext tikaContext = new ParseContext();
 
         // Use Apache Tika to parse the full text stream(s)
+        boolean extractionSucceeded = false;
         try (InputStream fullTextStreams = streams.getStream()) {
             tikaParser.parse(fullTextStreams, tikaHandler, tikaMetadata, tikaContext);
-
-            // Write Tika metadata to "tika_meta_*" fields.
-            // This metadata is not very useful right now,
-            // but we'll keep it just in case it becomes more useful.
-            for (String name : tikaMetadata.names()) {
-                for (String value : tikaMetadata.getValues(name)) {
-                    doc.addField("tika_meta_" + name, value);
-                }
-            }
-
-            // Save (parsed) full text to "fulltext" field
-            doc.addField("fulltext", tikaHandler.toString());
+            extractionSucceeded = true;
         } catch (SAXException saxe) {
             // Check if this SAXException is just a notice that this file was longer than the character limit.
             // Unfortunately there is not a unique, public exception type to catch here. This error is thrown
@@ -141,6 +131,7 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
                 // log that we only indexed up to that configured limit
                 log.info("Full text is larger than the configured limit (discovery.solr.fulltext.charLimit)."
                     + " Only the first {} characters were indexed.", charLimit);
+                extractionSucceeded = true;
             } else {
                 log.error("Tika parsing error. Could not index full text.", saxe);
                 throw new IOException("Tika parsing error. Could not index full text.", saxe);
@@ -148,11 +139,19 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
         } catch (TikaException | IOException ex) {
             log.error("Tika parsing error. Could not index full text.", ex);
             throw new IOException("Tika parsing error. Could not index full text.", ex);
-        } finally {
-            // Add document to index
-            solr.add(doc);
         }
-        return;
+        if (extractionSucceeded) {
+            // Write Tika metadata to "tika_meta_*" fields.
+            // This metadata is not very useful right now,
+            // but we'll keep it just in case it becomes more useful.
+            for (String name : tikaMetadata.names()) {
+                for (String value : tikaMetadata.getValues(name)) {
+                    doc.addField("tika_meta_" + name, value);
+                }
+            }
+            // Save (parsed) full text to "fulltext" field
+            doc.addField("fulltext", tikaHandler.toString());
+        }
     }
     // Add document to index
     solr.add(doc);
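A condensed sketch of the new control flow, with DSpace and Tika details elided: the document is no longer added inside finally (which indexed even failed extractions and then returned early); fields are attached only when extraction succeeded, and the Solr add happens once on the normal path. isCharLimitNotice() is a hypothetical stand-in for the character-limit message check in the hunk.

    // Sketch only; doc, solr, streams and the Tika objects are assumed plumbing.
    boolean extractionSucceeded = false;
    try (InputStream in = streams.getStream()) {
        tikaParser.parse(in, tikaHandler, tikaMetadata, tikaContext);
        extractionSucceeded = true;
    } catch (SAXException saxe) {
        if (isCharLimitNotice(saxe)) {   // hypothetical helper: truncated but usable
            extractionSucceeded = true;
        } else {
            throw new IOException("Tika parsing error. Could not index full text.", saxe);
        }
    }
    if (extractionSucceeded) {
        doc.addField("fulltext", tikaHandler.toString());
    }
    solr.add(doc);  // reached only when no exception escaped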
@@ -154,9 +154,11 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
         doc.addField("latestVersion", isLatestVersion(context, item));
 
         EPerson submitter = item.getSubmitter();
-        if (submitter != null) {
-            addFacetIndex(doc, "submitter", submitter.getID().toString(),
-                submitter.getFullName());
+        if (submitter != null && !(DSpaceServicesFactory.getInstance().getConfigurationService().getBooleanProperty(
+            "discovery.index.item.submitter.enabled", false))) {
+            doc.addField("submitter_authority", submitter.getID().toString());
+        } else if (submitter != null) {
+            addFacetIndex(doc, "submitter", submitter.getID().toString(), submitter.getFullName());
         }
 
         // Add the item metadata
@@ -15,6 +15,7 @@ import jakarta.persistence.Id;
 import jakarta.persistence.JoinColumn;
 import jakarta.persistence.ManyToOne;
 import jakarta.persistence.Table;
+import jakarta.persistence.UniqueConstraint;
 import org.dspace.core.HibernateProxyHelper;
 
 /**
@@ -23,7 +24,7 @@ import org.dspace.core.HibernateProxyHelper;
  * @author kevinvandevelde at atmire.com
 */
 @Entity
-@Table(name = "group2groupcache")
+@Table(name = "group2groupcache", uniqueConstraints = { @UniqueConstraint(columnNames = {"parent_id", "child_id"}) })
 public class Group2GroupCache implements Serializable {
 
     @Id
@@ -20,6 +20,7 @@ import java.util.Set;
 import java.util.UUID;
 
 import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.collections4.SetUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.logging.log4j.LogManager;
@@ -673,15 +674,14 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
 
 
     /**
-     * Regenerate the group cache AKA the group2groupcache table in the database -
-     * meant to be called when a group is added or removed from another group
+     * Returns a set with pairs of parent and child group UUIDs, representing the new cache table rows.
      *
      * @param context      The relevant DSpace Context.
     * @param flushQueries flushQueries Flush all pending queries
+     * @return Pairs of parent and child group UUID of the new cache.
     * @throws SQLException An exception that provides information on a database access error or other errors.
     */
-    protected void rethinkGroupCache(Context context, boolean flushQueries) throws SQLException {
-
+    private Set<Pair<UUID, UUID>> computeNewCache(Context context, boolean flushQueries) throws SQLException {
         Map<UUID, Set<UUID>> parents = new HashMap<>();
 
         List<Pair<UUID, UUID>> group2groupResults = groupDAO.getGroup2GroupResults(context, flushQueries);
@@ -689,19 +689,8 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
             UUID parent = group2groupResult.getLeft();
             UUID child = group2groupResult.getRight();
 
-            // if parent doesn't have an entry, create one
-            if (!parents.containsKey(parent)) {
-                Set<UUID> children = new HashSet<>();
-
-                // add child id to the list
-                children.add(child);
-                parents.put(parent, children);
-            } else {
-                // parent has an entry, now add the child to the parent's record
-                // of children
-                Set<UUID> children = parents.get(parent);
-                children.add(child);
-            }
+            parents.putIfAbsent(parent, new HashSet<>());
+            parents.get(parent).add(child);
         }
 
         // now parents is a hash of all of the IDs of groups that are parents
@@ -714,28 +703,43 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
             parent.getValue().addAll(myChildren);
         }
 
-        // empty out group2groupcache table
-        group2GroupCacheDAO.deleteAll(context);
-
-        // write out new one
+        // write out new cache IN MEMORY ONLY and returns it
+        Set<Pair<UUID, UUID>> newCache = new HashSet<>();
         for (Map.Entry<UUID, Set<UUID>> parent : parents.entrySet()) {
             UUID key = parent.getKey();
 
             for (UUID child : parent.getValue()) {
-
-                Group parentGroup = find(context, key);
-                Group childGroup = find(context, child);
-
-                if (parentGroup != null && childGroup != null && group2GroupCacheDAO
-                    .find(context, parentGroup, childGroup) == null) {
-                    Group2GroupCache group2GroupCache = group2GroupCacheDAO.create(context, new Group2GroupCache());
-                    group2GroupCache.setParent(parentGroup);
-                    group2GroupCache.setChild(childGroup);
-                    group2GroupCacheDAO.save(context, group2GroupCache);
-                }
+                newCache.add(Pair.of(key, child));
             }
         }
+        return newCache;
+    }
+
+    /**
+     * Regenerate the group cache AKA the group2groupcache table in the database -
+     * meant to be called when a group is added or removed from another group
+     *
+     * @param context      The relevant DSpace Context.
+     * @param flushQueries flushQueries Flush all pending queries
+     * @throws SQLException An exception that provides information on a database access error or other errors.
+     */
+    protected void rethinkGroupCache(Context context, boolean flushQueries) throws SQLException {
+        // current cache in the database
+        Set<Pair<UUID, UUID>> oldCache = group2GroupCacheDAO.getCache(context);
+
+        // correct cache, computed from the Group table
+        Set<Pair<UUID, UUID>> newCache = computeNewCache(context, flushQueries);
+
+        SetUtils.SetView<Pair<UUID, UUID>> toDelete = SetUtils.difference(oldCache, newCache);
+        SetUtils.SetView<Pair<UUID, UUID>> toCreate = SetUtils.difference(newCache, oldCache);
+
+        for (Pair<UUID, UUID> pair : toDelete) {
+            group2GroupCacheDAO.deleteFromCache(context, pair.getLeft(), pair.getRight());
+        }
+
+        for (Pair<UUID, UUID> pair : toCreate) {
+            group2GroupCacheDAO.addToCache(context, pair.getLeft(), pair.getRight());
+        }
+    }
 
     @Override
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.apache.commons.lang3.tuple.Pair;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.GenericDAO;
|
||||
import org.dspace.eperson.Group;
|
||||
@@ -25,13 +28,74 @@ import org.dspace.eperson.Group2GroupCache;
|
||||
*/
|
||||
public interface Group2GroupCacheDAO extends GenericDAO<Group2GroupCache> {
|
||||
|
||||
public List<Group2GroupCache> findByParent(Context context, Group group) throws SQLException;
|
||||
/**
|
||||
* Returns the current cache table as a set of UUID pairs.
|
||||
* @param context The relevant DSpace Context.
|
||||
* @return Set of UUID pairs, where the first element is the parent UUID and the second one is the child UUID.
|
||||
* @throws SQLException An exception that provides information on a database access error or other errors.
|
||||
*/
|
||||
Set<Pair<UUID, UUID>> getCache(Context context) throws SQLException;
|
||||
|
||||
public List<Group2GroupCache> findByChildren(Context context, Iterable<Group> groups) throws SQLException;
|
||||
/**
|
||||
* Returns all cache entities that are children of a given parent Group entity.
|
||||
* @param context The relevant DSpace Context.
|
||||
* @param group Parent group to perform the search.
|
||||
* @return List of cached groups that are children of the parent group.
|
||||
* @throws SQLException An exception that provides information on a database access error or other errors.
|
||||
*/
|
||||
List<Group2GroupCache> findByParent(Context context, Group group) throws SQLException;
|
||||
|
||||
public Group2GroupCache findByParentAndChild(Context context, Group parent, Group child) throws SQLException;
|
||||
/**
|
||||
* Returns all cache entities that are parents of at least one group from a children groups list.
|
||||
* @param context The relevant DSpace Context.
|
||||
* @param groups Children groups to perform the search.
|
||||
* @return List of cached groups that are parents of at least one group from the children groups list.
|
||||
* @throws SQLException An exception that provides information on a database access error or other errors.
|
||||
*/
|
||||
List<Group2GroupCache> findByChildren(Context context, Iterable<Group> groups) throws SQLException;
|
||||
|
||||
public Group2GroupCache find(Context context, Group parent, Group child) throws SQLException;
|
||||
/**
|
||||
* Returns the cache entity given specific parent and child groups.
|
||||
* @param context The relevant DSpace Context.
|
||||
* @param parent Parent group.
|
||||
* @param child Child gruoup.
|
||||
* @return Cached group.
|
||||
* @throws SQLException An exception that provides information on a database access error or other errors.
|
||||
*/
|
||||
Group2GroupCache findByParentAndChild(Context context, Group parent, Group child) throws SQLException;
|
||||
|
||||
public void deleteAll(Context context) throws SQLException;
|
||||
/**
|
||||
* Returns the cache entity given specific parent and child groups.
|
||||
* @param context The relevant DSpace Context.
|
||||
* @param parent Parent group.
|
||||
* @param child Child gruoup.
|
||||
* @return Cached group.
|
||||
* @throws SQLException An exception that provides information on a database access error or other errors.
|
||||
*/
|
||||
Group2GroupCache find(Context context, Group parent, Group child) throws SQLException;
|
||||
|
||||
/**
|
||||
* Completely deletes the current cache table.
|
||||
* @param context The relevant DSpace Context.
|
||||
* @throws SQLException An exception that provides information on a database access error or other errors.
|
||||
*/
|
||||
void deleteAll(Context context) throws SQLException;
|
||||
|
||||
/**
|
||||
* Deletes a specific cache row given parent and child groups UUIDs.
|
||||
* @param context The relevant DSpace Context.
|
||||
* @param parent Parent group UUID.
|
||||
* @param child Child group UUID.
|
||||
* @throws SQLException An exception that provides information on a database access error or other errors.
|
||||
*/
|
||||
void deleteFromCache(Context context, UUID parent, UUID child) throws SQLException;
|
||||
|
||||
/**
|
||||
* Adds a single row to the cache table given parent and child groups UUIDs.
|
||||
* @param context The relevant DSpace Context.
|
||||
* @param parent Parent group UUID.
|
||||
* @param child Child group UUID.
|
||||
* @throws SQLException An exception that provides information on a database access error or other errors.
|
||||
*/
|
||||
void addToCache(Context context, UUID parent, UUID child) throws SQLException;
|
||||
}
|
||||
|
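A hypothetical caller can flatten the UUID pairs returned by getCache() into a parent-to-children lookup. This fragment is a sketch only: the group2GroupCacheDAO and context variables are assumed to be wired elsewhere, and the helper is not part of the interface above.

// Group the (parent, child) pairs into a parent -> children map.
Map<UUID, Set<UUID>> byParent = new HashMap<>();
for (Pair<UUID, UUID> row : group2GroupCacheDAO.getCache(context)) {
    byParent.computeIfAbsent(row.getLeft(), k -> new HashSet<>()).add(row.getRight());
}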
@@ -8,14 +8,18 @@
package org.dspace.eperson.dao.impl;

import java.sql.SQLException;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.UUID;

import jakarta.persistence.Query;
import jakarta.persistence.criteria.CriteriaBuilder;
import jakarta.persistence.criteria.CriteriaQuery;
import jakarta.persistence.criteria.Predicate;
import jakarta.persistence.criteria.Root;
import org.apache.commons.lang3.tuple.Pair;
import org.dspace.core.AbstractHibernateDAO;
import org.dspace.core.Context;
import org.dspace.eperson.Group;
@@ -35,6 +39,16 @@ public class Group2GroupCacheDAOImpl extends AbstractHibernateDAO<Group2GroupCac
        super();
    }

    @Override
    public Set<Pair<UUID, UUID>> getCache(Context context) throws SQLException {
        Query query = createQuery(
            context,
            "SELECT new org.apache.commons.lang3.tuple.ImmutablePair(g.parent.id, g.child.id) FROM Group2GroupCache g"
        );
        List<Pair<UUID, UUID>> results = query.getResultList();
        return new HashSet<Pair<UUID, UUID>>(results);
    }

    @Override
    public List<Group2GroupCache> findByParent(Context context, Group group) throws SQLException {
        CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
@@ -90,4 +104,24 @@ public class Group2GroupCacheDAOImpl extends AbstractHibernateDAO<Group2GroupCac
    public void deleteAll(Context context) throws SQLException {
        createQuery(context, "delete from Group2GroupCache").executeUpdate();
    }

    @Override
    public void deleteFromCache(Context context, UUID parent, UUID child) throws SQLException {
        Query query = getHibernateSession(context).createNativeQuery(
            "delete from group2groupcache g WHERE g.parent_id = :parent AND g.child_id = :child"
        );
        query.setParameter("parent", parent);
        query.setParameter("child", child);
        query.executeUpdate();
    }

    @Override
    public void addToCache(Context context, UUID parent, UUID child) throws SQLException {
        Query query = getHibernateSession(context).createNativeQuery(
            "insert into group2groupcache (parent_id, child_id) VALUES (:parent, :child)"
        );
        query.setParameter("parent", parent);
        query.setParameter("child", child);
        query.executeUpdate();
    }
}
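Both native queries above bind the UUIDs through named parameters rather than concatenating them into the SQL string, so the values never become part of the statement text. The same pattern in plain JDBC, as a hedged sketch (it assumes an open java.sql.Connection named connection and is not part of the DAO):

// Placeholders keep the UUID values out of the SQL text itself.
try (PreparedStatement ps = connection.prepareStatement(
        "insert into group2groupcache (parent_id, child_id) values (?, ?)")) {
    ps.setObject(1, parent); // java.util.UUID
    ps.setObject(2, child);
    ps.executeUpdate();
}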
@@ -1,144 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */

package org.dspace.google;

import java.io.File;
import java.util.HashSet;
import java.util.Set;

import com.google.api.client.auth.oauth2.Credential;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.services.analytics.Analytics;
import com.google.api.services.analytics.AnalyticsScopes;
import org.apache.logging.log4j.Logger;
import org.dspace.services.factory.DSpaceServicesFactory;

/**
 * User: Robin Taylor
 * Date: 11/07/2014
 * Time: 13:23
 */

public class GoogleAccount {

    // Read from config
    private String applicationName;
    private String tableId;
    private String emailAddress;
    private String certificateLocation;

    // Created from factories
    private JsonFactory jsonFactory;
    private HttpTransport httpTransport;

    // The Google stuff
    private Credential credential;
    private Analytics client;

    private volatile static GoogleAccount uniqueInstance;

    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(GoogleAccount.class);

    private GoogleAccount() {
        applicationName = DSpaceServicesFactory.getInstance().getConfigurationService()
                                               .getProperty("google-analytics.application.name");
        tableId = DSpaceServicesFactory.getInstance().getConfigurationService()
                                       .getProperty("google-analytics.table.id");
        emailAddress = DSpaceServicesFactory.getInstance().getConfigurationService()
                                            .getProperty("google-analytics.account.email");
        certificateLocation = DSpaceServicesFactory.getInstance().getConfigurationService()
                                                   .getProperty("google-analytics.certificate.location");

        jsonFactory = JacksonFactory.getDefaultInstance();

        try {
            httpTransport = GoogleNetHttpTransport.newTrustedTransport();
            credential = authorize();
        } catch (Exception e) {
            throw new RuntimeException("Error initialising Google Analytics client", e);
        }

        // Create an Analytics instance
        client = new Analytics.Builder(httpTransport, jsonFactory, credential).setApplicationName(applicationName)
                                                                              .build();

        log.info("Google Analytics client successfully initialised");
    }

    public static GoogleAccount getInstance() {
        if (uniqueInstance == null) {
            synchronized (GoogleAccount.class) {
                if (uniqueInstance == null) {
                    uniqueInstance = new GoogleAccount();
                }
            }
        }

        return uniqueInstance;
    }

    private Credential authorize() throws Exception {
        Set<String> scopes = new HashSet<String>();
        scopes.add(AnalyticsScopes.ANALYTICS);
        scopes.add(AnalyticsScopes.ANALYTICS_EDIT);
        scopes.add(AnalyticsScopes.ANALYTICS_MANAGE_USERS);
        scopes.add(AnalyticsScopes.ANALYTICS_PROVISION);
        scopes.add(AnalyticsScopes.ANALYTICS_READONLY);

        credential = new GoogleCredential.Builder()
            .setTransport(httpTransport)
            .setJsonFactory(jsonFactory)
            .setServiceAccountId(emailAddress)
            .setServiceAccountScopes(scopes)
            .setServiceAccountPrivateKeyFromP12File(new File(certificateLocation))
            .build();

        return credential;
    }

    public String getApplicationName() {
        return applicationName;
    }

    public String getTableId() {
        return tableId;
    }

    public String getEmailAddress() {
        return emailAddress;
    }

    public String getCertificateLocation() {
        return certificateLocation;
    }

    public JsonFactory getJsonFactory() {
        return jsonFactory;
    }

    public HttpTransport getHttpTransport() {
        return httpTransport;
    }

    public Credential getCredential() {
        return credential;
    }

    public Analytics getClient() {
        return client;
    }

}
@@ -1,49 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */

package org.dspace.google;

import java.io.IOException;

import com.google.api.services.analytics.model.GaData;

/**
 * User: Robin Taylor
 * Date: 20/08/2014
 * Time: 09:26
 */
public class GoogleQueryManager {

    public GaData getPageViews(String startDate, String endDate, String handle) throws IOException {
        return GoogleAccount.getInstance().getClient().data().ga().get(
            GoogleAccount.getInstance().getTableId(),
            startDate,
            endDate,
            "ga:pageviews") // Metrics.
                            .setDimensions("ga:year,ga:month")
                            .setSort("-ga:year,-ga:month")
                            .setFilters("ga:pagePath=~/handle/" + handle + "$")
                            .execute();
    }

    public GaData getBitstreamDownloads(String startDate, String endDate, String handle) throws IOException {
        return GoogleAccount.getInstance().getClient().data().ga().get(
            GoogleAccount.getInstance().getTableId(),
            startDate,
            endDate,
            "ga:totalEvents") // Metrics.
                              .setDimensions("ga:year,ga:month")
                              .setSort("-ga:year,-ga:month")
                              .setFilters(
                                  "ga:eventCategory==bitstream;ga:eventAction==download;ga:pagePath=~" + handle + "/")
                              .execute();
    }

}
@@ -1,201 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */

package org.dspace.google;

import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

import jakarta.servlet.http.HttpServletRequest;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.message.BasicNameValuePair;
import org.apache.logging.log4j.Logger;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Constants;
import org.dspace.service.ClientInfoService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.model.Event;
import org.dspace.usage.AbstractUsageEventListener;
import org.dspace.usage.UsageEvent;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * User: Robin Taylor
 * Date: 14/08/2014
 * Time: 10:05
 *
 * Notify Google Analytics of... well anything we want really.
 * @deprecated Use org.dspace.google.GoogleAsyncEventListener instead
 */
@Deprecated
public class GoogleRecorderEventListener extends AbstractUsageEventListener {

    private String analyticsKey;
    private CloseableHttpClient httpclient;
    private String GoogleURL = "https://www.google-analytics.com/collect";
    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(GoogleRecorderEventListener.class);

    protected ContentServiceFactory contentServiceFactory;
    protected ConfigurationService configurationService;
    protected ClientInfoService clientInfoService;

    public GoogleRecorderEventListener() {
        // httpclient is threadsafe so we only need one.
        httpclient = HttpClients.createDefault();
    }

    @Autowired
    public void setContentServiceFactory(ContentServiceFactory contentServiceFactory) {
        this.contentServiceFactory = contentServiceFactory;
    }

    @Autowired
    public void setConfigurationService(ConfigurationService configurationService) {
        this.configurationService = configurationService;
    }

    @Autowired
    public void setClientInfoService(ClientInfoService clientInfoService) {
        this.clientInfoService = clientInfoService;
    }

    @Override
    public void receiveEvent(Event event) {
        if ((event instanceof UsageEvent)) {
            log.debug("Usage event received " + event.getName());

            // This is a wee bit messy but these keys should be combined in future.
            analyticsKey = configurationService.getProperty("google.analytics.key");

            if (StringUtils.isNotBlank(analyticsKey)) {
                try {
                    UsageEvent ue = (UsageEvent) event;

                    if (ue.getAction() == UsageEvent.Action.VIEW) {
                        if (ue.getObject().getType() == Constants.BITSTREAM) {
                            logEvent(ue, "bitstream", "download");

                            // Note: I've left this commented out code here to show how we could record page views
                            // as events,
                            // but since they are already taken care of by the Google Analytics Javascript there is
                            // not much point.

                            //} else if (ue.getObject().getType() == Constants.ITEM) {
                            //    logEvent(ue, "item", "view");
                            //} else if (ue.getObject().getType() == Constants.COLLECTION) {
                            //    logEvent(ue, "collection", "view");
                            //} else if (ue.getObject().getType() == Constants.COMMUNITY) {
                            //    logEvent(ue, "community", "view");
                        }
                    }
                } catch (Exception e) {
                    log.error(e.getMessage());
                }
            }
        }
    }

    private void logEvent(UsageEvent ue, String category, String action) throws IOException, SQLException {
        HttpPost httpPost = new HttpPost(GoogleURL);

        List<NameValuePair> nvps = new ArrayList<NameValuePair>();
        nvps.add(new BasicNameValuePair("v", "1"));
        nvps.add(new BasicNameValuePair("tid", analyticsKey));

        // Client Id, should uniquely identify the user or device. If we have a session id for the user
        // then lets use it, else generate a UUID.
        if (ue.getRequest().getSession(false) != null) {
            nvps.add(new BasicNameValuePair("cid", ue.getRequest().getSession().getId()));
        } else {
            nvps.add(new BasicNameValuePair("cid", UUID.randomUUID().toString()));
        }

        nvps.add(new BasicNameValuePair("t", "event"));
        nvps.add(new BasicNameValuePair("uip", getIPAddress(ue.getRequest())));
        nvps.add(new BasicNameValuePair("ua", ue.getRequest().getHeader("USER-AGENT")));
        nvps.add(new BasicNameValuePair("dr", ue.getRequest().getHeader("referer")));
        nvps.add(new BasicNameValuePair("dp", ue.getRequest().getRequestURI()));
        nvps.add(new BasicNameValuePair("dt", getObjectName(ue)));
        nvps.add(new BasicNameValuePair("ec", category));
        nvps.add(new BasicNameValuePair("ea", action));

        if (ue.getObject().getType() == Constants.BITSTREAM) {
            // Bitstream downloads may occasionally be for collection or community images, so we need to label them
            // with the parent object type.
            nvps.add(new BasicNameValuePair("el", getParentType(ue)));
        }

        httpPost.setEntity(new UrlEncodedFormEntity(nvps));

        try (CloseableHttpResponse response2 = httpclient.execute(httpPost)) {
            // I can't find a list of what are acceptable responses, so I log the response but take no action.
            log.debug("Google Analytics response is " + response2.getStatusLine());
        }

        log.debug("Posted to Google Analytics - " + ue.getRequest().getRequestURI());
    }

    private String getParentType(UsageEvent ue) {
        try {
            int parentType = contentServiceFactory.getDSpaceObjectService(ue.getObject())
                                                  .getParentObject(ue.getContext(), ue.getObject()).getType();
            if (parentType == Constants.ITEM) {
                return "item";
            } else if (parentType == Constants.COLLECTION) {
                return "collection";
            } else if (parentType == Constants.COMMUNITY) {
                return "community";
            }
        } catch (SQLException e) {
            // This shouldn't merit interrupting the user's transaction so log the error and continue.
            log.error(
                "Error in Google Analytics recording - can't determine ParentObjectType for bitstream " + ue.getObject()
                    .getID());
            e.printStackTrace();
        }

        return null;
    }

    private String getObjectName(UsageEvent ue) {
        try {
            if (ue.getObject().getType() == Constants.BITSTREAM) {
                // For a bitstream download we really want to know the title of the owning item rather than the
                // bitstream name.
                return contentServiceFactory.getDSpaceObjectService(ue.getObject())
                                            .getParentObject(ue.getContext(), ue.getObject()).getName();
            } else {
                return ue.getObject().getName();
            }
        } catch (SQLException e) {
            // This shouldn't merit interrupting the user's transaction so log the error and continue.
            log.error(
                "Error in Google Analytics recording - can't determine ParentObjectName for bitstream " + ue.getObject()
                    .getID());
            e.printStackTrace();
        }

        return null;
    }

    private String getIPAddress(HttpServletRequest request) {
        return clientInfoService.getClientIp(request);
    }

}
@@ -57,6 +57,11 @@ public class IdentifierServiceImpl implements IdentifierService {
        }
    }

    @Override
    public List<IdentifierProvider> getProviders() {
        return this.providers;
    }

    /**
     * Reserves identifiers for the item
     *
@@ -577,7 +577,8 @@ public class DOIOrganiser {
            }
        } catch (IdentifierException ex) {
            if (!(ex instanceof DOIIdentifierException)) {
                LOG.error("It wasn't possible to register the identifier online. ", ex);
                LOG.error("Registering DOI {} for object {}: the registrar returned an error.",
                    doiRow.getDoi(), dso.getID(), ex);
            }

            DOIIdentifierException doiIdentifierException = (DOIIdentifierException) ex;
@@ -461,6 +461,10 @@ public class DataCiteConnector
            log.warn("While reserving the DOI {}, we got a http status code "
                + "{} and the message \"{}\".",
                doi, Integer.toString(resp.statusCode), resp.getContent());
            Format format = Format.getCompactFormat();
            format.setEncoding("UTF-8");
            XMLOutputter xout = new XMLOutputter(format);
            log.info("We send the following XML:\n{}", xout.outputString(root));
            throw new DOIIdentifierException("Unable to parse an answer from "
                + "DataCite API. Please have a look into DSpace logs.",
                DOIIdentifierException.BAD_ANSWER);
@@ -632,6 +636,14 @@ public class DataCiteConnector
        return sendHttpRequest(httpget, doi);
    }

    /**
     * Send a DataCite metadata document to the registrar.
     *
     * @param doi identify the object.
     * @param metadataRoot describe the object. The root element of the document.
     * @return the registrar's response.
     * @throws DOIIdentifierException passed through.
     */
    protected DataCiteResponse sendMetadataPostRequest(String doi, Element metadataRoot)
        throws DOIIdentifierException {
        Format format = Format.getCompactFormat();
@@ -640,6 +652,14 @@ public class DataCiteConnector
        return sendMetadataPostRequest(doi, xout.outputString(new Document(metadataRoot)));
    }

    /**
     * Send a DataCite metadata document to the registrar.
     *
     * @param doi identify the object.
     * @param metadata describe the object.
     * @return the registrar's response.
     * @throws DOIIdentifierException passed through.
     */
    protected DataCiteResponse sendMetadataPostRequest(String doi, String metadata)
        throws DOIIdentifierException {
        // post mds/metadata/
@@ -687,7 +707,7 @@ public class DataCiteConnector
     * properties such as request URI and method type.
     * @param doi DOI string to operate on
     * @return response from DataCite
     * @throws DOIIdentifierException if DOI error
     * @throws DOIIdentifierException if registrar returns an error.
     */
    protected DataCiteResponse sendHttpRequest(HttpUriRequest req, String doi)
        throws DOIIdentifierException {
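The compact JDOM2 serialization used above is easy to reproduce in isolation. This is a sketch only: it shows the Format/XMLOutputter combination, and the element name is invented for illustration.

import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.output.Format;
import org.jdom2.output.XMLOutputter;

public class CompactXmlSketch {
    public static void main(String[] args) {
        // Compact format: no pretty-printing whitespace, explicit UTF-8 declaration.
        Element root = new Element("resource"); // illustrative element name
        Format format = Format.getCompactFormat();
        format.setEncoding("UTF-8");
        String xml = new XMLOutputter(format).outputString(new Document(root));
        System.out.println(xml);
    }
}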
@@ -19,6 +19,7 @@ import org.dspace.identifier.Identifier;
import org.dspace.identifier.IdentifierException;
import org.dspace.identifier.IdentifierNotFoundException;
import org.dspace.identifier.IdentifierNotResolvableException;
import org.dspace.identifier.IdentifierProvider;

/**
 * @author Fabio Bolognesi (fabio at atmire dot com)
@@ -194,4 +195,9 @@ public interface IdentifierService {
    void delete(Context context, DSpaceObject dso, String identifier)
        throws AuthorizeException, SQLException, IdentifierException;

    /**
     * Get List of currently enabled IdentifierProviders
     * @return List of enabled IdentifierProvider objects.
     */
    List<IdentifierProvider> getProviders();
}
@@ -26,7 +26,7 @@ import org.jdom2.xpath.XPathFactory;
 * This contributor is able to concat multi value.
 * Given a certain path, if it contains several nodes,
 * the values of nodes will be concatenated into a single one.
 * The concrete example we can see in the file wos-responce.xml in the <abstract_text> node,
 * The concrete example we can see in the file wos-response.xml in the <abstract_text> node,
 * which may contain several <p> paragraphs,
 * this Contributor allows concatenating all <p> paragraphs to obtain a single one.
 *
@@ -10,6 +10,7 @@ package org.dspace.orcid.client;
import java.util.List;
import java.util.Optional;

import org.dspace.orcid.OrcidToken;
import org.dspace.orcid.exception.OrcidClientException;
import org.dspace.orcid.model.OrcidTokenResponseDTO;
import org.orcid.jaxb.model.v3.release.record.Person;
@@ -161,4 +162,11 @@ public interface OrcidClient {
     */
    OrcidResponse deleteByPutCode(String accessToken, String orcid, String putCode, String path);

    /**
     * Revokes the given {@param accessToken} with a POST method.
     * @param orcidToken the access token to revoke
     * @throws OrcidClientException if some error occurs during the search
     */
    void revokeToken(OrcidToken orcidToken);

}
@@ -42,6 +42,7 @@ import org.apache.http.client.methods.RequestBuilder;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.message.BasicNameValuePair;
import org.dspace.orcid.OrcidToken;
import org.dspace.orcid.exception.OrcidClientException;
import org.dspace.orcid.model.OrcidEntityType;
import org.dspace.orcid.model.OrcidProfileSectionType;
@@ -178,6 +179,16 @@ public class OrcidClientImpl implements OrcidClient {
        return execute(buildDeleteUriRequest(accessToken, "/" + orcid + path + "/" + putCode), true);
    }

    @Override
    public void revokeToken(OrcidToken orcidToken) {
        List<NameValuePair> params = new ArrayList<>();
        params.add(new BasicNameValuePair("client_id", orcidConfiguration.getClientId()));
        params.add(new BasicNameValuePair("client_secret", orcidConfiguration.getClientSecret()));
        params.add(new BasicNameValuePair("token", orcidToken.getAccessToken()));

        executeSuccessful(buildPostForRevokeToken(new UrlEncodedFormEntity(params, Charset.defaultCharset())));
    }

    @Override
    public OrcidTokenResponseDTO getReadPublicAccessToken() {
        return getClientCredentialsAccessToken("/read-public");
@@ -220,6 +231,14 @@ public class OrcidClientImpl implements OrcidClient {
            .build();
    }

    private HttpUriRequest buildPostForRevokeToken(HttpEntity entity) {
        return post(orcidConfiguration.getRevokeUrl())
            .addHeader("Accept", "application/json")
            .addHeader("Content-Type", "application/x-www-form-urlencoded")
            .setEntity(entity)
            .build();
    }

    private HttpUriRequest buildPutUriRequest(String accessToken, String relativePath, Object object) {
        return put(orcidConfiguration.getApiUrl() + relativePath.trim())
            .addHeader("Content-Type", "application/vnd.orcid+xml")
@@ -234,6 +253,24 @@ public class OrcidClientImpl implements OrcidClient {
            .build();
    }

    private void executeSuccessful(HttpUriRequest httpUriRequest) {
        try {
            HttpClient client = HttpClientBuilder.create().build();
            HttpResponse response = client.execute(httpUriRequest);

            if (isNotSuccessfull(response)) {
                throw new OrcidClientException(
                    getStatusCode(response),
                    "Operation " + httpUriRequest.getMethod() + " for the resource " + httpUriRequest.getURI() +
                        " was not successful: " + new String(response.getEntity().getContent().readAllBytes(),
                        StandardCharsets.UTF_8)
                );
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    private <T> T executeAndParseJson(HttpUriRequest httpUriRequest, Class<T> clazz) {

        HttpClient client = HttpClientBuilder.create().build();
@@ -35,6 +35,8 @@ public final class OrcidConfiguration {

    private String scopes;

    private String revokeUrl;

    public String getApiUrl() {
        return apiUrl;
    }
@@ -111,4 +113,11 @@ public final class OrcidConfiguration {
        return !StringUtils.isAnyBlank(clientId, clientSecret);
    }

    public String getRevokeUrl() {
        return revokeUrl;
    }

    public void setRevokeUrl(String revokeUrl) {
        this.revokeUrl = revokeUrl;
    }
}
@@ -37,6 +37,7 @@ import org.dspace.discovery.indexobject.IndexableItem;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.service.EPersonService;
import org.dspace.orcid.OrcidToken;
import org.dspace.orcid.client.OrcidClient;
import org.dspace.orcid.model.OrcidEntityType;
import org.dspace.orcid.model.OrcidTokenResponseDTO;
import org.dspace.orcid.service.OrcidSynchronizationService;
@@ -47,6 +48,8 @@ import org.dspace.profile.OrcidProfileSyncPreference;
import org.dspace.profile.OrcidSynchronizationMode;
import org.dspace.profile.service.ResearcherProfileService;
import org.dspace.services.ConfigurationService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;

/**
@@ -57,6 +60,7 @@ import org.springframework.beans.factory.annotation.Autowired;
 */
public class OrcidSynchronizationServiceImpl implements OrcidSynchronizationService {

    private static final Logger log = LoggerFactory.getLogger(OrcidSynchronizationServiceImpl.class);
    @Autowired
    private ItemService itemService;

@@ -75,6 +79,9 @@ public class OrcidSynchronizationServiceImpl implements OrcidSynchronizationServ
    @Autowired
    private ResearcherProfileService researcherProfileService;

    @Autowired
    private OrcidClient orcidClient;

    @Override
    public void linkProfile(Context context, Item profile, OrcidTokenResponseDTO token) throws SQLException {

@@ -114,20 +121,33 @@ public class OrcidSynchronizationServiceImpl implements OrcidSynchronizationServ
    @Override
    public void unlinkProfile(Context context, Item profile) throws SQLException {

        itemService.clearMetadata(context, profile, "person", "identifier", "orcid", Item.ANY);
        itemService.clearMetadata(context, profile, "dspace", "orcid", "scope", Item.ANY);
        itemService.clearMetadata(context, profile, "dspace", "orcid", "authenticated", Item.ANY);
        clearOrcidProfileMetadata(context, profile);

        if (!configurationService.getBooleanProperty("orcid.disconnection.remain-sync", false)) {
            clearSynchronizationSettings(context, profile);
        }
        clearSynchronizationSettings(context, profile);

        orcidTokenService.deleteByProfileItem(context, profile);
        clearOrcidToken(context, profile);

        updateItem(context, profile);

    }

    private void clearOrcidToken(Context context, Item profile) {
        OrcidToken profileToken = orcidTokenService.findByProfileItem(context, profile);
        if (profileToken == null) {
            log.warn("Cannot find any token related to the user profile: {}", profile.getID());
            return;
        }

        orcidTokenService.deleteByProfileItem(context, profile);
        orcidClient.revokeToken(profileToken);
    }

    private void clearOrcidProfileMetadata(Context context, Item profile) throws SQLException {
        itemService.clearMetadata(context, profile, "person", "identifier", "orcid", Item.ANY);
        itemService.clearMetadata(context, profile, "dspace", "orcid", "scope", Item.ANY);
        itemService.clearMetadata(context, profile, "dspace", "orcid", "authenticated", Item.ANY);
    }

    @Override
    public boolean setEntityPreference(Context context, Item profile, OrcidEntityType type,
                                       OrcidEntitySyncPreference value) throws SQLException {
@@ -273,6 +293,11 @@ public class OrcidSynchronizationServiceImpl implements OrcidSynchronizationServ

    private void clearSynchronizationSettings(Context context, Item profile)
        throws SQLException {

        if (configurationService.getBooleanProperty("orcid.disconnection.remain-sync", false)) {
            return;
        }

        itemService.clearMetadata(context, profile, "dspace", "orcid", "sync-mode", Item.ANY);
        itemService.clearMetadata(context, profile, "dspace", "orcid", "sync-profile", Item.ANY);
@@ -243,7 +243,7 @@ public class RDFConsumer implements Consumer {
            DSOIdentifier id = new DSOIdentifier(dso, ctx);
            // If an item gets withdrawn, a MODIFY event is fired. We have to
            // delete the item from the triple store instead of converting it.
            // we don't have to take care for reinstantions of items as they can
            // we don't have to take care for reinstate events on items as they can
            // be processed as normal modify events.
            if (dso instanceof Item
                && event.getDetail() != null
@@ -45,14 +45,15 @@ import org.dspace.core.Context;
import org.dspace.core.LogHelper;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.service.EPersonService;
import org.dspace.scripts.service.ProcessService;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * The implementation for the {@link ProcessService} class
 */
public class ProcessServiceImpl implements ProcessService {
public class ProcessServiceImpl implements ProcessService, InitializingBean {

    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ProcessService.class);

@@ -72,7 +73,34 @@ public class ProcessServiceImpl implements ProcessService {
    private MetadataFieldService metadataFieldService;

    @Autowired
    private EPersonService ePersonService;
    private ConfigurationService configurationService;

    @Override
    public void afterPropertiesSet() throws Exception {
        try {
            Context context = new Context();

            // Processes that were running or scheduled when tomcat crashed, should be cleaned up during startup.
            List<Process> processesToBeFailed = findByStatusAndCreationTimeOlderThan(
                context, List.of(ProcessStatus.RUNNING, ProcessStatus.SCHEDULED), new Date());
            for (Process process : processesToBeFailed) {
                context.setCurrentUser(process.getEPerson());
                // Fail the process.
                log.info("Process with ID {} did not complete before tomcat shutdown, failing it now.",
                    process.getID());
                fail(context, process);
                // But still attach its log to the process.
                appendLog(process.getID(), process.getName(),
                    "Process did not complete before tomcat shutdown.",
                    ProcessLogLevel.ERROR);
                createLogBitstream(context, process);
            }

            context.complete();
        } catch (Exception e) {
            log.error("Unable to clean up Processes: ", e);
        }
    }

    @Override
    public Process create(Context context, EPerson ePerson, String scriptName,
@@ -293,8 +321,8 @@ public class ProcessServiceImpl implements ProcessService {
    @Override
    public void appendLog(int processId, String scriptName, String output, ProcessLogLevel processLogLevel)
        throws IOException {
        File tmpDir = FileUtils.getTempDirectory();
        File tempFile = new File(tmpDir, scriptName + processId + ".log");
        File logsDir = getLogsDirectory();
        File tempFile = new File(logsDir, processId + "-" + scriptName + ".log");
        FileWriter out = new FileWriter(tempFile, true);
        try {
            try (BufferedWriter writer = new BufferedWriter(out)) {
@@ -309,12 +337,15 @@ public class ProcessServiceImpl implements ProcessService {
    @Override
    public void createLogBitstream(Context context, Process process)
        throws IOException, SQLException, AuthorizeException {
        File tmpDir = FileUtils.getTempDirectory();
        File tempFile = new File(tmpDir, process.getName() + process.getID() + ".log");
        FileInputStream inputStream = FileUtils.openInputStream(tempFile);
        appendFile(context, process, inputStream, Process.OUTPUT_TYPE, process.getName() + process.getID() + ".log");
        inputStream.close();
        tempFile.delete();
        File logsDir = getLogsDirectory();
        File tempFile = new File(logsDir, process.getID() + "-" + process.getName() + ".log");
        if (tempFile.exists()) {
            FileInputStream inputStream = FileUtils.openInputStream(tempFile);
            appendFile(context, process, inputStream, Process.OUTPUT_TYPE,
                process.getID() + "-" + process.getName() + ".log");
            inputStream.close();
            tempFile.delete();
        }
    }

    @Override
@@ -343,4 +374,15 @@ public class ProcessServiceImpl implements ProcessService {
        return sb.toString();
    }

    private File getLogsDirectory() {
        String pathStr = configurationService.getProperty("dspace.dir")
            + File.separator + "log" + File.separator + "processes";
        File logsDir = new File(pathStr);
        if (!logsDir.exists()) {
            if (!logsDir.mkdirs()) {
                throw new RuntimeException("Couldn't create [dspace.dir]/log/processes/ directory.");
            }
        }
        return logsDir;
    }
}
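The startup cleanup above hooks into Spring's InitializingBean contract: afterPropertiesSet() runs exactly once per bean, after dependency injection has completed, which makes it a natural place for one-shot repair work such as failing orphaned processes. A minimal sketch of the pattern, with illustrative class and message names that are not part of DSpace:

import org.springframework.beans.factory.InitializingBean;

public class StartupCleanupSketch implements InitializingBean {
    @Override
    public void afterPropertiesSet() {
        // Runs once, after all @Autowired fields have been injected,
        // so any injected services are guaranteed to be available here.
        System.out.println("performing one-shot startup cleanup");
    }
}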
@@ -357,7 +357,7 @@ public class StatisticsImporter {
            SolrInputDocument sid = new SolrInputDocument();
            sid.addField("ip", ip);
            sid.addField("type", dso.getType());
            sid.addField("id", dso.getID());
            sid.addField("id", dso.getID().toString());
            sid.addField("time", DateFormatUtils.format(date, SolrLoggerServiceImpl.DATE_FORMAT_8601));
            sid.addField("continent", continent);
            sid.addField("country", country);
@@ -471,13 +471,13 @@ public class StatisticsImporter {
        boolean verbose = line.hasOption('v');

        // Find our solr server
        String sserver = configurationService.getProperty("solr-statistics", "server");
        String sserver = configurationService.getProperty("solr-statistics.server");
        if (verbose) {
            System.out.println("Writing to solr server at: " + sserver);
        }
        solr = new HttpSolrClient.Builder(sserver).build();

        String dbPath = configurationService.getProperty("usage-statistics", "dbfile");
        String dbPath = configurationService.getProperty("usage-statistics.dbfile");
        try {
            File dbFile = new File(dbPath);
            geoipLookup = new DatabaseReader.Builder(dbFile).build();
@@ -492,6 +492,11 @@ public class StatisticsImporter {
                "Unable to load GeoLite Database file (" + dbPath + ")! You may need to reinstall it. See the DSpace " +
                "installation instructions for more details.",
                e);
        } catch (NullPointerException e) {
            log.error(
                "The value of the property usage-statistics.dbfile is null. You may need to install the GeoLite " +
                "Database file and/or uncomment the property in the config file!",
                e);
        }
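The two replaced getProperty calls above reflect that the module-qualified configuration name must be resolved as one dotted key; the old two-argument form matched the legacy (module, property) lookup style and no longer addressed the intended value. A hedged sketch of the corrected lookup with an explicit null check (the fallback URL is an assumption for illustration only):

// Dotted module-qualified key, resolved in a single lookup.
String sserver = configurationService.getProperty("solr-statistics.server");
if (sserver == null) {
    sserver = "http://localhost:8983/solr/statistics"; // illustrative fallback, not a DSpace default
}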
@@ -25,7 +25,7 @@
 * {@code EventService}, as with the stock listeners.
 * </p>
 *
 * @see org.dspace.google.GoogleRecorderEventListener
 * @see org.dspace.google.GoogleAsyncEventListener
 * @see org.dspace.statistics.SolrLoggerUsageEventListener
 */
@@ -8,6 +8,7 @@
package org.dspace.xmlworkflow.state.actions.processingaction;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
@@ -20,6 +21,8 @@ import org.dspace.app.util.Util;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataFieldName;
import org.dspace.core.Context;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.xmlworkflow.service.WorkflowRequirementsService;
import org.dspace.xmlworkflow.state.Step;
import org.dspace.xmlworkflow.state.actions.ActionAdvancedInfo;
@@ -34,6 +37,9 @@ import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
public class ScoreReviewAction extends ProcessingAction {
    private static final Logger log = LogManager.getLogger(ScoreReviewAction.class);

    private final ConfigurationService configurationService
        = DSpaceServicesFactory.getInstance().getConfigurationService();

    // Option(s)
    public static final String SUBMIT_SCORE = "submit_score";

@@ -114,7 +120,14 @@ public class ScoreReviewAction extends ProcessingAction {

    @Override
    public List<String> getOptions() {
        return List.of(SUBMIT_SCORE, RETURN_TO_POOL);
        List<String> options = new ArrayList<>();
        options.add(SUBMIT_SCORE);
        if (configurationService.getBooleanProperty("workflow.reviewer.file-edit", false)) {
            options.add(SUBMIT_EDIT_METADATA);
        }
        options.add(RETURN_TO_POOL);

        return options;
    }

    @Override
@@ -21,6 +21,8 @@ import org.dspace.content.WorkspaceItem;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.workflow.WorkflowException;
import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory;
import org.dspace.xmlworkflow.state.Step;
@@ -40,6 +42,9 @@ import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
public class SingleUserReviewAction extends ProcessingAction {
    private static final Logger log = LogManager.getLogger(SingleUserReviewAction.class);

    private final ConfigurationService configurationService
        = DSpaceServicesFactory.getInstance().getConfigurationService();

    public static final int OUTCOME_REJECT = 1;

    protected static final String SUBMIT_DECLINE_TASK = "submit_decline_task";
@@ -95,6 +100,9 @@ public class SingleUserReviewAction extends ProcessingAction {
    public List<String> getOptions() {
        List<String> options = new ArrayList<>();
        options.add(SUBMIT_APPROVE);
        if (configurationService.getBooleanProperty("workflow.reviewer.file-edit", false)) {
            options.add(SUBMIT_EDIT_METADATA);
        }
        options.add(SUBMIT_REJECT);
        options.add(SUBMIT_DECLINE_TASK);
        return options;
@@ -13,6 +13,7 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Set;

import org.apache.commons.collections4.CollectionUtils;
@@ -100,12 +101,17 @@ public class PoolTaskServiceImpl implements PoolTaskService {
            //If the user does not have a claimedtask yet, see whether one of the groups of the user has pooltasks
            //for this workflow item
            Set<Group> groups = groupService.allMemberGroupsSet(context, ePerson);
            for (Group group : groups) {
                poolTask = poolTaskDAO.findByWorkflowItemAndGroup(context, group, workflowItem);
                if (poolTask != null) {
                    return poolTask;
                }
            List<PoolTask> generalTasks = poolTaskDAO.findByWorkflowItem(context, workflowItem);

            Optional<PoolTask> firstClaimedTask = groups.stream()
                .flatMap(group -> generalTasks.stream()
                    .filter(f -> f.getGroup().getID().equals(group.getID()))
                    .findFirst()
                    .stream())
                .findFirst();

            if (firstClaimedTask.isPresent()) {
                return firstClaimedTask.get();
            }
        }
    }
@@ -0,0 +1,21 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--

-- In the workspaceitem table, if there are multiple rows referring to the same item ID, keep only the first of them.
DELETE FROM workspaceitem WHERE EXISTS (
    SELECT item_id
    FROM workspaceitem
    GROUP BY item_id
    HAVING COUNT(workspace_item_id) > 1
) AND workspaceitem.workspace_item_id NOT IN (
    SELECT MIN(workspace_item_id) AS workspace_item_id
    FROM workspaceitem
    GROUP BY item_id
);
-- Identify which rows have duplicates, and compute their replacements.
ALTER TABLE workspaceitem ADD CONSTRAINT unique_item_id UNIQUE(item_id);
@@ -0,0 +1,21 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--

-- In the workspaceitem table, if there are multiple rows referring to the same item ID, keep only the first of them.
WITH dedup AS (
    SELECT item_id, MIN(workspace_item_id) AS workspace_item_id
    FROM workspaceitem
    GROUP BY item_id
    HAVING COUNT(workspace_item_id) > 1
)
DELETE FROM workspaceitem
USING dedup
WHERE workspaceitem.item_id = dedup.item_id AND workspaceitem.workspace_item_id <> dedup.workspace_item_id;

-- Enforce uniqueness of item_id in workspaceitem table.
ALTER TABLE workspaceitem ADD CONSTRAINT unique_item_id UNIQUE(item_id);
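Before a migration like the two above is applied, it can be useful to confirm how many duplicate item_id rows actually exist. A hedged pre-flight check in plain JDBC, mirroring the HAVING clause used by both scripts (fragment only; it assumes an open java.sql.Connection named connection and PreparedStatement/ResultSet imports from java.sql):

// Counts item_ids that appear more than once in workspaceitem.
try (PreparedStatement ps = connection.prepareStatement(
        "SELECT item_id, COUNT(workspace_item_id) AS n FROM workspaceitem "
        + "GROUP BY item_id HAVING COUNT(workspace_item_id) > 1");
     ResultSet rs = ps.executeQuery()) {
    while (rs.next()) {
        System.out.println(rs.getObject("item_id") + " occurs " + rs.getInt("n") + " times");
    }
}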
@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<node id='Countries' label='Countries'>
    <isComposedBy>
        <node id='Africa' label='Africa'>
            <isComposedBy>
                <node id='DZA' label='Algeria'/>
            </isComposedBy>
        </node>
    </isComposedBy>
</node>
@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<node id='Countries' label='Länder'>
    <isComposedBy>
        <node id='Africa' label='Afrika'>
            <isComposedBy>
                <node id='DZA' label='Algerien'/>
            </isComposedBy>
        </node>
    </isComposedBy>
</node>
@@ -175,6 +175,9 @@ authority.controlled.dspace.object.owner = true
webui.browse.link.1 = author:dc.contributor.*
webui.browse.link.2 = subject:dc.subject.*

# Configuration required for testing the controlled vocabulary functionality, which is configured using properties
vocabulary.plugin.countries.hierarchy.store=false
vocabulary.plugin.countries.storeIDs=true
# Enable duplicate detection for tests
duplicate.enable = true
@@ -21,8 +21,12 @@ import org.dspace.builder.AbstractBuilder;
import org.dspace.discovery.SearchUtils;
import org.dspace.servicemanager.DSpaceKernelImpl;
import org.dspace.servicemanager.DSpaceKernelInit;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.rules.TestName;

/**
 * Abstract Test class copied from DSpace API
@@ -46,6 +50,12 @@ public class AbstractDSpaceIntegrationTest {
     */
    protected static DSpaceKernelImpl kernelImpl;

    /**
     * Obtain the TestName from JUnit, so that we can print it out in the test logs (see below)
     */
    @Rule
    public TestName testName = new TestName();

    /**
     * Default constructor
     */
@@ -90,6 +100,20 @@ public class AbstractDSpaceIntegrationTest {
        }
    }

    @Before
    public void printTestMethodBefore() {
        // Log the test method being executed. Put lines around it to make it stand out.
        log.info("---");
        log.info("Starting execution of test method: {}()", testName.getMethodName());
        log.info("---");
    }

    @After
    public void printTestMethodAfter() {
        // Log the test method just completed.
        log.info("Finished execution of test method: {}()", testName.getMethodName());
    }

    /**
     * This method will be run after all tests finish as per @AfterClass. It
     * will clean resources initialized by the @BeforeClass methods.
@@ -18,9 +18,13 @@ import java.util.TimeZone;
import org.apache.logging.log4j.Logger;
import org.dspace.servicemanager.DSpaceKernelImpl;
import org.dspace.servicemanager.DSpaceKernelInit;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.mockito.junit.MockitoJUnitRunner;

@@ -62,6 +66,12 @@ public class AbstractDSpaceTest {
     */
    protected static DSpaceKernelImpl kernelImpl;

    /**
     * Obtain the TestName from JUnit, so that we can print it out in the test logs (see below)
     */
    @Rule
    public TestName testName = new TestName();

    /**
     * This method will be run before the first test as per @BeforeClass. It will
     * initialize shared resources required for all tests of this class.
@@ -94,6 +104,19 @@ public class AbstractDSpaceTest {
        }
    }

    @Before
    public void printTestMethodBefore() {
        // Log the test method being executed. Put lines around it to make it stand out.
        log.info("---");
        log.info("Starting execution of test method: {}()", testName.getMethodName());
        log.info("---");
    }

    @After
    public void printTestMethodAfter() {
        // Log the test method just completed.
        log.info("Finished execution of test method: {}()", testName.getMethodName());
    }

    /**
     * This method will be run after all tests finish as per @AfterClass. It
@@ -20,8 +20,8 @@ import org.dspace.app.launcher.ScriptLauncher;
import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
import org.dspace.authority.AuthoritySearchService;
import org.dspace.authority.MockAuthoritySolrServiceImpl;
import org.dspace.authorize.AuthorizeException;
import org.dspace.builder.AbstractBuilder;
import org.dspace.builder.EPersonBuilder;
import org.dspace.content.Community;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
@@ -127,19 +127,16 @@ public class AbstractIntegrationTestWithDatabase extends AbstractDSpaceIntegrati
            EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
            eperson = ePersonService.findByEmail(context, "test@email.com");
            if (eperson == null) {
                // This EPerson creation should only happen once (i.e. for first test run)
                log.info("Creating initial EPerson (email=test@email.com) for Unit Tests");
                eperson = ePersonService.create(context);
                eperson.setFirstName(context, "first");
                eperson.setLastName(context, "last");
                eperson.setEmail("test@email.com");
                eperson.setCanLogIn(true);
                eperson.setLanguage(context, I18nUtil.getDefaultLocale().getLanguage());
                ePersonService.setPassword(eperson, password);
                // actually save the eperson to unit testing DB
                ePersonService.update(context, eperson);
                // Create test EPerson for usage in all tests
                log.info("Creating Test EPerson (email=test@email.com) for Integration Tests");
                eperson = EPersonBuilder.createEPerson(context)
                                        .withNameInMetadata("first", "last")
                                        .withEmail("test@email.com")
                                        .withCanLogin(true)
                                        .withLanguage(I18nUtil.getDefaultLocale().getLanguage())
                                        .withPassword(password)
                                        .build();
            }

            // Set our global test EPerson as the current user in DSpace
            context.setCurrentUser(eperson);

@@ -148,26 +145,23 @@ public class AbstractIntegrationTestWithDatabase extends AbstractDSpaceIntegrati

            admin = ePersonService.findByEmail(context, "admin@email.com");
            if (admin == null) {
                // This EPerson creation should only happen once (i.e. for first test run)
                log.info("Creating initial EPerson (email=admin@email.com) for Unit Tests");
                admin = ePersonService.create(context);
                admin.setFirstName(context, "first (admin)");
                admin.setLastName(context, "last (admin)");
                admin.setEmail("admin@email.com");
                admin.setCanLogIn(true);
                admin.setLanguage(context, I18nUtil.getDefaultLocale().getLanguage());
                ePersonService.setPassword(admin, password);
                // actually save the eperson to unit testing DB
                ePersonService.update(context, admin);
                // Create test Administrator for usage in all tests
                log.info("Creating Test Admin EPerson (email=admin@email.com) for Integration Tests");
                admin = EPersonBuilder.createEPerson(context)
                                      .withNameInMetadata("first (admin)", "last (admin)")
                                      .withEmail("admin@email.com")
                                      .withCanLogin(true)
                                      .withLanguage(I18nUtil.getDefaultLocale().getLanguage())
                                      .withPassword(password)
                                      .build();

                // Add Test Administrator to the ADMIN group in test database
                GroupService groupService = EPersonServiceFactory.getInstance().getGroupService();
                Group adminGroup = groupService.findByName(context, Group.ADMIN);
                groupService.addMember(context, adminGroup, admin);
            }

            context.restoreAuthSystemState();
        } catch (AuthorizeException ex) {
            log.error("Error creating initial eperson or default groups", ex);
            fail("Error creating initial eperson or default groups in AbstractUnitTest init()");
        } catch (SQLException ex) {
            log.error(ex.getMessage(), ex);
            fail("SQL Error on AbstractUnitTest init()");
@@ -23,7 +23,8 @@ import java.util.List;
import com.google.common.io.Files;
import com.opencsv.CSVReader;
import com.opencsv.exceptions.CsvException;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.app.launcher.ScriptLauncher;
import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
@@ -51,7 +52,7 @@ public class MetadataExportSearchIT extends AbstractIntegrationTestWithDatabase
    private Item[] itemsSubject2 = new Item[numberItemsSubject2];
    private String filename;
    private Collection collection;
    private Logger logger = Logger.getLogger(MetadataExportSearchIT.class);
    private Logger logger = LogManager.getLogger(MetadataExportSearchIT.class);
    private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
    private SearchService searchService;
@@ -11,7 +11,8 @@ import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.Date;
|
||||
|
||||
import org.apache.log4j.Logger;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.orcid.OrcidHistory;
|
||||
@@ -24,7 +25,7 @@ import org.dspace.orcid.service.OrcidHistoryService;
|
||||
*/
|
||||
public class OrcidHistoryBuilder extends AbstractBuilder<OrcidHistory, OrcidHistoryService> {
|
||||
|
||||
private static final Logger log = Logger.getLogger(OrcidHistoryBuilder.class);
|
||||
private static final Logger log = LogManager.getLogger(OrcidHistoryBuilder.class);
|
||||
|
||||
private OrcidHistory orcidHistory;
|
||||
|
||||
|
@@ -59,7 +59,7 @@ import org.dspace.content.virtual.Collected;
|
||||
import org.dspace.content.virtual.VirtualMetadataConfiguration;
|
||||
import org.dspace.content.virtual.VirtualMetadataPopulator;
|
||||
import org.dspace.core.Constants;
|
||||
import org.dspace.discovery.SolrSearchCore;
|
||||
import org.dspace.discovery.MockSolrSearchCore;
|
||||
import org.dspace.kernel.ServiceManager;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.dspace.versioning.Version;
|
||||
@@ -79,8 +79,9 @@ public class VersioningWithRelationshipsIT extends AbstractIntegrationTestWithDa
|
||||
ContentServiceFactory.getInstance().getInstallItemService();
|
||||
private final ItemService itemService =
|
||||
ContentServiceFactory.getInstance().getItemService();
|
||||
private final SolrSearchCore solrSearchCore =
|
||||
DSpaceServicesFactory.getInstance().getServiceManager().getServicesByType(SolrSearchCore.class).get(0);
|
||||
private final MockSolrSearchCore solrSearchCore =
|
||||
DSpaceServicesFactory.getInstance().getServiceManager().getServiceByName(null, MockSolrSearchCore.class);
|
||||
|
||||
protected Community community;
|
||||
protected Collection collection;
|
||||
protected EntityType publicationEntityType;
|
||||
|
@@ -12,6 +12,7 @@ import static org.hamcrest.CoreMatchers.notNullValue;
|
||||
import static org.hamcrest.CoreMatchers.nullValue;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.junit.Assert.assertFalse;
|
||||
import static org.junit.Assert.assertThrows;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.Assert.fail;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
@@ -39,6 +40,7 @@ import org.dspace.core.Context;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.eperson.factory.EPersonServiceFactory;
|
||||
import org.dspace.eperson.service.EPersonService;
|
||||
import org.dspace.workflow.MockWorkflowItem;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
@@ -468,4 +470,14 @@ public class WorkspaceItemTest extends AbstractUnitTest {
|
||||
assertTrue("testSetPublishedBefore 0", wi.isPublishedBefore());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDuplicateItemID() throws Exception {
|
||||
context.turnOffAuthorisationSystem();
|
||||
Item item = wi.getItem();
|
||||
MockWorkflowItem wfItem = new MockWorkflowItem();
|
||||
wfItem.item = item;
|
||||
wfItem.collection = collection;
|
||||
assertThrows(IllegalArgumentException.class, () -> workspaceItemService.create(context, wfItem));
|
||||
context.restoreAuthSystemState();
|
||||
}
|
||||
}
|
||||
|
@@ -89,6 +89,145 @@ public class DSpaceControlledVocabularyTest extends AbstractDSpaceTest {
assertEquals("north 40", result.values[0].value);
}

/**
* Test of getMatches method of class
* DSpaceControlledVocabulary using a localized controlled vocabulary with no locale (fallback to default)
* @throws java.lang.ClassNotFoundException passed through.
*/
@Test
public void testGetMatchesNoLocale() throws ClassNotFoundException {
final String PLUGIN_INTERFACE = "org.dspace.content.authority.ChoiceAuthority";

String idValue = "DZA";
String labelPart = "Alge";
int start = 0;
int limit = 10;
// This "countries" Controlled Vocab is included in TestEnvironment data
// (under /src/test/data/dspaceFolder/) and it should be auto-loaded
// by test configs in /src/test/data/dspaceFolder/config/local.cfg
DSpaceControlledVocabulary instance = (DSpaceControlledVocabulary)
CoreServiceFactory.getInstance().getPluginService().getNamedPlugin(Class.forName(PLUGIN_INTERFACE),
"countries");
assertNotNull(instance);
Choices result = instance.getMatches(labelPart, start, limit, null);
assertEquals(idValue, result.values[0].value);
assertEquals("Algeria", result.values[0].label);
}

/**
* Test of getBestMatch method of class
* DSpaceControlledVocabulary using a localized controlled vocabulary with no locale (fallback to default)
* @throws java.lang.ClassNotFoundException passed through.
*/
@Test
public void testGetBestMatchIdValueNoLocale() throws ClassNotFoundException {
final String PLUGIN_INTERFACE = "org.dspace.content.authority.ChoiceAuthority";

String idValue = "DZA";
// This "countries" Controlled Vocab is included in TestEnvironment data
// (under /src/test/data/dspaceFolder/) and it should be auto-loaded
// by test configs in /src/test/data/dspaceFolder/config/local.cfg
DSpaceControlledVocabulary instance = (DSpaceControlledVocabulary)
CoreServiceFactory.getInstance().getPluginService().getNamedPlugin(Class.forName(PLUGIN_INTERFACE),
"countries");
assertNotNull(instance);
Choices result = instance.getBestMatch(idValue, null);
assertEquals(idValue, result.values[0].value);
assertEquals("Algeria", result.values[0].label);
}

/**
* Test of getMatches method of class
* DSpaceControlledVocabulary using a localized controlled vocabulary with valid locale parameter (localized
* label returned)
*/
@Test
public void testGetMatchesGermanLocale() throws ClassNotFoundException {
final String PLUGIN_INTERFACE = "org.dspace.content.authority.ChoiceAuthority";

String idValue = "DZA";
String labelPart = "Alge";
int start = 0;
int limit = 10;
// This "countries" Controlled Vocab is included in TestEnvironment data
// (under /src/test/data/dspaceFolder/) and it should be auto-loaded
// by test configs in /src/test/data/dspaceFolder/config/local.cfg
DSpaceControlledVocabulary instance = (DSpaceControlledVocabulary)
CoreServiceFactory.getInstance().getPluginService().getNamedPlugin(Class.forName(PLUGIN_INTERFACE),
"countries");
assertNotNull(instance);
Choices result = instance.getMatches(labelPart, start, limit, "de");
assertEquals(idValue, result.values[0].value);
assertEquals("Algerien", result.values[0].label);
}

/**
* Test of getBestMatch method of class
* DSpaceControlledVocabulary using a localized controlled vocabulary with valid locale parameter (localized
* label returned)
*/
@Test
public void testGetBestMatchIdValueGermanLocale() throws ClassNotFoundException {
final String PLUGIN_INTERFACE = "org.dspace.content.authority.ChoiceAuthority";

String idValue = "DZA";
// This "countries" Controlled Vocab is included in TestEnvironment data
// (under /src/test/data/dspaceFolder/) and it should be auto-loaded
// by test configs in /src/test/data/dspaceFolder/config/local.cfg
DSpaceControlledVocabulary instance = (DSpaceControlledVocabulary)
CoreServiceFactory.getInstance().getPluginService().getNamedPlugin(Class.forName(PLUGIN_INTERFACE),
"countries");
assertNotNull(instance);
Choices result = instance.getBestMatch(idValue, "de");
assertEquals(idValue, result.values[0].value);
assertEquals("Algerien", result.values[0].label);
}

/**
* Test of getChoice method of class
* DSpaceControlledVocabulary using a localized controlled vocabulary with no locale (fallback to default)
* @throws java.lang.ClassNotFoundException passed through.
*/
@Test
public void testGetChoiceNoLocale() throws ClassNotFoundException {
final String PLUGIN_INTERFACE = "org.dspace.content.authority.ChoiceAuthority";

String idValue = "DZA";
// This "countries" Controlled Vocab is included in TestEnvironment data
// (under /src/test/data/dspaceFolder/) and it should be auto-loaded
// by test configs in /src/test/data/dspaceFolder/config/local.cfg
DSpaceControlledVocabulary instance = (DSpaceControlledVocabulary)
CoreServiceFactory.getInstance().getPluginService().getNamedPlugin(Class.forName(PLUGIN_INTERFACE),
"countries");
assertNotNull(instance);
Choice result = instance.getChoice(idValue, null);
assertEquals(idValue, result.value);
assertEquals("Algeria", result.label);
}

/**
* Test of getChoice method of class
* DSpaceControlledVocabulary using a localized controlled vocabulary with valid locale parameter (localized
* label returned)
* @throws java.lang.ClassNotFoundException passed through.
*/
@Test
public void testGetChoiceGermanLocale() throws ClassNotFoundException {
final String PLUGIN_INTERFACE = "org.dspace.content.authority.ChoiceAuthority";

String idValue = "DZA";
// This "countries" Controlled Vocab is included in TestEnvironment data
// (under /src/test/data/dspaceFolder/) and it should be auto-loaded
// by test configs in /src/test/data/dspaceFolder/config/local.cfg
DSpaceControlledVocabulary instance = (DSpaceControlledVocabulary)
CoreServiceFactory.getInstance().getPluginService().getNamedPlugin(Class.forName(PLUGIN_INTERFACE),
"countries");
assertNotNull(instance);
Choice result = instance.getChoice(idValue, "de");
assertEquals(idValue, result.value);
assertEquals("Algerien", result.label);
}

/**
* Test of getBestMatch method, of class DSpaceControlledVocabulary.
*/

@@ -558,4 +558,29 @@ public class ContextTest extends AbstractUnitTest {
cleanupContext(instance);
}

@Test
public void testUncacheEntities() throws Throwable {
// To set up the test, ensure the cache contains more than the current user entity
groupService.findByName(context, Group.ANONYMOUS);
assertTrue("Cache size should be greater than one", context.getDBConnection().getCacheSize() > 1);

context.uncacheEntities();

assertThat("Cache size should be one (current user)", context.getDBConnection().getCacheSize(), equalTo(1L));
context.reloadEntity(context.getCurrentUser());
assertThat("Cache should only contain the current user", context.getDBConnection().getCacheSize(), equalTo(1L));
}

@Test
public void testUncacheEntity() throws Throwable {
// Remember the cache size after loading an entity
Group group = groupService.findByName(context, Group.ANONYMOUS);
long oldCacheSize = context.getDBConnection().getCacheSize();

// Uncache the entity
context.uncacheEntity(group);

long newCacheSize = context.getDBConnection().getCacheSize();
assertThat("Cache size should be reduced by one", newCacheSize, equalTo(oldCacheSize - 1));
}
}

@@ -205,6 +205,28 @@ public class HibernateDBConnectionTest extends AbstractUnitTest {
.contains(person));
}

/**
* Test of uncacheEntities method
*/
@Test
public void testUncacheEntities() throws SQLException {
// Get DBConnection associated with DSpace Context
HibernateDBConnection dbConnection = (HibernateDBConnection) context.getDBConnection();
EPerson person = context.getCurrentUser();

assertTrue("Current user should be cached in session", dbConnection.getSession()
.contains(person));

dbConnection.uncacheEntities();
assertFalse("Current user should be gone from cache", dbConnection.getSession()
.contains(person));

// Test ability to reload an uncached entity
person = dbConnection.reloadEntity(person);
assertTrue("Current user should be cached back in session", dbConnection.getSession()
.contains(person));
}

/**
* Test of uncacheEntity method
*/

@@ -10,10 +10,7 @@ package org.dspace.ctask.general;
import static org.junit.Assert.assertEquals;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
@@ -21,13 +18,11 @@ import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.core.factory.CoreServiceFactory;
import org.dspace.curate.Curator;
import org.dspace.identifier.IdentifierProvider;
import org.dspace.identifier.IdentifierServiceImpl;
import org.dspace.identifier.AbstractIdentifierProviderIT;
import org.dspace.identifier.VersionedHandleIdentifierProvider;
import org.dspace.identifier.VersionedHandleIdentifierProviderWithCanonicalHandles;
import org.dspace.kernel.ServiceManager;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.junit.After;
import org.junit.Test;

/**
@@ -36,30 +31,19 @@ import org.junit.Test;
* @author mwood
*/
public class CreateMissingIdentifiersIT
extends AbstractIntegrationTestWithDatabase {
private ServiceManager serviceManager;
private IdentifierServiceImpl identifierService;
extends AbstractIdentifierProviderIT {

private static final String P_TASK_DEF
= "plugin.named.org.dspace.curate.CurationTask";
private static final String TASK_NAME = "test";

@Override
public void setUp() throws Exception {
super.setUp();
context.turnOffAuthorisationSystem();

serviceManager = DSpaceServicesFactory.getInstance().getServiceManager();
identifierService = serviceManager.getServicesByType(IdentifierServiceImpl.class).get(0);
// Clean out providers to avoid any being used for creation of community and collection
identifierService.setProviders(new ArrayList<>());
}
private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();

@Test
public void testPerform()
throws IOException {
// Must remove any cached named plugins before creating a new one
CoreServiceFactory.getInstance().getPluginService().clearNamedPluginClasses();
ConfigurationService configurationService = kernelImpl.getConfigurationService();
// Define a new task dynamically
configurationService.setProperty(P_TASK_DEF,
CreateMissingIdentifiers.class.getCanonicalName() + " = " + TASK_NAME);
@@ -76,14 +60,7 @@ public class CreateMissingIdentifiersIT
.build();

/*
* Curate with regular test configuration -- should succeed.
*/
curator.curate(context, item);
int status = curator.getStatus(TASK_NAME);
assertEquals("Curation should succeed", Curator.CURATE_SUCCESS, status);

/*
* Now install an incompatible provider to make the task fail.
* First, install an incompatible provider to make the task fail.
*/
registerProvider(VersionedHandleIdentifierProviderWithCanonicalHandles.class);

@@ -92,22 +69,18 @@ public class CreateMissingIdentifiersIT
curator.getResult(TASK_NAME));
assertEquals("Curation should fail", Curator.CURATE_ERROR,
curator.getStatus(TASK_NAME));
}

@Override
@After
public void destroy() throws Exception {
super.destroy();
DSpaceServicesFactory.getInstance().getServiceManager().getApplicationContext().refresh();
}
// Unregister this non-default provider
unregisterProvider(VersionedHandleIdentifierProviderWithCanonicalHandles.class);
// Re-register the default provider (for later tests which may depend on it)
registerProvider(VersionedHandleIdentifierProvider.class);

private void registerProvider(Class type) {
// Register our new provider
serviceManager.registerServiceClass(type.getName(), type);
IdentifierProvider identifierProvider =
(IdentifierProvider) serviceManager.getServiceByName(type.getName(), type);

// Overwrite the identifier-service's providers with the new one to ensure only this provider is used
identifierService.setProviders(List.of(identifierProvider));
/*
* Now, verify curate with default Handle Provider works
* (and that our re-registration of the default provider above was successful)
*/
curator.curate(context, item);
int status = curator.getStatus(TASK_NAME);
assertEquals("Curation should succeed", Curator.CURATE_SUCCESS, status);
}
}

@@ -8,6 +8,10 @@
package org.dspace.discovery;

import static org.dspace.discovery.SolrServiceWorkspaceWorkflowRestrictionPlugin.DISCOVER_WORKSPACE_CONFIGURATION_NAME;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@@ -21,6 +25,10 @@ import java.util.List;
import java.util.stream.Collectors;

import jakarta.servlet.http.HttpServletRequest;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.app.launcher.ScriptLauncher;
import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
@@ -99,6 +107,9 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase {
MetadataAuthorityService metadataAuthorityService = ContentAuthorityServiceFactory.getInstance()
.getMetadataAuthorityService();

MockSolrSearchCore solrSearchCore = DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName(null, MockSolrSearchCore.class);

@Override
@Before
public void setUp() throws Exception {
@@ -796,6 +807,104 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase {
}
}

/**
* Test designed to check that the submitter is not indexed in the Solr documents for items,
* while the submitter authority is still indexed
* @throws SearchServiceException
*/
@Test
public void searchWithNoSubmitterTest() throws SearchServiceException {

configurationService.setProperty("discovery.index.item.submitter.enabled", false);
DiscoveryConfiguration defaultConf = SearchUtils.getDiscoveryConfiguration(context, "default", null);

// Populate the testing objects: create items in eperson's workspace and perform search in it
int numberItems = 10;
context.turnOffAuthorisationSystem();
EPerson submitter = null;
try {
submitter = EPersonBuilder.createEPerson(context).withEmail("submitter@example.org")
.withNameInMetadata("Peter", "Funny").build();
} catch (SQLException e) {
throw new RuntimeException(e);
}
context.setCurrentUser(submitter);
Community community = CommunityBuilder.createCommunity(context).build();
Collection collection = CollectionBuilder.createCollection(context, community).build();
for (int i = 0; i < numberItems; i++) {
ItemBuilder.createItem(context, collection)
.withTitle("item " + i)
.build();
}
context.restoreAuthSystemState();

// Build query with default parameters (except for workspaceConf)
QueryResponse result = null;
try {
result = solrSearchCore.getSolr().query(new SolrQuery(String.format(
"search.resourcetype:\"Item\"")));
} catch (SolrServerException e) {
throw new RuntimeException(e);
} catch (IOException e) {
throw new RuntimeException(e);
}
assertEquals(result.getResults().size(), numberItems);
for (SolrDocument doc : result.getResults()) {
assertThat(doc.getFieldNames(),
not(hasItems("submitter_keyword", "submitter_ac", "submitter_acid", "submitter_filter")));
assertThat(doc.getFieldNames(), hasItem("submitter_authority"));
}
}

/**
* Test designed to check that the submitter is indexed in all Solr documents for items
* @throws SearchServiceException
*/
@Test
public void searchWithSubmitterTest() throws SearchServiceException {

configurationService.setProperty("discovery.index.item.submitter.enabled", true);
DiscoveryConfiguration defaultConf = SearchUtils.getDiscoveryConfiguration(context, "default", null);

// Populate the testing objects: create items in eperson's workspace and perform search in it
int numberItems = 10;
context.turnOffAuthorisationSystem();
EPerson submitter = null;
try {
submitter = EPersonBuilder.createEPerson(context).withEmail("submitter@example.org")
.withNameInMetadata("Peter", "Funny").build();
} catch (SQLException e) {
throw new RuntimeException(e);
}
context.setCurrentUser(submitter);
Community community = CommunityBuilder.createCommunity(context).build();
Collection collection = CollectionBuilder.createCollection(context, community).build();
for (int i = 0; i < numberItems; i++) {
ItemBuilder.createItem(context, collection)
.withTitle("item " + i)
.build();
}
context.restoreAuthSystemState();

// Build query with default parameters (except for workspaceConf)
QueryResponse result = null;
try {
result = solrSearchCore.getSolr().query(new SolrQuery(String.format(
"search.resourcetype:\"Item\"")));
} catch (SolrServerException e) {
throw new RuntimeException(e);
} catch (IOException e) {
throw new RuntimeException(e);
}
assertEquals(result.getResults().size(), numberItems);
for (SolrDocument doc : result.getResults()) {
for (String fieldname : doc.getFieldNames()) {
assertThat(doc.getFieldNames(), hasItems("submitter_keyword","submitter_ac", "submitter_filter",
"submitter_authority"));
}
}
}

private void assertSearchQuery(String resourceType, int size) throws SearchServiceException {
assertSearchQuery(resourceType, size, size, 0, -1);
}

@@ -0,0 +1,68 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.identifier;

import java.util.ArrayList;
import java.util.List;

import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.kernel.ServiceManager;
import org.dspace.services.factory.DSpaceServicesFactory;

/**
* AbstractIdentifierProviderIT which contains a few useful utility methods for IdentifierProvider Integration Tests
*/
public class AbstractIdentifierProviderIT extends AbstractIntegrationTestWithDatabase {

protected final ServiceManager serviceManager = DSpaceServicesFactory.getInstance().getServiceManager();
protected final IdentifierServiceImpl identifierService =
serviceManager.getServicesByType(IdentifierServiceImpl.class).get(0);

/**
* Register a specific IdentifierProvider into the current IdentifierService (replacing any existing providers).
* This method will also ensure the IdentifierProvider service is registered in the DSpace Service Manager.
* @param type IdentifierProvider Class
*/
protected void registerProvider(Class type) {
// Register our new provider
IdentifierProvider identifierProvider =
(IdentifierProvider) DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName(type.getName(), type);
if (identifierProvider == null) {
DSpaceServicesFactory.getInstance().getServiceManager().registerServiceClass(type.getName(), type);
identifierProvider = (IdentifierProvider) DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName(type.getName(), type);
}

identifierService.setProviders(List.of(identifierProvider));
}

/**
* Unregister a specific IdentifierProvider from the current IdentifierService (removing all existing providers).
* This method will also ensure the IdentifierProvider service is unregistered in the DSpace Service Manager,
* which ensures it does not conflict with other IdentifierProvider services.
* @param type IdentifierProvider Class
*/
protected void unregisterProvider(Class type) {
// Find the provider service
IdentifierProvider identifierProvider =
(IdentifierProvider) DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName(type.getName(), type);
// If found, unregister it
if (identifierProvider != null) {
DSpaceServicesFactory.getInstance().getServiceManager().unregisterService(type.getName());
}

// Overwrite the identifier-service's providers with an empty list
identifierService.setProviders(new ArrayList<>());
}

}
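
For orientation, a minimal usage sketch of this helper class: the subclass name and test body below are hypothetical, while CreateMissingIdentifiersIT and VersionedHandleIdentifierProviderIT elsewhere in this diff are the real users.

package org.dspace.identifier;

import org.junit.Test;

public class ExampleProviderIT extends AbstractIdentifierProviderIT {

    @Test
    public void testWithCanonicalHandles() throws Exception {
        // Swap in the non-default provider under test
        registerProvider(VersionedHandleIdentifierProviderWithCanonicalHandles.class);

        // ... exercise code that mints or resolves identifiers ...

        // Restore the default provider so later tests see the expected setup
        unregisterProvider(VersionedHandleIdentifierProviderWithCanonicalHandles.class);
        registerProvider(VersionedHandleIdentifierProvider.class);
    }
}
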
@@ -11,10 +11,7 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.authorize.AuthorizeException;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
@@ -22,15 +19,10 @@ import org.dspace.builder.ItemBuilder;
import org.dspace.builder.VersionBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.kernel.ServiceManager;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

public class VersionedHandleIdentifierProviderIT extends AbstractIntegrationTestWithDatabase {
private ServiceManager serviceManager;
private IdentifierServiceImpl identifierService;
public class VersionedHandleIdentifierProviderIT extends AbstractIdentifierProviderIT {

private String firstHandle;

@@ -44,12 +36,6 @@ public class VersionedHandleIdentifierProviderIT extends AbstractIntegrationTest
public void setUp() throws Exception {
super.setUp();
context.turnOffAuthorisationSystem();

serviceManager = DSpaceServicesFactory.getInstance().getServiceManager();
identifierService = serviceManager.getServicesByType(IdentifierServiceImpl.class).get(0);
// Clean out providers to avoid any being used for creation of community and collection
identifierService.setProviders(new ArrayList<>());

parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
@@ -58,33 +44,6 @@ public class VersionedHandleIdentifierProviderIT extends AbstractIntegrationTest
.build();
}

@After
@Override
public void destroy() throws Exception {
super.destroy();
// After this test has finished running, refresh application context and
// set the expected 'default' versioned handle provider back to ensure other tests don't fail
DSpaceServicesFactory.getInstance().getServiceManager().getApplicationContext().refresh();
}

private void registerProvider(Class type) {
// Register our new provider
IdentifierProvider identifierProvider =
(IdentifierProvider) DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName(type.getName(), type);
if (identifierProvider == null) {
DSpaceServicesFactory.getInstance().getServiceManager().registerServiceClass(type.getName(), type);
identifierProvider = (IdentifierProvider) DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName(type.getName(), type);
}

// Overwrite the identifier-service's providers with the new one to ensure only this provider is used
identifierService = DSpaceServicesFactory.getInstance().getServiceManager()
.getServicesByType(IdentifierServiceImpl.class).get(0);
identifierService.setProviders(new ArrayList<>());
identifierService.setProviders(List.of(identifierProvider));
}

private void createVersions() throws SQLException, AuthorizeException {
itemV1 = ItemBuilder.createItem(context, collection)
.withTitle("First version")
@@ -96,7 +55,6 @@ public class VersionedHandleIdentifierProviderIT extends AbstractIntegrationTest

@Test
public void testDefaultVersionedHandleProvider() throws Exception {
registerProvider(VersionedHandleIdentifierProvider.class);
createVersions();

// Confirm the original item only has its original handle
@@ -125,6 +83,11 @@ public class VersionedHandleIdentifierProviderIT extends AbstractIntegrationTest
assertEquals(firstHandle, itemV3.getHandle());
assertEquals(2, itemV3.getHandles().size());
containsHandle(itemV3, firstHandle + ".3");

// Unregister this non-default provider
unregisterProvider(VersionedHandleIdentifierProviderWithCanonicalHandles.class);
// Re-register the default provider (for later tests)
registerProvider(VersionedHandleIdentifierProvider.class);
}

private void containsHandle(Item item, String handle) {

@@ -83,6 +83,7 @@ public class DSpaceKernelInitializer
* Initially look for JNDI Resource called "java:/comp/env/dspace.dir".
* If not found, use value provided in "dspace.dir" in Spring Environment
*/
@SuppressWarnings("BanJNDI")
private String getDSpaceHome(ConfigurableEnvironment environment) {
// Load the "dspace.dir" property from Spring's configuration.
// This gives us the location of our DSpace configuration, which is

@@ -0,0 +1,62 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.workflow;

import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.eperson.EPerson;

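/**
 * Minimal mutable stub of WorkflowItem for unit tests; WorkspaceItemTest#testDuplicateItemID
 * above uses it to wrap an existing Item in a workflow item without touching the database.
 */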
public class MockWorkflowItem implements WorkflowItem {
public Integer id;
public Item item;
public Collection collection;
public EPerson submitter;
boolean hasMultipleFiles;
boolean hasMultipleTitles;
boolean isPublishedBefore;

public Integer getID() {
return id;
}

public Item getItem() {
return item;
}

public Collection getCollection() {
return collection;
}

public EPerson getSubmitter() {
return submitter;
}

public boolean hasMultipleFiles() {
return hasMultipleFiles;
}

public void setMultipleFiles(boolean b) {
hasMultipleFiles = b;
}

public boolean hasMultipleTitles() {
return hasMultipleTitles;
}

public void setMultipleTitles(boolean b) {
hasMultipleTitles = b;
}

public boolean isPublishedBefore() {
return isPublishedBefore;
}

public void setPublishedBefore(boolean b) {
isPublishedBefore = b;
}
}

@@ -44,6 +44,11 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-logging</artifactId>
</exclusion>
<!-- Spring JCL is unnecessary and conflicts with commons-logging when both are on classpath -->
<exclusion>
<groupId>org.springframework</groupId>
<artifactId>spring-jcl</artifactId>
</exclusion>
</exclusions>
</dependency>

@@ -106,7 +111,7 @@
<dependency>
<groupId>de.digitalcollections.iiif</groupId>
<artifactId>iiif-apis</artifactId>
<version>0.3.10</version>
<version>0.3.11</version>
<exclusions>
<exclusion>
<groupId>org.javassist</groupId>

@@ -65,9 +65,8 @@

<!-- Java Injection -->
<dependency>
<groupId>javax.inject</groupId>
<artifactId>javax.inject</artifactId>
<version>1</version>
<groupId>jakarta.inject</groupId>
<artifactId>jakarta.inject-api</artifactId>
</dependency>

<!-- Needed to support Spring @Configuration classes (to register servlets/beans with Spring Boot webapp) -->
@@ -80,6 +79,11 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-logging</artifactId>
</exclusion>
<!-- Spring JCL is unnecessary and conflicts with commons-logging when both are on classpath -->
<exclusion>
<groupId>org.springframework</groupId>
<artifactId>spring-jcl</artifactId>
</exclusion>
</exclusions>
</dependency>

@@ -94,22 +98,9 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</exclusion>
<!-- More recent version is pulled in via below dependencies -->
<exclusion>
<groupId>org.parboiled</groupId>
<artifactId>parboiled-java</artifactId>
</exclusion>
</exclusions>
</dependency>

<!-- Newer version, necessary to align with newer version of ASM in parent POM
This is needed by both jtwig-spring-boot-starter and our tests. -->
<dependency>
<groupId>org.parboiled</groupId>
<artifactId>parboiled-java</artifactId>
<version>1.3.1</version>
</dependency>

<!-- Internal -->
<dependency>
<groupId>org.dspace</groupId>
@@ -128,23 +119,6 @@
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-web</artifactId>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
</dependency>

<!-- Testing -->
<dependency>

@@ -11,7 +11,7 @@ import java.util.List;

import com.lyncode.xoai.dataprovider.xml.xoai.Element;
import com.lyncode.xoai.dataprovider.xml.xoai.Metadata;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.license.factory.LicenseServiceFactory;

@@ -9,7 +9,7 @@ package org.dspace.xoai.app;

import static com.lyncode.xoai.dataprovider.core.Granularity.Second;
import static java.util.Objects.nonNull;
import static org.apache.commons.lang.StringUtils.EMPTY;
import static org.apache.commons.lang3.StringUtils.EMPTY;
import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_PARAM;
import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_START;
import static org.dspace.xoai.util.ItemUtils.retrieveMetadata;
@@ -334,6 +334,11 @@ public class XOAI {
server.add(list);
server.commit();
list.clear();
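// Clearing the entity cache after each committed batch (below) keeps
// memory usage bounded while iterating over a large number of items.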
try {
context.uncacheEntities();
} catch (SQLException ex) {
log.error("Error uncaching entities", ex);
}
}
}
System.out.println("Total: " + i + " items");

@@ -9,8 +9,8 @@ package org.dspace.xoai.filter;

import java.sql.SQLException;

import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Bundle;
import org.dspace.content.Item;
import org.dspace.handle.factory.HandleServiceFactory;

@@ -522,15 +522,14 @@
<xsl:choose>
<xsl:when test="normalize-space($path/../oai:resumptionToken/text()) = ''">
<!-- on the last page of results we have to assume that @completeListSize is available -->
<xsl:value-of
select="$total - $count" />
<xsl:value-of select="(number($total) - $count) + 1" />
-
<xsl:value-of select="$total" />
</xsl:when>
<xsl:otherwise>
<xsl:value-of select="$cursor * $count" />
<xsl:value-of select="(number($cursor) * $count) + 1" />
-
<xsl:value-of select="($cursor+1) * $count" />
<xsl:value-of select="(number($cursor) + 1) * $count" />
</xsl:otherwise>
</xsl:choose>
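<!-- Worked example of the corrected arithmetic: with $count = 10 and $cursor = 2
     the current range now renders as "21 - 30" instead of the off-by-one "20 - 30";
     on a last page with $total = 95 and $count = 10 it renders as "86 - 95". -->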
</xsl:when>

@@ -13,13 +13,14 @@ import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;

import java.io.InputStream;
import java.nio.charset.Charset;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamSource;

import com.lyncode.xoai.util.XSLPipeline;
import org.apache.commons.io.IOUtils;
import org.dspace.xoai.tests.support.XmlMatcherBuilder;
import org.junit.Test;
import org.parboiled.common.FileUtils;

public class PipelineTest {
private static TransformerFactory factory = TransformerFactory.newInstance();
@@ -28,9 +29,9 @@ public class PipelineTest {
public void pipelineTest() throws Exception {
InputStream input = PipelineTest.class.getClassLoader().getResourceAsStream("item.xml");
InputStream xslt = PipelineTest.class.getClassLoader().getResourceAsStream("oai_dc.xsl");
String output = FileUtils.readAllText(new XSLPipeline(input, true)
.apply(factory.newTemplates(new StreamSource(xslt)))
.getTransformed());
String output = IOUtils.toString(new XSLPipeline(input, true)
.apply(factory.newTemplates(new StreamSource(xslt)))
.getTransformed(), Charset.defaultCharset());

assertThat(output, oai_dc().withXPath("/oai_dc:dc/dc:title", equalTo("Teste")));

@@ -67,6 +67,11 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-logging</artifactId>
</exclusion>
<!-- Spring JCL is unnecessary and conflicts with commons-logging when both are on classpath -->
<exclusion>
<groupId>org.springframework</groupId>
<artifactId>spring-jcl</artifactId>
</exclusion>
</exclusions>
</dependency>

@@ -80,14 +85,6 @@
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-web</artifactId>
</dependency>

<dependency>
<groupId>org.apache.commons</groupId>

@@ -31,7 +31,7 @@
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>properties-maven-plugin</artifactId>
<version>1.1.0</version>
<version>1.2.1</version>
<executions>
<execution>
<phase>initialize</phase>
@@ -293,14 +293,6 @@
<artifactId>spring-expression</artifactId>
<version>${spring.version}</version>
</dependency>

<!-- Specify the version of json-smart we want to use.
This solves a version mismatch between nimbus-jose-jwt and json-path below. -->
<!--<dependency>
<groupId>net.minidev</groupId>
<artifactId>json-smart</artifactId>
<version>2.5.0</version>
</dependency>-->
</dependencies>
</dependencyManagement>

@@ -343,6 +335,18 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-actuator</artifactId>
<version>${spring-boot.version}</version>
<exclusions>
<!-- Use version brought in by spring-boot-starter-web above -->
<exclusion>
<groupId>io.micrometer</groupId>
<artifactId>micrometer-observation</artifactId>
</exclusion>
<!-- Use version brought in by spring-boot-starter-web above -->
<exclusion>
<groupId>io.micrometer</groupId>
<artifactId>micrometer-commons</artifactId>
</exclusion>
</exclusions>
</dependency>

<dependency>
@@ -419,7 +423,7 @@
<dependency>
<groupId>org.webjars.npm</groupId>
<artifactId>json-editor__json-editor</artifactId>
<version>2.6.1</version>
<version>2.15.1</version>
</dependency>
<!-- Also pull in current version of Bootstrap via WebJars.
This is used by BOTH our HAL Browser and our OAI-PMH interface.
@@ -437,7 +441,7 @@
<artifactId>spring-boot-starter-security</artifactId>
<version>${spring-boot.version}</version>
<exclusions>
<!-- Later version brought in by spring-boot-starter-web above -->
<!-- Use version brought in by spring-boot-starter-web above -->
<exclusion>
<groupId>io.micrometer</groupId>
<artifactId>micrometer-observation</artifactId>
@@ -456,6 +460,11 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-logging</artifactId>
</exclusion>
<!-- Spring JCL is unnecessary and conflicts with commons-logging when both are on classpath -->
<exclusion>
<groupId>org.springframework</groupId>
<artifactId>spring-jcl</artifactId>
</exclusion>
</exclusions>
</dependency>

@@ -543,7 +552,7 @@
<dependency>
<groupId>net.minidev</groupId>
<artifactId>json-smart</artifactId>
<version>2.5.0</version>
<version>2.5.1</version>
</dependency>

<dependency>
@@ -581,7 +590,7 @@
<dependency>
<groupId>org.apache.httpcomponents.client5</groupId>
<artifactId>httpclient5</artifactId>
<version>5.3.1</version>
<version>5.4.1</version>
<scope>test</scope>
</dependency>
<dependency>
@@ -593,6 +602,13 @@
<dependency>
<groupId>com.jayway.jsonpath</groupId>
<artifactId>json-path</artifactId>
<exclusions>
<!-- We pull in a later version above -->
<exclusion>
<groupId>net.minidev</groupId>
<artifactId>json-smart</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.jayway.jsonpath</groupId>

@@ -220,7 +220,7 @@ public class AuthenticationRestController implements InitializingBean {
* @return ResponseEntity
*/
@RequestMapping(value = "/login", method = { RequestMethod.GET, RequestMethod.PUT, RequestMethod.PATCH,
RequestMethod.DELETE })
RequestMethod.DELETE })
public ResponseEntity login() {
return ResponseEntity.status(HttpStatus.METHOD_NOT_ALLOWED).body("Only POST is allowed for login requests.");
}

@@ -43,7 +43,7 @@ import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

/**
* This controller will handle all the incoming calls on the api/code/items/{uuid}/owningCollection endpoint
* This controller will handle all the incoming calls on the api/core/items/{uuid}/owningCollection endpoint
* where the uuid corresponds to the item of which you want to edit the owning collection.
*/
@RestController

@@ -7,11 +7,12 @@
*/
package org.dspace.app.rest;

import static org.dspace.app.rest.utils.HttpHeadersInitializer.CONTENT_DISPOSITION;
import static org.dspace.app.rest.utils.HttpHeadersInitializer.CONTENT_DISPOSITION_INLINE;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
@@ -21,10 +22,12 @@ import javax.xml.transform.stream.StreamResult;
import jakarta.servlet.ServletException;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.parameter.SearchFilter;
import org.dspace.app.rest.utils.ContextUtil;
import org.dspace.app.rest.utils.RestDiscoverQueryBuilder;
import org.dspace.app.rest.utils.ScopeResolver;
import org.dspace.app.util.SyndicationFeed;
import org.dspace.app.util.factory.UtilServiceFactory;
import org.dspace.app.util.service.OpenSearchService;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
@@ -49,6 +52,9 @@ import org.dspace.discovery.configuration.DiscoverySortConfiguration;
import org.dspace.discovery.configuration.DiscoverySortFieldConfiguration;
import org.dspace.discovery.indexobject.IndexableItem;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.GetMapping;
@@ -86,22 +92,28 @@ public class OpenSearchController {
@Autowired
private ScopeResolver scopeResolver;

@Autowired
private RestDiscoverQueryBuilder restDiscoverQueryBuilder;

/**
* This method provides the OpenSearch query on the path /search
* It will pass the result as an OpenSearchDocument directly to the client
*/
@GetMapping("/search")
public void search(HttpServletRequest request,
HttpServletResponse response,
@RequestParam(name = "query", required = false) String query,
@RequestParam(name = "start", required = false) Integer start,
@RequestParam(name = "rpp", required = false) Integer count,
@RequestParam(name = "format", required = false) String format,
@RequestParam(name = "sort", required = false) String sort,
@RequestParam(name = "sort_direction", required = false) String sortDirection,
@RequestParam(name = "scope", required = false) String dsoObject,
Model model) throws IOException, ServletException {
HttpServletResponse response,
@RequestParam(name = "query", required = false) String query,
@RequestParam(name = "start", required = false) Integer start,
@RequestParam(name = "rpp", required = false) Integer count,
@RequestParam(name = "format", required = false) String format,
@RequestParam(name = "sort", required = false) String sort,
@RequestParam(name = "sort_direction", required = false) String sortDirection,
@RequestParam(name = "scope", required = false) String dsoObject,
@RequestParam(name = "configuration", required = false) String configuration,
List<SearchFilter> searchFilters,
Model model) throws IOException, ServletException {
context = ContextUtil.obtainContext(request);

if (start == null) {
start = 0;
}
@@ -133,84 +145,103 @@ public class OpenSearchController {
// then the rest - we are processing the query
IndexableObject container = null;

// support pagination parameters
DiscoverQuery queryArgs = new DiscoverQuery();
if (query == null) {
query = "";
} else {
queryArgs.setQuery(query);
DiscoverQuery queryArgs;

DiscoveryConfiguration discoveryConfiguration = null;
if (StringUtils.isNotBlank(configuration)) {
discoveryConfiguration = searchConfigurationService.getDiscoveryConfiguration(configuration);
}
queryArgs.setStart(start);
queryArgs.setMaxResults(count);
queryArgs.setDSpaceObjectFilter(IndexableItem.TYPE);
if (discoveryConfiguration == null) {
discoveryConfiguration = searchConfigurationService.getDiscoveryConfiguration("default");
}
// If we have search filters, use RestDiscoverQueryBuilder.
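// Illustration (assuming DSpace's usual f.<name>=<value>,<operator> request
// parameter convention): a call like /search?query=dog&f.author=Smith,contains
// arrives here with a non-empty SearchFilter list and is routed through
// RestDiscoverQueryBuilder instead of the legacy query assembly below.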
if (searchFilters != null && searchFilters.size() > 0) {
IndexableObject scope = scopeResolver.resolveScope(context, dsoObject);
Sort pageSort = sort == null || sortDirection == null
? Sort.unsorted()
: Sort.by(new Sort.Order(Sort.Direction.fromString(sortDirection), sort));
// TODO: the page size must be at least 1, so fall back to an arbitrary default of 10 when rpp is missing or non-positive
Pageable page = PageRequest.of(start, count > 0 ? count : 10, pageSort);
|
||||
queryArgs = restDiscoverQueryBuilder.buildQuery(context, scope,
|
||||
discoveryConfiguration, query, searchFilters, IndexableItem.TYPE, page);
|
||||
queryArgs.setFacetMinCount(-1);
|
||||
} else { // Else, use the older behavior.
|
||||
// support pagination parameters
|
||||
queryArgs = new DiscoverQuery();
|
||||
if (query == null) {
|
||||
query = "";
|
||||
} else {
|
||||
queryArgs.setQuery(query);
|
||||
}
|
||||
queryArgs.setStart(start);
|
||||
queryArgs.setMaxResults(count);
|
||||
queryArgs.setDSpaceObjectFilter(IndexableItem.TYPE);
|
||||
|
||||
if (sort != null) {
|
||||
DiscoveryConfiguration discoveryConfiguration =
|
||||
searchConfigurationService.getDiscoveryConfiguration("");
|
||||
if (discoveryConfiguration != null) {
|
||||
DiscoverySortConfiguration searchSortConfiguration = discoveryConfiguration
|
||||
.getSearchSortConfiguration();
|
||||
if (searchSortConfiguration != null) {
|
||||
DiscoverySortFieldConfiguration sortFieldConfiguration = searchSortConfiguration
|
||||
.getSortFieldConfiguration(sort);
|
||||
if (sortFieldConfiguration != null) {
|
||||
String sortField = searchService
|
||||
.toSortFieldIndex(sortFieldConfiguration.getMetadataField(),
|
||||
sortFieldConfiguration.getType());
|
||||
if (sort != null) {
|
||||
if (discoveryConfiguration != null) {
|
||||
DiscoverySortConfiguration searchSortConfiguration = discoveryConfiguration
|
||||
.getSearchSortConfiguration();
|
||||
if (searchSortConfiguration != null) {
|
||||
DiscoverySortFieldConfiguration sortFieldConfiguration = searchSortConfiguration
|
||||
.getSortFieldConfiguration(sort);
|
||||
if (sortFieldConfiguration != null) {
|
||||
String sortField = searchService
|
||||
.toSortFieldIndex(sortFieldConfiguration.getMetadataField(),
|
||||
sortFieldConfiguration.getType());
|
||||
|
||||
if (sortDirection != null && sortDirection.equals("DESC")) {
|
||||
queryArgs.setSortField(sortField, SORT_ORDER.desc);
|
||||
if (sortDirection != null && sortDirection.equals("DESC")) {
|
||||
queryArgs.setSortField(sortField, SORT_ORDER.desc);
|
||||
} else {
|
||||
queryArgs.setSortField(sortField, SORT_ORDER.asc);
|
||||
}
|
||||
} else {
|
||||
queryArgs.setSortField(sortField, SORT_ORDER.asc);
|
||||
throw new IllegalArgumentException(sort + " is not a valid sort field");
|
||||
}
|
||||
} else {
|
||||
throw new IllegalArgumentException(sort + " is not a valid sort field");
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// this is the default sort so we want to switch this to date accessioned
|
||||
queryArgs.setSortField("dc.date.accessioned_dt", SORT_ORDER.desc);
|
||||
}
|
||||
} else {
|
||||
// this is the default sort so we want to switch this to date accessioned
|
||||
queryArgs.setSortField("dc.date.accessioned_dt", SORT_ORDER.desc);
|
||||
}
|
||||
|
||||
if (dsoObject != null) {
|
||||
container = scopeResolver.resolveScope(context, dsoObject);
|
||||
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
|
||||
.getDiscoveryConfiguration(context, container);
|
||||
queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId());
|
||||
queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries()
|
||||
.toArray(
|
||||
new String[discoveryConfiguration.getDefaultFilterQueries()
|
||||
.size()]));
|
||||
if (dsoObject != null) {
|
||||
container = scopeResolver.resolveScope(context, dsoObject);
|
||||
discoveryConfiguration = searchConfigurationService
|
||||
.getDiscoveryConfigurationByNameOrIndexableObject(context, "site", container);
|
||||
queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId());
|
||||
queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries()
|
||||
.toArray(
|
||||
new String[discoveryConfiguration.getDefaultFilterQueries()
|
||||
.size()]));
|
||||
}
|
||||
}

// Perform the search
DiscoverResult qResults = null;
try {
    qResults = SearchUtils.getSearchService().search(context,
            container, queryArgs);
} catch (SearchServiceException e) {
    log.error(LogHelper.getHeader(context, "opensearch", "query="
            + queryArgs.getQuery()
            + ",error=" + e.getMessage()), e);
    throw new RuntimeException(e.getMessage(), e);
}
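
The catch clause is a common servlet-layer pattern: log the failure with enough context to reproduce it, then rethrow the checked SearchServiceException as an unchecked exception so the web layer turns it into an error response. A minimal sketch of the same pattern (plain JDK logging instead of Log4j; the search stub is invented):

import java.util.logging.Logger;

public class LogAndRethrowSketch {
    private static final Logger log = Logger.getLogger("opensearch");

    static String search(String query) {
        try {
            return doSearch(query); // stand-in for SearchUtils.getSearchService().search(...)
        } catch (Exception e) {
            // log with enough context to find the failing query, then rethrow
            // unchecked so the caller surfaces it as a server error
            log.severe("opensearch failed, query=" + query + ", error=" + e.getMessage());
            throw new RuntimeException(e.getMessage(), e);
        }
    }

    private static String doSearch(String query) throws Exception {
        if (query == null || query.isBlank()) {
            throw new Exception("empty query");
        }
        return "ok";
    }

    public static void main(String[] args) {
        System.out.println(search("dspace"));
    }
}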

// Log
log.info("opensearch done, query=\"" + query + "\",results="
        + qResults.getTotalSearchResults());

// format and return results
-Map<String, String> labelMap = getLabels(request);
List<IndexableObject> dsoResults = qResults.getIndexableObjects();
Document resultsDoc = openSearchService.getResultsDoc(context, format, query,
        (int) qResults.getTotalSearchResults(), qResults.getStart(),
-       qResults.getMaxResults(), container, dsoResults, labelMap);
+       qResults.getMaxResults(), container, dsoResults);
try {
    Transformer xf = TransformerFactory.newInstance().newTransformer();
    response.setContentType(openSearchService.getContentType(format));
    response.addHeader(CONTENT_DISPOSITION, CONTENT_DISPOSITION_INLINE);
    xf.transform(new DOMSource(resultsDoc),
            new StreamResult(response.getWriter()));
} catch (TransformerException e) {
    log.error(e);
    throw new ServletException(e.toString());
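
Serialization here is the standard JAXP identity transform from a DOM tree to the response writer. A self-contained sketch of the same technique, writing a tiny document to a StringWriter in place of response.getWriter() (standard JDK classes only; the document content is invented):

import java.io.StringWriter;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

public class DomToWriterSketch {
    public static void main(String[] args) throws Exception {
        // Build a minimal DOM document in place of the OpenSearch results doc.
        Document doc = DocumentBuilderFactory.newInstance()
                .newDocumentBuilder().newDocument();
        Element root = doc.createElement("feed");
        root.setTextContent("results");
        doc.appendChild(root);

        // Identity transform: no stylesheet, just DOM -> character stream.
        Transformer xf = TransformerFactory.newInstance().newTransformer();
        StringWriter out = new StringWriter(); // stands in for response.getWriter()
        xf.transform(new DOMSource(doc), new StreamResult(out));
        System.out.println(out);
    }
}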

@@ -231,7 +262,7 @@ public class OpenSearchController {
 */
@GetMapping("/service")
public void service(HttpServletRequest request,
                    HttpServletResponse response) throws IOException {
    log.debug("Show OpenSearch Service document");
    if (openSearchService == null) {
        openSearchService = UtilServiceFactory.getInstance().getOpenSearchService();

@@ -240,7 +271,7 @@ public class OpenSearchController {
    String svcDescrip = openSearchService.getDescription(null);
    log.debug("opensearchdescription is " + svcDescrip);
    response.setContentType(openSearchService
            .getContentType("opensearchdescription"));
    response.setContentLength(svcDescrip.length());
    response.getWriter().write(svcDescrip);
} else {

@@ -274,20 +305,4 @@ public class OpenSearchController {
    public void setOpenSearchService(OpenSearchService oSS) {
        openSearchService = oSS;
    }

-   /**
-    * Internal method to get labels for the returned document
-    */
-   private Map<String, String> getLabels(HttpServletRequest request) {
-       // TODO: get strings from translation file or configuration
-       Map<String, String> labelMap = new HashMap<String, String>();
-       labelMap.put(SyndicationFeed.MSG_UNTITLED, "notitle");
-       labelMap.put(SyndicationFeed.MSG_LOGO_TITLE, "logo.title");
-       labelMap.put(SyndicationFeed.MSG_FEED_DESCRIPTION, "general-feed.description");
-       for (String selector : SyndicationFeed.getDescriptionSelectors()) {
-           labelMap.put("metadata." + selector, selector);
-       }
-       return labelMap;
-   }
}

@@ -24,9 +24,11 @@ import org.dspace.app.rest.model.DSpaceObjectRest;
import org.dspace.app.rest.utils.ContextUtil;
import org.dspace.app.rest.utils.DSpaceObjectUtils;
import org.dspace.app.rest.utils.Utils;
+import org.dspace.authorize.AuthorizeException;
+import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.discovery.SearchServiceException;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.hateoas.Link;

@@ -65,6 +67,9 @@ public class UUIDLookupRestController implements InitializingBean {
    @Autowired
    private DiscoverableEndpointsService discoverableEndpointsService;

+   @Autowired
+   private AuthorizeService authorizeService;
+
    @Autowired
    private ConverterService converter;

@@ -85,13 +90,14 @@ public class UUIDLookupRestController implements InitializingBean {
    public void getDSObyIdentifier(HttpServletRequest request,
                                   HttpServletResponse response,
                                   @RequestParam(PARAM) UUID uuid)
-           throws IOException, SQLException, SearchServiceException {
+           throws IOException, SQLException, AuthorizeException {

        Context context = null;
        try {
            context = ContextUtil.obtainContext(request);
            DSpaceObject dso = dspaceObjectUtil.findDSpaceObject(context, uuid);
            if (dso != null) {
+               authorizeService.authorizeAction(context, dso, Constants.READ);
                DSpaceObjectRest dsor = converter.toRest(dso, utils.obtainProjection());
                URI link = linkTo(dsor.getController(), dsor.getCategory(), dsor.getTypePlural()).slash(dsor.getId())
                        .toUri();
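
The added check closes an information leak: the object is now verified for READ permission before any REST representation or redirect link is built. A self-contained sketch of the same order of operations (the types and object store are invented; DSpace resolves these through DSpaceObjectUtils and AuthorizeService):

import java.util.Map;
import java.util.UUID;

public class ReadCheckSketch {
    static class AuthorizeException extends Exception { }

    // Invented store and permission flag standing in for the repository.
    private static final Map<UUID, String> OBJECTS =
            Map.of(UUID.fromString("11111111-1111-1111-1111-111111111111"), "items");

    static String lookup(UUID uuid, boolean callerCanRead) throws AuthorizeException {
        String typePlural = OBJECTS.get(uuid);
        if (typePlural != null) {
            if (!callerCanRead) {
                // fail before building the redirect link, so an unauthorized
                // caller cannot even confirm the object exists
                throw new AuthorizeException();
            }
            return "/api/core/" + typePlural + "/" + uuid;
        }
        return null; // the controller answers 404 in this case
    }

    public static void main(String[] args) throws AuthorizeException {
        System.out.println(lookup(
                UUID.fromString("11111111-1111-1111-1111-111111111111"), true));
    }
}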

@@ -13,7 +13,8 @@ import java.util.LinkedList;
import java.util.List;

import jakarta.servlet.http.HttpServletRequest;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.model.PageRest;
import org.dspace.app.rest.model.SearchEventRest;
import org.dspace.app.rest.model.SearchResultsRest;

@@ -31,7 +32,7 @@ import org.springframework.stereotype.Component;
@Component
public class SearchEventConverter {
    /* Log4j logger */
-   private static final Logger log = Logger.getLogger(SearchEventConverter.class);
+   private static final Logger log = LogManager.getLogger(SearchEventConverter.class);

    @Autowired
    private ScopeResolver scopeResolver;
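
The swap above completes the move from the Log4j 1.x API to Log4j 2, where loggers come from LogManager. A minimal usage sketch (assumes only the log4j-api dependency on the classpath):

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class Log4j2Sketch {
    // LogManager.getLogger replaces the Log4j 1.x Logger.getLogger factory.
    private static final Logger log = LogManager.getLogger(Log4j2Sketch.class);

    public static void main(String[] args) {
        log.info("search event converted");
        // Log4j 2 also brings parameterized messages, which skip string
        // concatenation entirely when the level is disabled:
        log.debug("scope {} resolved to {}", "uuid", "community");
    }
}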

@@ -66,8 +67,8 @@ public class SearchEventConverter {
        if (searchEventRest.getScope() != null) {
            IndexableObject scopeObject =
                scopeResolver.resolveScope(context, String.valueOf(searchEventRest.getScope()));
-           if (scopeObject instanceof DSpaceObject) {
-               usageSearchEvent.setScope((DSpaceObject) scopeObject);
+           if (scopeObject != null && scopeObject.getIndexedObject() instanceof DSpaceObject) {
+               usageSearchEvent.setScope((DSpaceObject) scopeObject.getIndexedObject());
            }
        }
        usageSearchEvent.setConfiguration(searchEventRest.getConfiguration());
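
The old test could never succeed once resolveScope returned an IndexableObject wrapper rather than the DSpaceObject itself, and it also missed the null case of an unresolvable scope. A self-contained sketch of the corrected check (simplified stand-in types; the real IndexableObject interface lives in the DSpace discovery packages):

public class ScopeCheckSketch {
    // Simplified stand-ins: the real IndexableObject wraps the indexed
    // entity and exposes it via getIndexedObject().
    interface IndexableObject { Object getIndexedObject(); }
    static class DSpaceObject { }

    static DSpaceObject scopeOf(IndexableObject scopeObject) {
        // Before the fix the test was `scopeObject instanceof DSpaceObject`,
        // which cannot match a wrapper; the wrapper itself must also be
        // null-checked for unresolvable scopes.
        if (scopeObject != null && scopeObject.getIndexedObject() instanceof DSpaceObject) {
            return (DSpaceObject) scopeObject.getIndexedObject();
        }
        return null;
    }

    public static void main(String[] args) {
        DSpaceObject dso = new DSpaceObject();
        System.out.println(scopeOf(() -> dso) == dso); // true
        System.out.println(scopeOf(null));             // null
    }
}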

@@ -10,6 +10,7 @@ package org.dspace.app.rest.converter;
import java.sql.SQLException;

import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.model.ScopeEnum;
import org.dspace.app.rest.model.SubmissionSectionRest;
+import org.dspace.app.rest.model.SubmissionVisibilityRest;
import org.dspace.app.rest.model.VisibilityEnum;

@@ -41,6 +42,7 @@ public class SubmissionSectionConverter implements DSpaceConverter<SubmissionSte
        sp.setHeader(step.getHeading());
        sp.setSectionType(step.getType());
        sp.setId(step.getId());
        sp.setScope(ScopeEnum.fromString(step.getScope()));
        sp.setVisibility(new SubmissionVisibilityRest(VisibilityEnum.fromString(step.getVisibility()),
                VisibilityEnum.fromString(step.getVisibilityOutside())));
        return sp;
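
The setVisibility call above maps the two string-typed visibility settings from the submission configuration into typed enum values. A hypothetical mirror of that fromString pattern (the enum constants and record here are invented for illustration; the real types are VisibilityEnum and SubmissionVisibilityRest in org.dspace.app.rest.model):

public class VisibilitySketch {
    enum Visibility {
        READONLY, HIDDEN;

        // null-tolerant string-to-enum mapping, as used by the converter
        static Visibility fromString(String value) {
            return value == null ? null : valueOf(value.toUpperCase());
        }
    }

    record SubmissionVisibility(Visibility main, Visibility outside) { }

    public static void main(String[] args) {
        // mirrors new SubmissionVisibilityRest(fromString(...), fromString(...))
        SubmissionVisibility v = new SubmissionVisibility(
                Visibility.fromString("hidden"), Visibility.fromString(null));
        System.out.println(v);
    }
}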

@@ -7,6 +7,8 @@
 */
package org.dspace.app.rest.converter.query;

+import static org.dspace.app.rest.model.SearchConfigurationRest.Filter.OPERATOR_QUERY;
+
import java.util.LinkedList;
import java.util.List;

@@ -16,23 +18,23 @@ import org.dspace.app.rest.model.query.RestSearchOperator;
import org.dspace.app.rest.parameter.SearchFilter;

/**
- * This method will traverse a list of SearchFilters and transform any SearchFilters with an operator
- * this is equal to 'Query' into a SearchFilter that has a standard DSpace operator like 'contains'
+ * Utility class for transforming a list of SearchFilters. Each SearchFilter with an operator set to 'query'
+ * is converted into a SearchFilter with a standard DSpace operator like 'contains'.
 */
public class SearchQueryConverter {

    /**
-     * This method traverses the list of SearchFilters and transforms all of those that contain 'Query'
+     * This method traverses the list of SearchFilters and transforms all of those with 'query'
      * as the operator into a standard DSpace SearchFilter
      *
-     * @param searchFilters The list of SearchFilters to be used
-     * @return A list of transformed SearchFilters
+     * @param searchFilters list of SearchFilters to be transformed
+     * @return list of transformed SearchFilters
      */
    public List<SearchFilter> convert(List<SearchFilter> searchFilters) {

        List<SearchFilter> transformedSearchFilters = new LinkedList<>();
        for (SearchFilter searchFilter : CollectionUtils.emptyIfNull(searchFilters)) {
-           if (StringUtils.equals(searchFilter.getOperator(), "query")) {
+           if (StringUtils.equals(searchFilter.getOperator(), OPERATOR_QUERY)) {
                SearchFilter transformedSearchFilter = convertQuerySearchFilterIntoStandardSearchFilter(searchFilter);
                transformedSearchFilters.add(transformedSearchFilter);
            } else {
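
With the OPERATOR_QUERY constant in place, convert() rewrites only query-operator filters and passes everything else through. A runnable sketch of that flow (the operator-inference rule below is a simplification invented for illustration; the real rules live in RestSearchOperator.forQuery):

import java.util.LinkedList;
import java.util.List;

public class QueryFilterSketch {
    record SearchFilter(String name, String operator, String value) { }

    // Simplified stand-in for RestSearchOperator.forQuery/extractValue:
    // "*foo*" is treated as contains, anything else as equals.
    static SearchFilter convert(SearchFilter f) {
        if (!"query".equals(f.operator())) {
            return f; // non-query filters pass through unchanged
        }
        String v = f.value();
        if (v.startsWith("*") && v.endsWith("*") && v.length() > 1) {
            return new SearchFilter(f.name(), "contains", v.substring(1, v.length() - 1));
        }
        return new SearchFilter(f.name(), "equals", v);
    }

    public static void main(String[] args) {
        List<SearchFilter> filters = new LinkedList<>(List.of(
                new SearchFilter("title", "query", "*open*"),
                new SearchFilter("author", "equals", "Smith")));
        filters.replaceAll(QueryFilterSketch::convert);
        filters.forEach(System.out::println);
    }
}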
@@ -46,10 +48,10 @@ public class SearchQueryConverter {
    /**
     * This method takes care of the conversion of a specific SearchFilter given to it
     *
-     * @param searchFilter The SearchFilter to be transformed
-     * @return The transformed SearchFilter
+     * @param searchFilter searchFilter to be transformed
+     * @return transformed SearchFilter
     */
-   public SearchFilter convertQuerySearchFilterIntoStandardSearchFilter(SearchFilter searchFilter) {
+   private SearchFilter convertQuerySearchFilterIntoStandardSearchFilter(SearchFilter searchFilter) {
        RestSearchOperator restSearchOperator = RestSearchOperator.forQuery(searchFilter.getValue());
        SearchFilter transformedSearchFilter = new SearchFilter(searchFilter.getName(),
            restSearchOperator.getDspaceOperator(), restSearchOperator.extractValue(searchFilter.getValue()));

@@ -18,9 +18,9 @@ import org.dspace.app.rest.RestResourceController;
 * @author Andrea Bollini (andrea.bollini at 4science.it)
 */
@LinksRest(links = {
    @LinkRest(method = "getEperson", name = AuthorizationRest.EPERSON),
    @LinkRest(method = "getFeature", name = AuthorizationRest.FEATURE),
    @LinkRest(method = "getObject", name = AuthorizationRest.OBJECT)
})
public class AuthorizationRest extends BaseObjectRest<String> {
    public static final String NAME = "authorization";

Some files were not shown because too many files have changed in this diff.