Merge branch 'main' into CST-12042-addSupportForThePrimaryBitstreamFlag

This commit is contained in:
Mykhaylo
2024-02-19 09:46:31 +01:00
437 changed files with 25551 additions and 15671 deletions

View File

@@ -21,11 +21,11 @@ jobs:
# Also specify version of Java to use (this can allow us to optionally run tests on multiple JDKs in future) # Also specify version of Java to use (this can allow us to optionally run tests on multiple JDKs in future)
matrix: matrix:
include: include:
# NOTE: Unit Tests include deprecated REST API v6 (as it has unit tests) # NOTE: Unit Tests include a retry for occasionally failing tests
# - surefire.rerunFailingTestsCount => try again for flakey tests, and keep track of/report on number of retries # - surefire.rerunFailingTestsCount => try again for flakey tests, and keep track of/report on number of retries
- type: "Unit Tests" - type: "Unit Tests"
java: 11 java: 11
mvnflags: "-DskipUnitTests=false -Pdspace-rest -Dsurefire.rerunFailingTestsCount=2" mvnflags: "-DskipUnitTests=false -Dsurefire.rerunFailingTestsCount=2"
resultsdir: "**/target/surefire-reports/**" resultsdir: "**/target/surefire-reports/**"
# NOTE: ITs skip all code validation checks, as they are already done by Unit Test job. # NOTE: ITs skip all code validation checks, as they are already done by Unit Test job.
# - enforcer.skip => Skip maven-enforcer-plugin rules # - enforcer.skip => Skip maven-enforcer-plugin rules
@@ -45,7 +45,7 @@ jobs:
steps: steps:
# https://github.com/actions/checkout # https://github.com/actions/checkout
- name: Checkout codebase - name: Checkout codebase
uses: actions/checkout@v3 uses: actions/checkout@v4
# https://github.com/actions/setup-java # https://github.com/actions/setup-java
- name: Install JDK ${{ matrix.java }} - name: Install JDK ${{ matrix.java }}
@@ -53,16 +53,7 @@ jobs:
with: with:
java-version: ${{ matrix.java }} java-version: ${{ matrix.java }}
distribution: 'temurin' distribution: 'temurin'
cache: 'maven'
# https://github.com/actions/cache
- name: Cache Maven dependencies
uses: actions/cache@v3
with:
# Cache entire ~/.m2/repository
path: ~/.m2/repository
# Cache key is hash of all pom.xml files. Therefore any changes to POMs will invalidate cache
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
restore-keys: ${{ runner.os }}-maven-
# Run parallel Maven builds based on the above 'strategy.matrix' # Run parallel Maven builds based on the above 'strategy.matrix'
- name: Run Maven ${{ matrix.type }} - name: Run Maven ${{ matrix.type }}
@@ -96,7 +87,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout - name: Checkout
uses: actions/checkout@v3 uses: actions/checkout@v4
# Download artifacts from previous 'tests' job # Download artifacts from previous 'tests' job
- name: Download coverage artifacts - name: Download coverage artifacts
@@ -108,10 +99,13 @@ jobs:
# Retry action: https://github.com/marketplace/actions/retry-action # Retry action: https://github.com/marketplace/actions/retry-action
# Codecov action: https://github.com/codecov/codecov-action # Codecov action: https://github.com/codecov/codecov-action
- name: Upload coverage to Codecov.io - name: Upload coverage to Codecov.io
uses: Wandalen/wretry.action@v1.0.36 uses: Wandalen/wretry.action@v1.3.0
with: with:
action: codecov/codecov-action@v3 action: codecov/codecov-action@v3
# Try upload 5 times max # Ensure codecov-action throws an error when it fails to upload
with: |
fail_ci_if_error: true
# Try re-running action 5 times max
attempt_limit: 5 attempt_limit: 5
# Run again in 30 seconds # Run again in 30 seconds
attempt_delay: 30000 attempt_delay: 30000

View File

@@ -35,7 +35,7 @@ jobs:
steps: steps:
# https://github.com/actions/checkout # https://github.com/actions/checkout
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v3 uses: actions/checkout@v4
# https://github.com/actions/setup-java # https://github.com/actions/setup-java
- name: Install JDK - name: Install JDK

View File

@@ -3,6 +3,7 @@ name: Docker images
# Run this Build for all pushes to 'main' or maintenance branches, or tagged releases. # Run this Build for all pushes to 'main' or maintenance branches, or tagged releases.
# Also run for PRs to ensure PR doesn't break Docker build process # Also run for PRs to ensure PR doesn't break Docker build process
# NOTE: uses "reusable-docker-build.yml" to actually build each of the Docker images.
on: on:
push: push:
branches: branches:
@@ -15,83 +16,22 @@ on:
permissions: permissions:
contents: read # to fetch code (actions/checkout) contents: read # to fetch code (actions/checkout)
# Define shared environment variables for all jobs below
env:
# Define tags to use for Docker images based on Git tags/branches (for docker/metadata-action)
# For a new commit on default branch (main), use the literal tag 'latest' on Docker image.
# For a new commit on other branches, use the branch name as the tag for Docker image.
# For a new tag, copy that tag name as the tag for Docker image.
IMAGE_TAGS: |
type=raw,value=latest,enable=${{ endsWith(github.ref, github.event.repository.default_branch) }}
type=ref,event=branch,enable=${{ !endsWith(github.ref, github.event.repository.default_branch) }}
type=ref,event=tag
# Define default tag "flavor" for docker/metadata-action per
# https://github.com/docker/metadata-action#flavor-input
# We manage the 'latest' tag ourselves to the 'main' branch (see settings above)
TAGS_FLAVOR: |
latest=false
# Architectures / Platforms for which we will build Docker images
# If this is a PR, we ONLY build for AMD64. For PRs we only do a sanity check test to ensure Docker builds work.
# If this is NOT a PR (e.g. a tag or merge commit), also build for ARM64. NOTE: The ARM64 build takes MUCH
# longer (around 45mins or so) which is why we only run it when pushing a new Docker image.
PLATFORMS: linux/amd64${{ github.event_name != 'pull_request' && ', linux/arm64' || '' }}
jobs: jobs:
#################################################### ####################################################
# Build/Push the 'dspace/dspace-dependencies' image. # Build/Push the 'dspace/dspace-dependencies' image.
# This image is used by all other jobs. # This image is used by all other DSpace build jobs.
#################################################### ####################################################
dspace-dependencies: dspace-dependencies:
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace' # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
if: github.repository == 'dspace/dspace' if: github.repository == 'dspace/dspace'
runs-on: ubuntu-latest uses: ./.github/workflows/reusable-docker-build.yml
steps:
# https://github.com/actions/checkout
- name: Checkout codebase
uses: actions/checkout@v3
# https://github.com/docker/setup-buildx-action
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
# https://github.com/docker/setup-qemu-action
- name: Set up QEMU emulation to build for multiple architectures
uses: docker/setup-qemu-action@v2
# https://github.com/docker/login-action
- name: Login to DockerHub
# Only login if not a PR, as PRs only trigger a Docker build and not a push
if: github.event_name != 'pull_request'
uses: docker/login-action@v2
with: with:
username: ${{ secrets.DOCKER_USERNAME }} build_id: dspace-dependencies
password: ${{ secrets.DOCKER_ACCESS_TOKEN }} image_name: dspace/dspace-dependencies
dockerfile_path: ./Dockerfile.dependencies
# https://github.com/docker/metadata-action secrets:
# Get Metadata for docker_build_deps step below DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-dependencies' image DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}
id: meta_build_deps
uses: docker/metadata-action@v4
with:
images: dspace/dspace-dependencies
tags: ${{ env.IMAGE_TAGS }}
flavor: ${{ env.TAGS_FLAVOR }}
# https://github.com/docker/build-push-action
- name: Build and push 'dspace-dependencies' image
id: docker_build_deps
uses: docker/build-push-action@v4
with:
context: .
file: ./Dockerfile.dependencies
platforms: ${{ env.PLATFORMS }}
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
# but we ONLY do an image push to DockerHub if it's NOT a PR
push: ${{ github.event_name != 'pull_request' }}
# Use tags / labels provided by 'docker/metadata-action' above
tags: ${{ steps.meta_build_deps.outputs.tags }}
labels: ${{ steps.meta_build_deps.outputs.labels }}
####################################### #######################################
# Build/Push the 'dspace/dspace' image # Build/Push the 'dspace/dspace' image
@@ -101,52 +41,18 @@ jobs:
if: github.repository == 'dspace/dspace' if: github.repository == 'dspace/dspace'
# Must run after 'dspace-dependencies' job above # Must run after 'dspace-dependencies' job above
needs: dspace-dependencies needs: dspace-dependencies
runs-on: ubuntu-latest uses: ./.github/workflows/reusable-docker-build.yml
steps:
# https://github.com/actions/checkout
- name: Checkout codebase
uses: actions/checkout@v3
# https://github.com/docker/setup-buildx-action
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
# https://github.com/docker/setup-qemu-action
- name: Set up QEMU emulation to build for multiple architectures
uses: docker/setup-qemu-action@v2
# https://github.com/docker/login-action
- name: Login to DockerHub
# Only login if not a PR, as PRs only trigger a Docker build and not a push
if: github.event_name != 'pull_request'
uses: docker/login-action@v2
with: with:
username: ${{ secrets.DOCKER_USERNAME }} build_id: dspace
password: ${{ secrets.DOCKER_ACCESS_TOKEN }} image_name: dspace/dspace
dockerfile_path: ./Dockerfile
# Get Metadata for docker_build step below secrets:
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace' image DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
id: meta_build DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}
uses: docker/metadata-action@v4 # Enable redeploy of sandbox & demo if the branch for this image matches the deployment branch of
with: # these sites as specified in reusable-docker-build.xml
images: dspace/dspace REDEPLOY_SANDBOX_URL: ${{ secrets.REDEPLOY_SANDBOX_URL }}
tags: ${{ env.IMAGE_TAGS }} REDEPLOY_DEMO_URL: ${{ secrets.REDEPLOY_DEMO_URL }}
flavor: ${{ env.TAGS_FLAVOR }}
- name: Build and push 'dspace' image
id: docker_build
uses: docker/build-push-action@v4
with:
context: .
file: ./Dockerfile
platforms: ${{ env.PLATFORMS }}
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
# but we ONLY do an image push to DockerHub if it's NOT a PR
push: ${{ github.event_name != 'pull_request' }}
# Use tags / labels provided by 'docker/metadata-action' above
tags: ${{ steps.meta_build.outputs.tags }}
labels: ${{ steps.meta_build.outputs.labels }}
############################################################# #############################################################
# Build/Push the 'dspace/dspace' image ('-test' tag) # Build/Push the 'dspace/dspace' image ('-test' tag)
@@ -156,55 +62,17 @@ jobs:
if: github.repository == 'dspace/dspace' if: github.repository == 'dspace/dspace'
# Must run after 'dspace-dependencies' job above # Must run after 'dspace-dependencies' job above
needs: dspace-dependencies needs: dspace-dependencies
runs-on: ubuntu-latest uses: ./.github/workflows/reusable-docker-build.yml
steps:
# https://github.com/actions/checkout
- name: Checkout codebase
uses: actions/checkout@v3
# https://github.com/docker/setup-buildx-action
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
# https://github.com/docker/setup-qemu-action
- name: Set up QEMU emulation to build for multiple architectures
uses: docker/setup-qemu-action@v2
# https://github.com/docker/login-action
- name: Login to DockerHub
# Only login if not a PR, as PRs only trigger a Docker build and not a push
if: github.event_name != 'pull_request'
uses: docker/login-action@v2
with: with:
username: ${{ secrets.DOCKER_USERNAME }} build_id: dspace-test
password: ${{ secrets.DOCKER_ACCESS_TOKEN }} image_name: dspace/dspace
dockerfile_path: ./Dockerfile.test
# Get Metadata for docker_build_test step below
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-test' image
id: meta_build_test
uses: docker/metadata-action@v4
with:
images: dspace/dspace
tags: ${{ env.IMAGE_TAGS }}
# As this is a test/development image, its tags are all suffixed with "-test". Otherwise, it uses the same # As this is a test/development image, its tags are all suffixed with "-test". Otherwise, it uses the same
# tagging logic as the primary 'dspace/dspace' image above. # tagging logic as the primary 'dspace/dspace' image above.
flavor: ${{ env.TAGS_FLAVOR }} tags_flavor: suffix=-test
suffix=-test secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
- name: Build and push 'dspace-test' image DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}
id: docker_build_test
uses: docker/build-push-action@v4
with:
context: .
file: ./Dockerfile.test
platforms: ${{ env.PLATFORMS }}
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
# but we ONLY do an image push to DockerHub if it's NOT a PR
push: ${{ github.event_name != 'pull_request' }}
# Use tags / labels provided by 'docker/metadata-action' above
tags: ${{ steps.meta_build_test.outputs.tags }}
labels: ${{ steps.meta_build_test.outputs.labels }}
########################################### ###########################################
# Build/Push the 'dspace/dspace-cli' image # Build/Push the 'dspace/dspace-cli' image
@@ -214,52 +82,14 @@ jobs:
if: github.repository == 'dspace/dspace' if: github.repository == 'dspace/dspace'
# Must run after 'dspace-dependencies' job above # Must run after 'dspace-dependencies' job above
needs: dspace-dependencies needs: dspace-dependencies
runs-on: ubuntu-latest uses: ./.github/workflows/reusable-docker-build.yml
steps:
# https://github.com/actions/checkout
- name: Checkout codebase
uses: actions/checkout@v3
# https://github.com/docker/setup-buildx-action
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
# https://github.com/docker/setup-qemu-action
- name: Set up QEMU emulation to build for multiple architectures
uses: docker/setup-qemu-action@v2
# https://github.com/docker/login-action
- name: Login to DockerHub
# Only login if not a PR, as PRs only trigger a Docker build and not a push
if: github.event_name != 'pull_request'
uses: docker/login-action@v2
with: with:
username: ${{ secrets.DOCKER_USERNAME }} build_id: dspace-cli
password: ${{ secrets.DOCKER_ACCESS_TOKEN }} image_name: dspace/dspace-cli
dockerfile_path: ./Dockerfile.cli
# Get Metadata for docker_build_test step below secrets:
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-cli' image DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
id: meta_build_cli DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}
uses: docker/metadata-action@v4
with:
images: dspace/dspace-cli
tags: ${{ env.IMAGE_TAGS }}
flavor: ${{ env.TAGS_FLAVOR }}
- name: Build and push 'dspace-cli' image
id: docker_build_cli
uses: docker/build-push-action@v4
with:
context: .
file: ./Dockerfile.cli
platforms: ${{ env.PLATFORMS }}
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
# but we ONLY do an image push to DockerHub if it's NOT a PR
push: ${{ github.event_name != 'pull_request' }}
# Use tags / labels provided by 'docker/metadata-action' above
tags: ${{ steps.meta_build_cli.outputs.tags }}
labels: ${{ steps.meta_build_cli.outputs.labels }}
########################################### ###########################################
# Build/Push the 'dspace/dspace-solr' image # Build/Push the 'dspace/dspace-solr' image
@@ -267,52 +97,20 @@ jobs:
dspace-solr: dspace-solr:
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace' # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
if: github.repository == 'dspace/dspace' if: github.repository == 'dspace/dspace'
runs-on: ubuntu-latest uses: ./.github/workflows/reusable-docker-build.yml
steps:
# https://github.com/actions/checkout
- name: Checkout codebase
uses: actions/checkout@v3
# https://github.com/docker/setup-buildx-action
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
# https://github.com/docker/setup-qemu-action
- name: Set up QEMU emulation to build for multiple architectures
uses: docker/setup-qemu-action@v2
# https://github.com/docker/login-action
- name: Login to DockerHub
# Only login if not a PR, as PRs only trigger a Docker build and not a push
if: github.event_name != 'pull_request'
uses: docker/login-action@v2
with: with:
username: ${{ secrets.DOCKER_USERNAME }} build_id: dspace-solr
password: ${{ secrets.DOCKER_ACCESS_TOKEN }} image_name: dspace/dspace-solr
dockerfile_path: ./dspace/src/main/docker/dspace-solr/Dockerfile
# Get Metadata for docker_build_solr step below # Must pass solrconfigs to the Dockerfile so that it can find the required Solr config files
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-solr' image dockerfile_additional_contexts: 'solrconfigs=./dspace/solr/'
id: meta_build_solr secrets:
uses: docker/metadata-action@v4 DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with: DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}
images: dspace/dspace-solr # Enable redeploy of sandbox & demo SOLR instance whenever dspace-solr image changes for deployed branch.
tags: ${{ env.IMAGE_TAGS }} # These URLs MUST use different secrets than 'dspace/dspace' image build above as they are deployed separately.
flavor: ${{ env.TAGS_FLAVOR }} REDEPLOY_SANDBOX_URL: ${{ secrets.REDEPLOY_SANDBOX_SOLR_URL }}
REDEPLOY_DEMO_URL: ${{ secrets.REDEPLOY_DEMO_SOLR_URL }}
- name: Build and push 'dspace-solr' image
id: docker_build_solr
uses: docker/build-push-action@v4
with:
context: .
file: ./dspace/src/main/docker/dspace-solr/Dockerfile
platforms: ${{ env.PLATFORMS }}
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
# but we ONLY do an image push to DockerHub if it's NOT a PR
push: ${{ github.event_name != 'pull_request' }}
# Use tags / labels provided by 'docker/metadata-action' above
tags: ${{ steps.meta_build_solr.outputs.tags }}
labels: ${{ steps.meta_build_solr.outputs.labels }}
########################################################### ###########################################################
# Build/Push the 'dspace/dspace-postgres-pgcrypto' image # Build/Push the 'dspace/dspace-postgres-pgcrypto' image
@@ -320,53 +118,16 @@ jobs:
dspace-postgres-pgcrypto: dspace-postgres-pgcrypto:
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace' # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
if: github.repository == 'dspace/dspace' if: github.repository == 'dspace/dspace'
runs-on: ubuntu-latest uses: ./.github/workflows/reusable-docker-build.yml
steps:
# https://github.com/actions/checkout
- name: Checkout codebase
uses: actions/checkout@v3
# https://github.com/docker/setup-buildx-action
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
# https://github.com/docker/setup-qemu-action
- name: Set up QEMU emulation to build for multiple architectures
uses: docker/setup-qemu-action@v2
# https://github.com/docker/login-action
- name: Login to DockerHub
# Only login if not a PR, as PRs only trigger a Docker build and not a push
if: github.event_name != 'pull_request'
uses: docker/login-action@v2
with: with:
username: ${{ secrets.DOCKER_USERNAME }} build_id: dspace-postgres-pgcrypto
password: ${{ secrets.DOCKER_ACCESS_TOKEN }} image_name: dspace/dspace-postgres-pgcrypto
# Must build out of subdirectory to have access to install script for pgcrypto.
# Get Metadata for docker_build_postgres step below # NOTE: this context will build the image based on the Dockerfile in the specified directory
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-postgres-pgcrypto' image dockerfile_context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/
id: meta_build_postgres secrets:
uses: docker/metadata-action@v4 DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
with: DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}
images: dspace/dspace-postgres-pgcrypto
tags: ${{ env.IMAGE_TAGS }}
flavor: ${{ env.TAGS_FLAVOR }}
- name: Build and push 'dspace-postgres-pgcrypto' image
id: docker_build_postgres
uses: docker/build-push-action@v4
with:
# Must build out of subdirectory to have access to install script for pgcrypto
context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/
dockerfile: Dockerfile
platforms: ${{ env.PLATFORMS }}
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
# but we ONLY do an image push to DockerHub if it's NOT a PR
push: ${{ github.event_name != 'pull_request' }}
# Use tags / labels provided by 'docker/metadata-action' above
tags: ${{ steps.meta_build_postgres.outputs.tags }}
labels: ${{ steps.meta_build_postgres.outputs.labels }}
######################################################################## ########################################################################
# Build/Push the 'dspace/dspace-postgres-pgcrypto' image (-loadsql tag) # Build/Push the 'dspace/dspace-postgres-pgcrypto' image (-loadsql tag)
@@ -374,53 +135,16 @@ jobs:
dspace-postgres-pgcrypto-loadsql: dspace-postgres-pgcrypto-loadsql:
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace' # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
if: github.repository == 'dspace/dspace' if: github.repository == 'dspace/dspace'
runs-on: ubuntu-latest uses: ./.github/workflows/reusable-docker-build.yml
steps:
# https://github.com/actions/checkout
- name: Checkout codebase
uses: actions/checkout@v3
# https://github.com/docker/setup-buildx-action
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
# https://github.com/docker/setup-qemu-action
- name: Set up QEMU emulation to build for multiple architectures
uses: docker/setup-qemu-action@v2
# https://github.com/docker/login-action
- name: Login to DockerHub
# Only login if not a PR, as PRs only trigger a Docker build and not a push
if: github.event_name != 'pull_request'
uses: docker/login-action@v2
with: with:
username: ${{ secrets.DOCKER_USERNAME }} build_id: dspace-postgres-pgcrypto-loadsql
password: ${{ secrets.DOCKER_ACCESS_TOKEN }} image_name: dspace/dspace-postgres-pgcrypto
# Must build out of subdirectory to have access to install script for pgcrypto.
# Get Metadata for docker_build_postgres_loadsql step below # NOTE: this context will build the image based on the Dockerfile in the specified directory
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-postgres-pgcrypto-loadsql' image dockerfile_context: ./dspace/src/main/docker/dspace-postgres-pgcrypto-curl/
id: meta_build_postgres_loadsql
uses: docker/metadata-action@v4
with:
images: dspace/dspace-postgres-pgcrypto
tags: ${{ env.IMAGE_TAGS }}
# Suffix all tags with "-loadsql". Otherwise, it uses the same # Suffix all tags with "-loadsql". Otherwise, it uses the same
# tagging logic as the primary 'dspace/dspace-postgres-pgcrypto' image above. # tagging logic as the primary 'dspace/dspace-postgres-pgcrypto' image above.
flavor: ${{ env.TAGS_FLAVOR }} tags_flavor: suffix=-loadsql
suffix=-loadsql secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
- name: Build and push 'dspace-postgres-pgcrypto-loadsql' image DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}
id: docker_build_postgres_loadsql
uses: docker/build-push-action@v4
with:
# Must build out of subdirectory to have access to install script for pgcrypto
context: ./dspace/src/main/docker/dspace-postgres-pgcrypto-curl/
dockerfile: Dockerfile
platforms: ${{ env.PLATFORMS }}
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
# but we ONLY do an image push to DockerHub if it's NOT a PR
push: ${{ github.event_name != 'pull_request' }}
# Use tags / labels provided by 'docker/metadata-action' above
tags: ${{ steps.meta_build_postgres_loadsql.outputs.tags }}
labels: ${{ steps.meta_build_postgres_loadsql.outputs.labels }}

View File

@@ -23,11 +23,11 @@ jobs:
if: github.event.pull_request.merged if: github.event.pull_request.merged
steps: steps:
# Checkout code # Checkout code
- uses: actions/checkout@v3 - uses: actions/checkout@v4
# Port PR to other branch (ONLY if labeled with "port to") # Port PR to other branch (ONLY if labeled with "port to")
# See https://github.com/korthout/backport-action # See https://github.com/korthout/backport-action
- name: Create backport pull requests - name: Create backport pull requests
uses: korthout/backport-action@v1 uses: korthout/backport-action@v2
with: with:
# Trigger based on a "port to [branch]" label on PR # Trigger based on a "port to [branch]" label on PR
# (This label must specify the branch name to port to) # (This label must specify the branch name to port to)

View File

@@ -21,4 +21,4 @@ jobs:
# Assign the PR to whomever created it. This is useful for visualizing assignments on project boards # Assign the PR to whomever created it. This is useful for visualizing assignments on project boards
# See https://github.com/toshimaru/auto-author-assign # See https://github.com/toshimaru/auto-author-assign
- name: Assign PR to creator - name: Assign PR to creator
uses: toshimaru/auto-author-assign@v1.6.2 uses: toshimaru/auto-author-assign@v2.0.1

View File

@@ -0,0 +1,225 @@
#
# DSpace's reusable Docker build/push workflow.
#
# This is used by docker.yml for all Docker image builds
name: Reusable DSpace Docker Build
on:
workflow_call:
# Possible Inputs to this reusable job
inputs:
# Build name/id for this Docker build. Used for digest storage to avoid digest overlap between builds.
build_id:
required: true
type: string
# Requires the image name to build (e.g dspace/dspace-test)
image_name:
required: true
type: string
# Optionally the path to the Dockerfile to use for the build. (Default is [dockerfile_context]/Dockerfile)
dockerfile_path:
required: false
type: string
# Optionally the context directory to build the Dockerfile within. Defaults to "." (current directory)
dockerfile_context:
required: false
type: string
default: '.'
# Optionally a list of "additional_contexts" to pass to Dockerfile. Defaults to empty
dockerfile_additional_contexts:
required: false
type: string
default: ''
# If Docker image should have additional tag flavor details (e.g. a suffix), it may be passed in.
tags_flavor:
required: false
type: string
secrets:
# Requires that Docker login info be passed in as secrets.
DOCKER_USERNAME:
required: true
DOCKER_ACCESS_TOKEN:
required: true
# These URL secrets are optional. When specified & branch checks match, the redeployment code below will trigger.
# Therefore builds which need to trigger redeployment MUST specify these URLs. All others should leave them empty.
REDEPLOY_SANDBOX_URL:
required: false
REDEPLOY_DEMO_URL:
required: false
# Define shared default settings as environment variables
env:
IMAGE_NAME: ${{ inputs.image_name }}
# Define tags to use for Docker images based on Git tags/branches (for docker/metadata-action)
# For a new commit on default branch (main), use the literal tag 'latest' on Docker image.
# For a new commit on other branches, use the branch name as the tag for Docker image.
# For a new tag, copy that tag name as the tag for Docker image.
IMAGE_TAGS: |
type=raw,value=latest,enable=${{ github.ref_name == github.event.repository.default_branch }}
type=ref,event=branch,enable=${{ github.ref_name != github.event.repository.default_branch }}
type=ref,event=tag
# Define default tag "flavor" for docker/metadata-action per
# https://github.com/docker/metadata-action#flavor-input
# We manage the 'latest' tag ourselves to the 'main' branch (see settings above)
TAGS_FLAVOR: |
latest=false
${{ inputs.tags_flavor }}
# When these URL variables are specified & required branch matches, then the sandbox or demo site will be redeployed.
# See "Redeploy" steps below for more details.
REDEPLOY_SANDBOX_URL: ${{ secrets.REDEPLOY_SANDBOX_URL }}
REDEPLOY_DEMO_URL: ${{ secrets.REDEPLOY_DEMO_URL }}
# Current DSpace maintenance branch (and architecture) which is deployed to demo.dspace.org / sandbox.dspace.org
# (NOTE: No deployment branch specified for sandbox.dspace.org as it uses the default_branch)
DEPLOY_DEMO_BRANCH: 'dspace-7_x'
DEPLOY_ARCH: 'linux/amd64'
jobs:
docker-build:
strategy:
matrix:
# Architectures / Platforms for which we will build Docker images
arch: [ 'linux/amd64', 'linux/arm64' ]
os: [ ubuntu-latest ]
isPr:
- ${{ github.event_name == 'pull_request' }}
# If this is a PR, we ONLY build for AMD64. For PRs we only do a sanity check test to ensure Docker builds work.
# The below exclude therefore ensures we do NOT build ARM64 for PRs.
exclude:
- isPr: true
os: ubuntu-latest
arch: linux/arm64
runs-on: ${{ matrix.os }}
steps:
# https://github.com/actions/checkout
- name: Checkout codebase
uses: actions/checkout@v4
# https://github.com/docker/setup-buildx-action
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v3
# https://github.com/docker/setup-qemu-action
- name: Set up QEMU emulation to build for multiple architectures
uses: docker/setup-qemu-action@v3
# https://github.com/docker/login-action
- name: Login to DockerHub
# Only login if not a PR, as PRs only trigger a Docker build and not a push
if: ${{ ! matrix.isPr }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
# https://github.com/docker/metadata-action
# Get Metadata for docker_build_deps step below
- name: Sync metadata (tags, labels) from GitHub to Docker for image
id: meta_build
uses: docker/metadata-action@v5
with:
images: ${{ env.IMAGE_NAME }}
tags: ${{ env.IMAGE_TAGS }}
flavor: ${{ env.TAGS_FLAVOR }}
# https://github.com/docker/build-push-action
- name: Build and push image
id: docker_build
uses: docker/build-push-action@v5
with:
build-contexts: |
${{ inputs.dockerfile_additional_contexts }}
context: ${{ inputs.dockerfile_context }}
file: ${{ inputs.dockerfile_path }}
platforms: ${{ matrix.arch }}
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
# but we ONLY do an image push to DockerHub if it's NOT a PR
push: ${{ ! matrix.isPr }}
# Use tags / labels provided by 'docker/metadata-action' above
tags: ${{ steps.meta_build.outputs.tags }}
labels: ${{ steps.meta_build.outputs.labels }}
# Export the digest of Docker build locally (for non PRs only)
- name: Export Docker build digest
if: ${{ ! matrix.isPr }}
run: |
mkdir -p /tmp/digests
digest="${{ steps.docker_build.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
# Upload digest to an artifact, so that it can be used in manifest below
- name: Upload Docker build digest to artifact
if: ${{ ! matrix.isPr }}
uses: actions/upload-artifact@v3
with:
name: digests-${{ inputs.build_id }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
# If this build is NOT a PR and passed in a REDEPLOY_SANDBOX_URL secret,
# Then redeploy https://sandbox.dspace.org if this build is for our deployment architecture and 'main' branch.
- name: Redeploy sandbox.dspace.org (based on main branch)
if: |
!matrix.isPR &&
env.REDEPLOY_SANDBOX_URL != '' &&
matrix.arch == env.DEPLOY_ARCH &&
github.ref_name == github.event.repository.default_branch
run: |
curl -X POST $REDEPLOY_SANDBOX_URL
# If this build is NOT a PR and passed in a REDEPLOY_DEMO_URL secret,
# Then redeploy https://demo.dspace.org if this build is for our deployment architecture and demo branch.
- name: Redeploy demo.dspace.org (based on maintenace branch)
if: |
!matrix.isPR &&
env.REDEPLOY_DEMO_URL != '' &&
matrix.arch == env.DEPLOY_ARCH &&
github.ref_name == env.DEPLOY_DEMO_BRANCH
run: |
curl -X POST $REDEPLOY_DEMO_URL
# Merge Docker digests (from various architectures) into a manifest.
# This runs after all Docker builds complete above, and it tells hub.docker.com
# that these builds should be all included in the manifest for this tag.
# (e.g. AMD64 and ARM64 should be listed as options under the same tagged Docker image)
docker-build_manifest:
if: ${{ github.event_name != 'pull_request' }}
runs-on: ubuntu-latest
needs:
- docker-build
steps:
- name: Download Docker build digests
uses: actions/download-artifact@v3
with:
name: digests-${{ inputs.build_id }}
path: /tmp/digests
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Add Docker metadata for image
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.IMAGE_NAME }}
tags: ${{ env.IMAGE_TAGS }}
flavor: ${{ env.TAGS_FLAVOR }}
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
- name: Create manifest list from digests and push
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.IMAGE_NAME }}@sha256:%s ' *)
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ env.IMAGE_NAME }}:${{ steps.meta.outputs.version }}

View File

@@ -19,9 +19,12 @@ RUN mkdir /install \
USER dspace USER dspace
# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents) # Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/ ADD --chown=dspace . /app/
# Build DSpace (note: this build doesn't include the optional, deprecated "dspace-rest" webapp) # Build DSpace
# Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small # Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small
RUN mvn --no-transfer-progress package && \ # Maven flags here ensure that we skip building test environment and skip all code verification checks.
# These flags speed up this compilation as much as reasonably possible.
ENV MAVEN_FLAGS="-P-test-environment -Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxml.skip=true"
RUN mvn --no-transfer-progress package ${MAVEN_FLAGS} && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \ mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean mvn clean

View File

@@ -15,11 +15,6 @@ RUN useradd dspace \
&& mkdir -p /home/dspace \ && mkdir -p /home/dspace \
&& chown -Rv dspace: /home/dspace && chown -Rv dspace: /home/dspace
RUN chown -Rv dspace: /app RUN chown -Rv dspace: /app
# Need git to support buildnumber-maven-plugin, which lets us know what version of DSpace is being run.
RUN apt-get update \
&& apt-get install -y --no-install-recommends git \
&& apt-get purge -y --auto-remove \
&& rm -rf /var/lib/apt/lists/*
# Switch to dspace user & run below commands as that user # Switch to dspace user & run below commands as that user
USER dspace USER dspace
@@ -28,7 +23,10 @@ USER dspace
ADD --chown=dspace . /app/ ADD --chown=dspace . /app/
# Trigger the installation of all maven dependencies (hide download progress messages) # Trigger the installation of all maven dependencies (hide download progress messages)
RUN mvn --no-transfer-progress package # Maven flags here ensure that we skip final assembly, skip building test environment and skip all code verification checks.
# These flags speed up this installation as much as reasonably possible.
ENV MAVEN_FLAGS="-P-assembly -P-test-environment -Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxml.skip=true"
RUN mvn --no-transfer-progress install ${MAVEN_FLAGS}
# Clear the contents of the /app directory (including all maven builds), so no artifacts remain. # Clear the contents of the /app directory (including all maven builds), so no artifacts remain.
# This ensures when dspace:dspace is built, it will use the Maven local cache (~/.m2) for dependencies # This ensures when dspace:dspace is built, it will use the Maven local cache (~/.m2) for dependencies

View File

@@ -21,9 +21,9 @@ RUN mkdir /install \
USER dspace USER dspace
# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents) # Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/ ADD --chown=dspace . /app/
# Build DSpace (INCLUDING the optional, deprecated "dspace-rest" webapp) # Build DSpace
# Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small # Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small
RUN mvn --no-transfer-progress package -Pdspace-rest && \ RUN mvn --no-transfer-progress package && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \ mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean mvn clean
@@ -67,17 +67,10 @@ ENV CATALINA_OPTS=-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=*:800
# Link the DSpace 'server' webapp into Tomcat's webapps directory. # Link the DSpace 'server' webapp into Tomcat's webapps directory.
# This ensures that when we start Tomcat, it runs from /server path (e.g. http://localhost:8080/server/) # This ensures that when we start Tomcat, it runs from /server path (e.g. http://localhost:8080/server/)
# Also link the v6.x (deprecated) REST API off the "/rest" path RUN ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/server
RUN ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/server && \
ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest
# If you wish to run "server" webapp off the ROOT path, then comment out the above RUN, and uncomment the below RUN. # If you wish to run "server" webapp off the ROOT path, then comment out the above RUN, and uncomment the below RUN.
# You also MUST update the 'dspace.server.url' configuration to match. # You also MUST update the 'dspace.server.url' configuration to match.
# Please note that server webapp should only run on one path at a time. # Please note that server webapp should only run on one path at a time.
#RUN mv /usr/local/tomcat/webapps/ROOT /usr/local/tomcat/webapps/ROOT.bk && \ #RUN mv /usr/local/tomcat/webapps/ROOT /usr/local/tomcat/webapps/ROOT.bk && \
# ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/ROOT && \ # ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/ROOT
# ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest
# Overwrite the v6.x (deprecated) REST API's web.xml, so that we can run it on HTTP (defaults to requiring HTTPS)
# WARNING: THIS IS OBVIOUSLY INSECURE. NEVER DO THIS IN PRODUCTION.
COPY dspace/src/main/docker/test/rest_web.xml $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml
RUN sed -i -e "s|\${dspace.dir}|$DSPACE_INSTALL|" $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml

View File

@@ -1,5 +1,10 @@
version: "3.7" version: "3.7"
networks:
# Default to using network named 'dspacenet' from docker-compose.yml.
# Its full name will be prepended with the project name (e.g. "-p d7" means it will be named "d7_dspacenet")
default:
name: ${COMPOSE_PROJECT_NAME}_dspacenet
external: true
services: services:
dspace-cli: dspace-cli:
image: "${DOCKER_OWNER:-dspace}/dspace-cli:${DSPACE_VER:-latest}" image: "${DOCKER_OWNER:-dspace}/dspace-cli:${DSPACE_VER:-latest}"
@@ -26,13 +31,8 @@ services:
- ./dspace/config:/dspace/config - ./dspace/config:/dspace/config
entrypoint: /dspace/bin/dspace entrypoint: /dspace/bin/dspace
command: help command: help
networks:
- dspacenet
tty: true tty: true
stdin_open: true stdin_open: true
volumes: volumes:
assetstore: assetstore:
networks:
dspacenet:

View File

@@ -36,7 +36,7 @@ services:
depends_on: depends_on:
- dspacedb - dspacedb
networks: networks:
dspacenet: - dspacenet
ports: ports:
- published: 8080 - published: 8080
target: 8080 target: 8080
@@ -89,8 +89,10 @@ services:
container_name: dspacesolr container_name: dspacesolr
image: "${DOCKER_OWNER:-dspace}/dspace-solr:${DSPACE_VER:-latest}" image: "${DOCKER_OWNER:-dspace}/dspace-solr:${DSPACE_VER:-latest}"
build: build:
context: . context: ./dspace/src/main/docker/dspace-solr/
dockerfile: ./dspace/src/main/docker/dspace-solr/Dockerfile # Provide path to Solr configs necessary to build Docker image
additional_contexts:
solrconfigs: ./dspace/solr/
args: args:
SOLR_VERSION: "${SOLR_VER:-8.11}" SOLR_VERSION: "${SOLR_VER:-8.11}"
networks: networks:
@@ -121,6 +123,10 @@ services:
cp -r /opt/solr/server/solr/configsets/search/* search cp -r /opt/solr/server/solr/configsets/search/* search
precreate-core statistics /opt/solr/server/solr/configsets/statistics precreate-core statistics /opt/solr/server/solr/configsets/statistics
cp -r /opt/solr/server/solr/configsets/statistics/* statistics cp -r /opt/solr/server/solr/configsets/statistics/* statistics
precreate-core qaevent /opt/solr/server/solr/configsets/qaevent
cp -r /opt/solr/server/solr/configsets/qaevent/* qaevent
precreate-core suggestion /opt/solr/server/solr/configsets/suggestion
cp -r /opt/solr/server/solr/configsets/suggestion/* suggestion
exec solr -f exec solr -f
volumes: volumes:
assetstore: assetstore:

View File

@@ -528,7 +528,7 @@
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.hamcrest</groupId> <groupId>org.hamcrest</groupId>
<artifactId>hamcrest-all</artifactId> <artifactId>hamcrest</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency> <dependency>
@@ -620,7 +620,7 @@
<dependency> <dependency>
<groupId>com.maxmind.geoip2</groupId> <groupId>com.maxmind.geoip2</groupId>
<artifactId>geoip2</artifactId> <artifactId>geoip2</artifactId>
<version>2.11.0</version> <version>2.17.0</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.ant</groupId> <groupId>org.apache.ant</groupId>
@@ -784,7 +784,7 @@
<dependency> <dependency>
<groupId>com.opencsv</groupId> <groupId>com.opencsv</groupId>
<artifactId>opencsv</artifactId> <artifactId>opencsv</artifactId>
<version>5.7.1</version> <version>5.9</version>
</dependency> </dependency>
<!-- Email templating --> <!-- Email templating -->
@@ -820,6 +820,12 @@
</exclusions> </exclusions>
</dependency> </dependency>
<dependency>
<groupId>eu.openaire</groupId>
<artifactId>broker-client</artifactId>
<version>1.1.2</version>
</dependency>
<dependency> <dependency>
<groupId>org.mock-server</groupId> <groupId>org.mock-server</groupId>
<artifactId>mockserver-junit-rule</artifactId> <artifactId>mockserver-junit-rule</artifactId>
@@ -861,32 +867,32 @@
<dependency> <dependency>
<groupId>io.netty</groupId> <groupId>io.netty</groupId>
<artifactId>netty-buffer</artifactId> <artifactId>netty-buffer</artifactId>
<version>4.1.94.Final</version> <version>4.1.106.Final</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>io.netty</groupId> <groupId>io.netty</groupId>
<artifactId>netty-transport</artifactId> <artifactId>netty-transport</artifactId>
<version>4.1.94.Final</version> <version>4.1.106.Final</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>io.netty</groupId> <groupId>io.netty</groupId>
<artifactId>netty-transport-native-unix-common</artifactId> <artifactId>netty-transport-native-unix-common</artifactId>
<version>4.1.94.Final</version> <version>4.1.106.Final</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>io.netty</groupId> <groupId>io.netty</groupId>
<artifactId>netty-common</artifactId> <artifactId>netty-common</artifactId>
<version>4.1.94.Final</version> <version>4.1.106.Final</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>io.netty</groupId> <groupId>io.netty</groupId>
<artifactId>netty-handler</artifactId> <artifactId>netty-handler</artifactId>
<version>4.1.94.Final</version> <version>4.1.106.Final</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>io.netty</groupId> <groupId>io.netty</groupId>
<artifactId>netty-codec</artifactId> <artifactId>netty-codec</artifactId>
<version>4.1.94.Final</version> <version>4.1.106.Final</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.velocity</groupId> <groupId>org.apache.velocity</groupId>
@@ -896,7 +902,7 @@
<dependency> <dependency>
<groupId>org.xmlunit</groupId> <groupId>org.xmlunit</groupId>
<artifactId>xmlunit-core</artifactId> <artifactId>xmlunit-core</artifactId>
<version>2.8.0</version> <version>2.9.1</version>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency> <dependency>

View File

@@ -116,6 +116,17 @@ public final class CreateAdministrator {
protected CreateAdministrator() protected CreateAdministrator()
throws Exception { throws Exception {
context = new Context(); context = new Context();
try {
context.getDBConfig();
} catch (NullPointerException npr) {
// if database is null, there is no point in continuing. Prior to this exception and catch,
// NullPointerException was thrown, that wasn't very helpful.
throw new IllegalStateException("Problem connecting to database. This " +
"indicates issue with either network or version (or possibly some other). " +
"If you are running this in docker-compose, please make sure dspace-cli was " +
"built from the same sources as running dspace container AND that they are in " +
"the same project/network.");
}
groupService = EPersonServiceFactory.getInstance().getGroupService(); groupService = EPersonServiceFactory.getInstance().getGroupService();
ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
} }

View File

@@ -464,7 +464,7 @@ public class BulkAccessControl extends DSpaceRunnable<BulkAccessControlScriptCon
.forEach(accessCondition -> createResourcePolicy(item, accessCondition, .forEach(accessCondition -> createResourcePolicy(item, accessCondition,
itemAccessConditions.get(accessCondition.getName()))); itemAccessConditions.get(accessCondition.getName())));
itemService.adjustItemPolicies(context, item, item.getOwningCollection()); itemService.adjustItemPolicies(context, item, item.getOwningCollection(), false);
} }
/** /**

View File

@@ -7,18 +7,10 @@
*/ */
package org.dspace.app.sitemap; package org.dspace.app.sitemap;
import java.io.BufferedReader;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.Date; import java.util.Date;
import java.util.Iterator;
import java.util.List; import java.util.List;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
@@ -29,12 +21,8 @@ import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService; import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService; import org.dspace.content.service.CommunityService;
@@ -43,6 +31,7 @@ import org.dspace.core.Context;
import org.dspace.core.LogHelper; import org.dspace.core.LogHelper;
import org.dspace.discovery.DiscoverQuery; import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.DiscoverResult; import org.dspace.discovery.DiscoverResult;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchService; import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchServiceException; import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.SearchUtils; import org.dspace.discovery.SearchUtils;
@@ -68,6 +57,7 @@ public class GenerateSitemaps {
private static final ConfigurationService configurationService = private static final ConfigurationService configurationService =
DSpaceServicesFactory.getInstance().getConfigurationService(); DSpaceServicesFactory.getInstance().getConfigurationService();
private static final SearchService searchService = SearchUtils.getSearchService(); private static final SearchService searchService = SearchUtils.getSearchService();
private static final int PAGE_SIZE = 100;
/** /**
* Default constructor * Default constructor
@@ -87,11 +77,6 @@ public class GenerateSitemaps {
"do not generate sitemaps.org protocol sitemap"); "do not generate sitemaps.org protocol sitemap");
options.addOption("b", "no_htmlmap", false, options.addOption("b", "no_htmlmap", false,
"do not generate a basic HTML sitemap"); "do not generate a basic HTML sitemap");
options.addOption("a", "ping_all", false,
"ping configured search engines");
options
.addOption("p", "ping", true,
"ping specified search engine URL");
options options
.addOption("d", "delete", false, .addOption("d", "delete", false,
"delete sitemaps dir and its contents"); "delete sitemaps dir and its contents");
@@ -116,14 +101,13 @@ public class GenerateSitemaps {
} }
/* /*
* Sanity check -- if no sitemap generation or pinging to do, or deletion, print usage * Sanity check -- if no sitemap generation or deletion, print usage
*/ */
if (line.getArgs().length != 0 || line.hasOption('d') || line.hasOption('b') if (line.getArgs().length != 0 || line.hasOption('d') || line.hasOption('b')
&& line.hasOption('s') && !line.hasOption('g') && line.hasOption('s') && !line.hasOption('g')
&& !line.hasOption('m') && !line.hasOption('y') && !line.hasOption('m') && !line.hasOption('y')) {
&& !line.hasOption('p')) {
System.err System.err
.println("Nothing to do (no sitemap to generate, no search engines to ping)"); .println("Nothing to do (no sitemap to generate)");
hf.printHelp(usage, options); hf.printHelp(usage, options);
System.exit(1); System.exit(1);
} }
@@ -137,20 +121,6 @@ public class GenerateSitemaps {
deleteSitemaps(); deleteSitemaps();
} }
if (line.hasOption('a')) {
pingConfiguredSearchEngines();
}
if (line.hasOption('p')) {
try {
pingSearchEngine(line.getOptionValue('p'));
} catch (MalformedURLException me) {
System.err
.println("Bad search engine URL (include all except sitemap URL)");
System.exit(1);
}
}
System.exit(0); System.exit(0);
} }
@@ -211,171 +181,113 @@ public class GenerateSitemaps {
} }
Context c = new Context(Context.Mode.READ_ONLY); Context c = new Context(Context.Mode.READ_ONLY);
int offset = 0;
List<Community> comms = communityService.findAll(c); long commsCount = 0;
long collsCount = 0;
for (Community comm : comms) { long itemsCount = 0;
String url = uiURLStem + "communities/" + comm.getID();
if (makeHTMLMap) {
html.addURL(url, null);
}
if (makeSitemapOrg) {
sitemapsOrg.addURL(url, null);
}
c.uncacheEntity(comm);
}
List<Collection> colls = collectionService.findAll(c);
for (Collection coll : colls) {
String url = uiURLStem + "collections/" + coll.getID();
if (makeHTMLMap) {
html.addURL(url, null);
}
if (makeSitemapOrg) {
sitemapsOrg.addURL(url, null);
}
c.uncacheEntity(coll);
}
Iterator<Item> allItems = itemService.findAll(c);
int itemCount = 0;
while (allItems.hasNext()) {
Item i = allItems.next();
DiscoverQuery entityQuery = new DiscoverQuery();
entityQuery.setQuery("search.uniqueid:\"Item-" + i.getID() + "\" and entityType:*");
entityQuery.addSearchField("entityType");
try { try {
DiscoverResult discoverResult = searchService.search(c, entityQuery); DiscoverQuery discoveryQuery = new DiscoverQuery();
discoveryQuery.setMaxResults(PAGE_SIZE);
discoveryQuery.setQuery("search.resourcetype:Community");
do {
discoveryQuery.setStart(offset);
DiscoverResult discoverResult = searchService.search(c, discoveryQuery);
List<IndexableObject> docs = discoverResult.getIndexableObjects();
commsCount = discoverResult.getTotalSearchResults();
String url; for (IndexableObject doc : docs) {
if (CollectionUtils.isNotEmpty(discoverResult.getIndexableObjects()) String url = uiURLStem + "communities/" + doc.getID();
&& CollectionUtils.isNotEmpty(discoverResult.getSearchDocument( c.uncacheEntity(doc.getIndexedObject());
discoverResult.getIndexableObjects().get(0)).get(0).getSearchFieldValues("entityType"))
&& StringUtils.isNotBlank(discoverResult.getSearchDocument(
discoverResult.getIndexableObjects().get(0)).get(0).getSearchFieldValues("entityType").get(0))
) {
url = uiURLStem + "entities/" + StringUtils.lowerCase(discoverResult.getSearchDocument(
discoverResult.getIndexableObjects().get(0))
.get(0).getSearchFieldValues("entityType").get(0)) + "/" + i.getID();
} else {
url = uiURLStem + "items/" + i.getID();
}
Date lastMod = i.getLastModified();
if (makeHTMLMap) { if (makeHTMLMap) {
html.addURL(url, lastMod); html.addURL(url, null);
} }
if (makeSitemapOrg) { if (makeSitemapOrg) {
sitemapsOrg.addURL(url, lastMod); sitemapsOrg.addURL(url, null);
} }
} catch (SearchServiceException e) {
log.error("Failed getting entitytype through solr for item " + i.getID() + ": " + e.getMessage());
} }
offset += PAGE_SIZE;
} while (offset < commsCount);
c.uncacheEntity(i); offset = 0;
discoveryQuery = new DiscoverQuery();
discoveryQuery.setMaxResults(PAGE_SIZE);
discoveryQuery.setQuery("search.resourcetype:Collection");
do {
discoveryQuery.setStart(offset);
DiscoverResult discoverResult = searchService.search(c, discoveryQuery);
List<IndexableObject> docs = discoverResult.getIndexableObjects();
collsCount = discoverResult.getTotalSearchResults();
itemCount++; for (IndexableObject doc : docs) {
String url = uiURLStem + "collections/" + doc.getID();
c.uncacheEntity(doc.getIndexedObject());
if (makeHTMLMap) {
html.addURL(url, null);
} }
if (makeSitemapOrg) {
sitemapsOrg.addURL(url, null);
}
}
offset += PAGE_SIZE;
} while (offset < collsCount);
offset = 0;
discoveryQuery = new DiscoverQuery();
discoveryQuery.setMaxResults(PAGE_SIZE);
discoveryQuery.setQuery("search.resourcetype:Item");
discoveryQuery.addSearchField("search.entitytype");
do {
discoveryQuery.setStart(offset);
DiscoverResult discoverResult = searchService.search(c, discoveryQuery);
List<IndexableObject> docs = discoverResult.getIndexableObjects();
itemsCount = discoverResult.getTotalSearchResults();
for (IndexableObject doc : docs) {
String url;
List<String> entityTypeFieldValues = discoverResult.getSearchDocument(doc).get(0)
.getSearchFieldValues("search.entitytype");
if (CollectionUtils.isNotEmpty(entityTypeFieldValues)) {
url = uiURLStem + "entities/" + StringUtils.lowerCase(entityTypeFieldValues.get(0)) + "/"
+ doc.getID();
} else {
url = uiURLStem + "items/" + doc.getID();
}
Date lastMod = doc.getLastModified();
c.uncacheEntity(doc.getIndexedObject());
if (makeHTMLMap) {
html.addURL(url, null);
}
if (makeSitemapOrg) {
sitemapsOrg.addURL(url, null);
}
}
offset += PAGE_SIZE;
} while (offset < itemsCount);
if (makeHTMLMap) { if (makeHTMLMap) {
int files = html.finish(); int files = html.finish();
log.info(LogHelper.getHeader(c, "write_sitemap", log.info(LogHelper.getHeader(c, "write_sitemap",
"type=html,num_files=" + files + ",communities=" "type=html,num_files=" + files + ",communities="
+ comms.size() + ",collections=" + colls.size() + commsCount + ",collections=" + collsCount
+ ",items=" + itemCount)); + ",items=" + itemsCount));
} }
if (makeSitemapOrg) { if (makeSitemapOrg) {
int files = sitemapsOrg.finish(); int files = sitemapsOrg.finish();
log.info(LogHelper.getHeader(c, "write_sitemap", log.info(LogHelper.getHeader(c, "write_sitemap",
"type=html,num_files=" + files + ",communities=" "type=html,num_files=" + files + ",communities="
+ comms.size() + ",collections=" + colls.size() + commsCount + ",collections=" + collsCount
+ ",items=" + itemCount)); + ",items=" + itemsCount));
} }
} catch (SearchServiceException e) {
throw new RuntimeException(e);
} finally {
c.abort(); c.abort();
} }
/**
* Ping all search engines configured in {@code dspace.cfg}.
*
* @throws UnsupportedEncodingException theoretically should never happen
*/
public static void pingConfiguredSearchEngines()
throws UnsupportedEncodingException {
String[] engineURLs = configurationService
.getArrayProperty("sitemap.engineurls");
if (ArrayUtils.isEmpty(engineURLs)) {
log.warn("No search engine URLs configured to ping");
return;
}
for (int i = 0; i < engineURLs.length; i++) {
try {
pingSearchEngine(engineURLs[i]);
} catch (MalformedURLException me) {
log.warn("Bad search engine URL in configuration: "
+ engineURLs[i]);
}
}
}
/**
* Ping the given search engine.
*
* @param engineURL Search engine URL minus protocol etc, e.g.
* {@code www.google.com}
* @throws MalformedURLException if the passed in URL is malformed
* @throws UnsupportedEncodingException theoretically should never happen
*/
public static void pingSearchEngine(String engineURL)
throws MalformedURLException, UnsupportedEncodingException {
// Set up HTTP proxy
if ((StringUtils.isNotBlank(configurationService.getProperty("http.proxy.host")))
&& (StringUtils.isNotBlank(configurationService.getProperty("http.proxy.port")))) {
System.setProperty("proxySet", "true");
System.setProperty("proxyHost", configurationService
.getProperty("http.proxy.host"));
System.getProperty("proxyPort", configurationService
.getProperty("http.proxy.port"));
}
String sitemapURL = configurationService.getProperty("dspace.ui.url")
+ "/sitemap";
URL url = new URL(engineURL + URLEncoder.encode(sitemapURL, "UTF-8"));
try {
HttpURLConnection connection = (HttpURLConnection) url
.openConnection();
BufferedReader in = new BufferedReader(new InputStreamReader(
connection.getInputStream()));
String inputLine;
StringBuffer resp = new StringBuffer();
while ((inputLine = in.readLine()) != null) {
resp.append(inputLine).append("\n");
}
in.close();
if (connection.getResponseCode() == 200) {
log.info("Pinged " + url.toString() + " successfully");
} else {
log.warn("Error response pinging " + url.toString() + ":\n"
+ resp);
}
} catch (IOException e) {
log.warn("Error pinging " + url.toString(), e);
}
} }
} }

View File

@@ -0,0 +1,140 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.suggestion;
import java.io.IOException;
import java.util.List;
import java.util.UUID;
import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrServerException;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.external.model.ExternalDataObject;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Suggestion provider that read the suggestion from the local suggestion solr
* core
*
* @author Andrea Bollini (andrea.bollini at 4science dot it)
*
*/
public abstract class SolrSuggestionProvider implements SuggestionProvider {

    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SolrSuggestionProvider.class);

    @Autowired
    protected ItemService itemService;

    @Autowired
    protected SolrSuggestionStorageService solrSuggestionStorageService;

    // Name of the suggestion source this provider is responsible for; used as the
    // discriminator on every query against the suggestion Solr core.
    private String sourceName;

    /**
     * @return the name of the suggestion source handled by this provider
     */
    public String getSourceName() {
        return sourceName;
    }

    /**
     * @param sourceName the name of the suggestion source handled by this provider
     */
    public void setSourceName(String sourceName) {
        this.sourceName = sourceName;
    }

    public void setItemService(ItemService itemService) {
        this.itemService = itemService;
    }

    /**
     * Count all the suggestion targets known for this provider's source.
     * Solr errors are rethrown as unchecked exceptions with context, preserving the cause.
     */
    @Override
    public long countAllTargets(Context context) {
        try {
            return this.solrSuggestionStorageService.countAllTargets(context, sourceName);
        } catch (SolrServerException | IOException e) {
            throw new RuntimeException("Unable to count suggestion targets for source " + sourceName, e);
        }
    }

    /**
     * Count the unprocessed suggestions for the given target within this provider's source.
     */
    @Override
    public long countUnprocessedSuggestionByTarget(Context context, UUID target) {
        try {
            return this.solrSuggestionStorageService.countUnprocessedSuggestionByTarget(context, sourceName, target);
        } catch (SolrServerException | IOException e) {
            throw new RuntimeException("Unable to count unprocessed suggestions for source " + sourceName
                + " and target " + target, e);
        }
    }

    /**
     * Find a page of unprocessed suggestions for the given target within this provider's source.
     */
    @Override
    public List<Suggestion> findAllUnprocessedSuggestions(Context context, UUID target, int pageSize, long offset,
        boolean ascending) {
        try {
            return this.solrSuggestionStorageService.findAllUnprocessedSuggestions(context, sourceName,
                target, pageSize, offset, ascending);
        } catch (SolrServerException | IOException e) {
            throw new RuntimeException("Unable to find unprocessed suggestions for source " + sourceName
                + " and target " + target, e);
        }
    }

    /**
     * Find a page of suggestion targets within this provider's source.
     */
    @Override
    public List<SuggestionTarget> findAllTargets(Context context, int pageSize, long offset) {
        try {
            return this.solrSuggestionStorageService.findAllTargets(context, sourceName, pageSize, offset);
        } catch (SolrServerException | IOException e) {
            throw new RuntimeException("Unable to find suggestion targets for source " + sourceName, e);
        }
    }

    /**
     * Find a single unprocessed suggestion by its id within this provider's source and the given target.
     */
    @Override
    public Suggestion findUnprocessedSuggestion(Context context, UUID target, String id) {
        try {
            return this.solrSuggestionStorageService.findUnprocessedSuggestion(context, sourceName, target, id);
        } catch (SolrServerException | IOException e) {
            throw new RuntimeException("Unable to find unprocessed suggestion " + id + " for source "
                + sourceName + " and target " + target, e);
        }
    }

    /**
     * Find a single suggestion target within this provider's source.
     */
    @Override
    public SuggestionTarget findTarget(Context context, UUID target) {
        try {
            return this.solrSuggestionStorageService.findTarget(context, sourceName, target);
        } catch (SolrServerException | IOException e) {
            throw new RuntimeException("Unable to find suggestion target " + target + " for source "
                + sourceName, e);
        }
    }

    /**
     * Reject the identified suggestion by flagging it as processed in the Solr core.
     */
    @Override
    public void rejectSuggestion(Context context, UUID target, String idPart) {
        Suggestion suggestion = findUnprocessedSuggestion(context, target, idPart);
        try {
            solrSuggestionStorageService.flagSuggestionAsProcessed(suggestion);
        } catch (SolrServerException | IOException e) {
            throw new RuntimeException("Unable to flag suggestion " + idPart + " as processed for source "
                + sourceName + " and target " + target, e);
        }
    }

    /**
     * Flag every suggestion related to the given external data object as processed,
     * provided this provider could have produced it. Failures are logged (best-effort),
     * not rethrown, so that the caller's workflow is not interrupted.
     */
    @Override
    public void flagRelatedSuggestionsAsProcessed(Context context, ExternalDataObject externalDataObject) {
        if (!isExternalDataObjectPotentiallySuggested(context, externalDataObject)) {
            return;
        }
        try {
            solrSuggestionStorageService.flagAllSuggestionAsProcessed(sourceName, externalDataObject.getId());
        } catch (SolrServerException | IOException e) {
            log.error(e.getMessage(), e);
        }
    }

    /**
     * check if the externalDataObject may have suggestion
     * @param context
     * @param externalDataObject
     * @return true if the externalDataObject could be suggested by this provider
     *         (i.e. it comes from a DataProvider used by this suggestor)
     */
    protected abstract boolean isExternalDataObjectPotentiallySuggested(Context context,
        ExternalDataObject externalDataObject);

}

View File

@@ -0,0 +1,191 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.suggestion;
import java.io.IOException;
import java.util.List;
import java.util.UUID;
import org.apache.solr.client.solrj.SolrServerException;
import org.dspace.core.Context;
/**
* Service to deal with the local suggestion solr core used by the
* SolrSuggestionProvider(s)
*
* @author Andrea Bollini (andrea.bollini at 4science dot it)
* @author Luca Giamminonni (luca.giamminonni at 4science dot it)
*
*/
public interface SolrSuggestionStorageService {

    // Solr field names of the suggestion core. Interface fields are implicitly
    // public, static and final, so the redundant modifiers are omitted.
    String SOURCE = "source";
    /** This is the URI Part of the suggestion source:target:id */
    String SUGGESTION_FULLID = "suggestion_fullid";
    String SUGGESTION_ID = "suggestion_id";
    String TARGET_ID = "target_id";
    String TITLE = "title";
    String DATE = "date";
    String DISPLAY = "display";
    String CONTRIBUTORS = "contributors";
    String ABSTRACT = "abstract";
    String CATEGORY = "category";
    String EXTERNAL_URI = "external-uri";
    String PROCESSED = "processed";
    String SCORE = "trust";
    String EVIDENCES = "evidences";

    /**
     * Add a new suggestion to SOLR
     *
     * @param suggestion the suggestion to index
     * @param force      true if the suggestion must be reindexed even when already present
     * @param commit     true to perform an explicit Solr commit after the add
     * @throws IOException         passed through from the Solr client
     * @throws SolrServerException passed through from the Solr client
     */
    void addSuggestion(Suggestion suggestion, boolean force, boolean commit)
        throws SolrServerException, IOException;

    /**
     * Return true if the suggestion is already in SOLR and flagged as processed
     *
     * @param suggestion the suggestion to look up
     * @return true if the suggestion is already in SOLR and flagged as processed
     * @throws IOException         passed through from the Solr client
     * @throws SolrServerException passed through from the Solr client
     */
    boolean exist(Suggestion suggestion) throws SolrServerException, IOException;

    /**
     * Delete a suggestion from SOLR if any
     *
     * @param suggestion the suggestion to delete
     * @throws IOException         passed through from the Solr client
     * @throws SolrServerException passed through from the Solr client
     */
    void deleteSuggestion(Suggestion suggestion) throws SolrServerException, IOException;

    /**
     * Flag a suggestion as processed in SOLR if any
     *
     * @param suggestion the suggestion to flag
     * @throws IOException         passed through from the Solr client
     * @throws SolrServerException passed through from the Solr client
     */
    void flagSuggestionAsProcessed(Suggestion suggestion) throws SolrServerException, IOException;

    /**
     * Delete all the suggestions from SOLR if any related to a specific target
     *
     * @param target the suggestion target whose suggestions must be removed
     * @throws IOException         passed through from the Solr client
     * @throws SolrServerException passed through from the Solr client
     */
    void deleteTarget(SuggestionTarget target) throws SolrServerException, IOException;

    /**
     * Performs an explicit commit, causing pending documents to be committed for
     * indexing.
     *
     * @throws SolrServerException passed through from the Solr client
     * @throws IOException         passed through from the Solr client
     */
    void commit() throws SolrServerException, IOException;

    /**
     * Flag all the suggestion related to the given source and id as processed.
     *
     * @param source the source name
     * @param idPart the id's last part
     * @throws SolrServerException passed through from the Solr client
     * @throws IOException         passed through from the Solr client
     */
    void flagAllSuggestionAsProcessed(String source, String idPart) throws SolrServerException, IOException;

    /**
     * Count all the targets related to the given source.
     *
     * @param context the DSpace Context
     * @param source  the source name
     * @return the target's count
     * @throws IOException         passed through from the Solr client
     * @throws SolrServerException passed through from the Solr client
     */
    long countAllTargets(Context context, String source) throws SolrServerException, IOException;

    /**
     * Count all the unprocessed suggestions related to the given source and target.
     *
     * @param context the DSpace Context
     * @param source  the source name
     * @param target  the target id
     * @return the suggestion count
     * @throws SolrServerException passed through from the Solr client
     * @throws IOException         passed through from the Solr client
     */
    long countUnprocessedSuggestionByTarget(Context context, String source, UUID target)
        throws SolrServerException, IOException;

    /**
     * Find all the unprocessed suggestions related to the given source and target.
     *
     * @param context   the DSpace Context
     * @param source    the source name
     * @param target    the target id
     * @param pageSize  the page size
     * @param offset    the page offset
     * @param ascending true to retrieve the suggestions ordered by score
     *                  ascending
     * @return the found suggestions
     * @throws SolrServerException passed through from the Solr client
     * @throws IOException         passed through from the Solr client
     */
    List<Suggestion> findAllUnprocessedSuggestions(Context context, String source, UUID target,
        int pageSize, long offset, boolean ascending) throws SolrServerException, IOException;

    /**
     * Find all the suggestion targets related to the given source.
     *
     * @param context  the DSpace Context
     * @param source   the source name
     * @param pageSize the page size
     * @param offset   the page offset
     * @return the found suggestion targets
     * @throws SolrServerException passed through from the Solr client
     * @throws IOException         passed through from the Solr client
     */
    List<SuggestionTarget> findAllTargets(Context context, String source, int pageSize, long offset)
        throws SolrServerException, IOException;

    /**
     * Find an unprocessed suggestion by the given source, target id and suggestion
     * id.
     *
     * @param context the DSpace Context
     * @param source  the source name
     * @param target  the target id
     * @param id      the suggestion id
     * @return the suggestion, if any
     * @throws SolrServerException passed through from the Solr client
     * @throws IOException         passed through from the Solr client
     */
    Suggestion findUnprocessedSuggestion(Context context, String source, UUID target, String id)
        throws SolrServerException, IOException;

    /**
     * Find a suggestion target by the given source and target.
     *
     * @param context the DSpace Context
     * @param source  the source name
     * @param target  the target id
     * @return the suggestion target, if any
     * @throws SolrServerException passed through from the Solr client
     * @throws IOException         passed through from the Solr client
     */
    SuggestionTarget findTarget(Context context, String source, UUID target) throws SolrServerException, IOException;
}

View File

@@ -0,0 +1,360 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.suggestion;
import static org.apache.commons.collections.CollectionUtils.isEmpty;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.stream.Collectors;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.json.JsonMapper;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrQuery.SortClause;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.FacetField;
import org.apache.solr.client.solrj.response.FacetField.Count;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.FacetParams;
import org.dspace.content.Item;
import org.dspace.content.dto.MetadataValueDTO;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.util.UUIDUtils;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Service to deal with the local suggestion solr core used by the
 * SolrSuggestionProvider(s)
 *
 * @author Andrea Bollini (andrea.bollini at 4science dot it)
 *
 */
public class SolrSuggestionStorageServiceImpl implements SolrSuggestionStorageService {

    private static final Logger log = LogManager.getLogger(SolrSuggestionStorageServiceImpl.class);

    /**
     * Shared mapper used to (de)serialize the suggestion evidences. Jackson's
     * ObjectMapper is thread-safe once configured, so a single instance replaces
     * the per-call construction previously done in addSuggestion/convertSolrDoc.
     */
    private static final ObjectMapper JSON_MAPPER = new JsonMapper()
        .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

    /** lazily created client for the suggestion core, see {@link #getSolr()} */
    protected SolrClient solrSuggestionClient;

    @Autowired
    private ItemService itemService;

    /**
     * Get solr client which use suggestion core
     *
     * @return solr client
     */
    protected SolrClient getSolr() {
        // NOTE(review): lazy init is not synchronized; two concurrent first calls
        // could each build a client. Likely harmless, but worth confirming.
        if (solrSuggestionClient == null) {
            String solrService = DSpaceServicesFactory.getInstance().getConfigurationService()
                .getProperty("suggestion.solr.server", "http://localhost:8983/solr/suggestion");
            solrSuggestionClient = new HttpSolrClient.Builder(solrService).build();
        }
        return solrSuggestionClient;
    }

    @Override
    public void addSuggestion(Suggestion suggestion, boolean force, boolean commit)
        throws SolrServerException, IOException {
        if (force || !exist(suggestion)) {
            SolrInputDocument document = new SolrInputDocument();
            document.addField(SOURCE, suggestion.getSource());
            // suggestion id is written as concatenation of
            // source + ":" + targetID + ":" + idPart (of externalDataObj)
            String suggestionFullID = suggestion.getID();
            document.addField(SUGGESTION_FULLID, suggestionFullID);
            document.addField(SUGGESTION_ID, suggestionFullID.split(":", 3)[2]);
            document.addField(TARGET_ID, suggestion.getTarget().getID().toString());
            document.addField(DISPLAY, suggestion.getDisplay());
            document.addField(TITLE, getFirstValue(suggestion, "dc", "title", null));
            document.addField(DATE, getFirstValue(suggestion, "dc", "date", "issued"));
            document.addField(CONTRIBUTORS, getAllValues(suggestion, "dc", "contributor", "author"));
            document.addField(ABSTRACT, getFirstValue(suggestion, "dc", "description", "abstract"));
            document.addField(CATEGORY, getAllValues(suggestion, "dc", "source", null));
            document.addField(EXTERNAL_URI, suggestion.getExternalSourceUri());
            document.addField(SCORE, suggestion.getScore());
            document.addField(PROCESSED, false);
            document.addField(EVIDENCES, JSON_MAPPER.writeValueAsString(suggestion.getEvidences()));
            getSolr().add(document);
            if (commit) {
                getSolr().commit();
            }
        }
    }

    @Override
    public void commit() throws SolrServerException, IOException {
        getSolr().commit();
    }

    /** Return all the values of the given metadata field found in the suggestion. */
    private List<String> getAllValues(Suggestion suggestion, String schema, String element, String qualifier) {
        return suggestion.getMetadata().stream()
            .filter(st -> StringUtils.isNotBlank(st.getValue()) && StringUtils.equals(st.getSchema(), schema)
                && StringUtils.equals(st.getElement(), element)
                && StringUtils.equals(st.getQualifier(), qualifier))
            .map(st -> st.getValue()).collect(Collectors.toList());
    }

    /** Return the first non-blank value of the given metadata field, or null. */
    private String getFirstValue(Suggestion suggestion, String schema, String element, String qualifier) {
        return suggestion.getMetadata().stream()
            .filter(st -> StringUtils.isNotBlank(st.getValue())
                && StringUtils.equals(st.getSchema(), schema)
                && StringUtils.equals(st.getElement(), element)
                && StringUtils.equals(st.getQualifier(), qualifier))
            .map(st -> st.getValue()).findFirst().orElse(null);
    }

    @Override
    public boolean exist(Suggestion suggestion) throws SolrServerException, IOException {
        // NOTE: despite its name, this only matches documents already flagged as
        // processed (PROCESSED:true), per the interface contract.
        SolrQuery query = new SolrQuery(
            SUGGESTION_FULLID + ":\"" + suggestion.getID() + "\" AND " + PROCESSED + ":true");
        return getSolr().query(query).getResults().getNumFound() == 1;
    }

    @Override
    public void deleteSuggestion(Suggestion suggestion) throws SolrServerException, IOException {
        getSolr().deleteById(suggestion.getID());
        getSolr().commit();
    }

    @Override
    public void flagSuggestionAsProcessed(Suggestion suggestion) throws SolrServerException, IOException {
        getSolr().add(processedUpdateDocument(suggestion.getID()));
        getSolr().commit();
    }

    @Override
    public void flagAllSuggestionAsProcessed(String source, String idPart) throws SolrServerException, IOException {
        SolrQuery query = new SolrQuery(SOURCE + ":" + source + " AND " + SUGGESTION_ID + ":\"" + idPart + "\"");
        query.setRows(Integer.MAX_VALUE);
        query.setFields(SUGGESTION_FULLID);
        SolrDocumentList results = getSolr().query(query).getResults();
        if (results.getNumFound() > 0) {
            for (SolrDocument rDoc : results) {
                getSolr().add(processedUpdateDocument(rDoc.getFieldValue(SUGGESTION_FULLID)));
            }
        }
        getSolr().commit();
    }

    /**
     * Build a Solr atomic-update document setting the processed flag to true for
     * the suggestion identified by the given full id. Shared by the two
     * flag-as-processed methods, which previously duplicated this construction.
     */
    private SolrInputDocument processedUpdateDocument(Object suggestionFullId) {
        SolrInputDocument sdoc = new SolrInputDocument();
        sdoc.addField(SUGGESTION_FULLID, suggestionFullId);
        Map<String, Object> fieldModifier = new HashMap<>(1);
        fieldModifier.put("set", true);
        sdoc.addField(PROCESSED, fieldModifier); // the map value triggers a Solr atomic "set" update
        return sdoc;
    }

    @Override
    public void deleteTarget(SuggestionTarget target) throws SolrServerException, IOException {
        getSolr().deleteByQuery(
            SOURCE + ":" + target.getSource() + " AND " + TARGET_ID + ":" + target.getTarget().getID().toString());
        getSolr().commit();
    }

    @Override
    public long countAllTargets(Context context, String source) throws SolrServerException, IOException {
        // count distinct targets with unprocessed suggestions via a facet on target_id
        SolrQuery solrQuery = new SolrQuery();
        solrQuery.setRows(0);
        solrQuery.setQuery(SOURCE + ":" + source);
        solrQuery.addFilterQuery(PROCESSED + ":false");
        solrQuery.setFacet(true);
        solrQuery.setFacetMinCount(1);
        solrQuery.addFacetField(TARGET_ID);
        solrQuery.setFacetLimit(Integer.MAX_VALUE);
        QueryResponse response = getSolr().query(solrQuery);
        return response.getFacetField(TARGET_ID).getValueCount();
    }

    @Override
    public long countUnprocessedSuggestionByTarget(Context context, String source, UUID target)
        throws SolrServerException, IOException {
        SolrQuery solrQuery = new SolrQuery();
        solrQuery.setRows(0);
        solrQuery.setQuery("*:*");
        solrQuery.addFilterQuery(
            SOURCE + ":" + source,
            TARGET_ID + ":" + target.toString(),
            PROCESSED + ":false");
        QueryResponse response = getSolr().query(solrQuery);
        return response.getResults().getNumFound();
    }

    @Override
    public List<Suggestion> findAllUnprocessedSuggestions(Context context, String source, UUID target,
        int pageSize, long offset, boolean ascending) throws SolrServerException, IOException {
        SolrQuery solrQuery = new SolrQuery();
        solrQuery.setRows(pageSize);
        solrQuery.setStart((int) offset);
        solrQuery.setQuery("*:*");
        solrQuery.addFilterQuery(
            SOURCE + ":" + source,
            TARGET_ID + ":" + target.toString(),
            PROCESSED + ":false");
        // primary sort by trust (score), then by date desc and title asc as tie-breakers
        if (ascending) {
            solrQuery.addSort(SortClause.asc("trust"));
        } else {
            solrQuery.addSort(SortClause.desc("trust"));
        }
        solrQuery.addSort(SortClause.desc("date"));
        solrQuery.addSort(SortClause.asc("title"));
        QueryResponse response = getSolr().query(solrQuery);
        List<Suggestion> suggestions = new ArrayList<Suggestion>();
        for (SolrDocument solrDoc : response.getResults()) {
            suggestions.add(convertSolrDoc(context, solrDoc, source));
        }
        return suggestions;
    }

    @Override
    public List<SuggestionTarget> findAllTargets(Context context, String source, int pageSize, long offset)
        throws SolrServerException, IOException {
        // page over the distinct targets using facet offset/limit
        SolrQuery solrQuery = new SolrQuery();
        solrQuery.setRows(0);
        solrQuery.setQuery(SOURCE + ":" + source);
        solrQuery.addFilterQuery(PROCESSED + ":false");
        solrQuery.setFacet(true);
        solrQuery.setFacetMinCount(1);
        solrQuery.addFacetField(TARGET_ID);
        solrQuery.setParam(FacetParams.FACET_OFFSET, String.valueOf(offset));
        solrQuery.setFacetLimit((int) (pageSize));
        QueryResponse response = getSolr().query(solrQuery);
        FacetField facetField = response.getFacetField(TARGET_ID);
        List<SuggestionTarget> suggestionTargets = new ArrayList<SuggestionTarget>();
        for (Count c : facetField.getValues()) {
            SuggestionTarget target = new SuggestionTarget();
            target.setSource(source);
            target.setTotal((int) c.getCount());
            target.setTarget(findItem(context, c.getName()));
            suggestionTargets.add(target);
        }
        return suggestionTargets;
    }

    @Override
    public Suggestion findUnprocessedSuggestion(Context context, String source, UUID target, String id)
        throws SolrServerException, IOException {
        SolrQuery solrQuery = new SolrQuery();
        solrQuery.setRows(1);
        solrQuery.setQuery("*:*");
        solrQuery.addFilterQuery(
            SOURCE + ":" + source,
            TARGET_ID + ":" + target.toString(),
            SUGGESTION_ID + ":\"" + id + "\"",
            PROCESSED + ":false");
        SolrDocumentList results = getSolr().query(solrQuery).getResults();
        return isEmpty(results) ? null : convertSolrDoc(context, results.get(0), source);
    }

    @Override
    public SuggestionTarget findTarget(Context context, String source, UUID target)
        throws SolrServerException, IOException {
        SolrQuery solrQuery = new SolrQuery();
        solrQuery.setRows(0);
        solrQuery.setQuery(SOURCE + ":" + source);
        solrQuery.addFilterQuery(
            TARGET_ID + ":" + target.toString(),
            PROCESSED + ":false");
        QueryResponse response = getSolr().query(solrQuery);
        SuggestionTarget sTarget = new SuggestionTarget();
        sTarget.setSource(source);
        sTarget.setTotal((int) response.getResults().getNumFound());
        Item itemTarget = findItem(context, target);
        if (itemTarget != null) {
            sTarget.setTarget(itemTarget);
        } else {
            // no matching repository item: no meaningful target can be returned
            return null;
        }
        return sTarget;
    }

    /** Rebuild a {@link Suggestion} from its Solr document representation. */
    private Suggestion convertSolrDoc(Context context, SolrDocument solrDoc, String sourceName) {
        Item target = findItem(context, (String) solrDoc.getFieldValue(TARGET_ID));
        Suggestion suggestion = new Suggestion(sourceName, target, (String) solrDoc.getFieldValue(SUGGESTION_ID));
        suggestion.setDisplay((String) solrDoc.getFieldValue(DISPLAY));
        suggestion.getMetadata()
            .add(new MetadataValueDTO("dc", "title", null, null, (String) solrDoc.getFieldValue(TITLE)));
        suggestion.getMetadata()
            .add(new MetadataValueDTO("dc", "date", "issued", null, (String) solrDoc.getFieldValue(DATE)));
        suggestion.getMetadata().add(
            new MetadataValueDTO("dc", "description", "abstract", null, (String) solrDoc.getFieldValue(ABSTRACT)));
        suggestion.setExternalSourceUri((String) solrDoc.getFieldValue(EXTERNAL_URI));
        if (solrDoc.containsKey(CATEGORY)) {
            for (Object o : solrDoc.getFieldValues(CATEGORY)) {
                suggestion.getMetadata().add(
                    new MetadataValueDTO("dc", "source", null, null, (String) o));
            }
        }
        if (solrDoc.containsKey(CONTRIBUTORS)) {
            for (Object o : solrDoc.getFieldValues(CONTRIBUTORS)) {
                suggestion.getMetadata().add(
                    new MetadataValueDTO("dc", "contributor", "author", null, (String) o));
            }
        }
        String evidencesJson = (String) solrDoc.getFieldValue(EVIDENCES);
        List<SuggestionEvidence> evidences = new LinkedList<SuggestionEvidence>();
        try {
            evidences = JSON_MAPPER.readValue(evidencesJson, new TypeReference<List<SuggestionEvidence>>() {});
        } catch (JsonProcessingException e) {
            // keep the suggestion usable even when the stored evidences are unreadable
            log.error("Unable to deserialize the evidences of suggestion " + suggestion.getID(), e);
        }
        suggestion.getEvidences().addAll(evidences);
        return suggestion;
    }

    /** Look up an item by UUID, wrapping the checked SQLException. */
    private Item findItem(Context context, UUID itemId) {
        try {
            return itemService.find(context, itemId);
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    }

    /** Look up an item by its string UUID representation. */
    private Item findItem(Context context, String itemId) {
        return findItem(context, UUIDUtils.fromString(itemId));
    }
}

View File

@@ -0,0 +1,99 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.suggestion;
import java.util.LinkedList;
import java.util.List;
import org.dspace.content.Item;
import org.dspace.content.dto.MetadataValueDTO;
/**
 * This entity contains metadatas that should be added to the targeted Item
 *
 * @author Andrea Bollini (andrea.bollini at 4science.it)
 */
public class Suggestion {
    /** id of the suggestion, built as source + ":" + target uuid + ":" + idPart */
    private String id;
    /** the dc.title of the item */
    private String display;
    /** the external source name the suggestion comes from */
    private String source;
    /** external uri of the item */
    private String externalSourceUri;
    /** item targeted by this suggestion */
    private Item target;
    /** evidences supporting this suggestion */
    private List<SuggestionEvidence> evidences = new LinkedList<SuggestionEvidence>();
    /** metadata carried by this suggestion */
    private List<MetadataValueDTO> metadata = new LinkedList<MetadataValueDTO>();

    /**
     * suggestion creation
     *
     * @param source name of the external source
     * @param target the targeted item in repository
     * @param idPart external item id, used mainly for suggestion @see #id creation
     */
    public Suggestion(String source, Item target, String idPart) {
        this.source = source;
        this.target = target;
        this.id = source + ":" + target.getID().toString() + ":" + idPart;
    }

    public String getID() {
        return id;
    }

    public String getDisplay() {
        return display;
    }

    public void setDisplay(String display) {
        this.display = display;
    }

    public String getSource() {
        return source;
    }

    public String getExternalSourceUri() {
        return externalSourceUri;
    }

    public void setExternalSourceUri(String externalSourceUri) {
        this.externalSourceUri = externalSourceUri;
    }

    public List<SuggestionEvidence> getEvidences() {
        return evidences;
    }

    public List<MetadataValueDTO> getMetadata() {
        return metadata;
    }

    public Item getTarget() {
        return target;
    }

    /**
     * Sum of the scores of all the evidences attached to this suggestion.
     *
     * @return the aggregated score, or null when no evidence is available
     */
    public Double getScore() {
        if (evidences == null || evidences.isEmpty()) {
            return null;
        }
        return evidences.stream().mapToDouble(SuggestionEvidence::getScore).sum();
    }
}

View File

@@ -0,0 +1,61 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.suggestion;
/**
 * This DTO class is returned by an {@link org.dspace.app.suggestion.openaire.EvidenceScorer} to model the concept of
 * an evidence / fact that has been used to evaluate the precision of a suggestion increasing or decreasing the score
 * of the suggestion.
 *
 * @author Andrea Bollini (andrea.bollini at 4science.it)
 */
public class SuggestionEvidence {
    /** name of the evidence */
    private String name;
    /** positive or negative value to influence the score of the suggestion */
    private double score;
    /** additional notes */
    private String notes;

    /** Default constructor, mainly intended for (de)serialization frameworks. */
    public SuggestionEvidence() {
    }

    /**
     * Build a fully populated evidence.
     *
     * @param name  name of the evidence
     * @param score positive or negative score contribution
     * @param notes additional free-text notes, may be null
     */
    public SuggestionEvidence(String name, double score, String notes) {
        this.name = name;
        this.score = score;
        this.notes = notes;
    }

    public String getName() {
        return name;
    }

    public double getScore() {
        return score;
    }

    public String getNotes() {
        return notes;
    }

    public void setName(String name) {
        this.name = name;
    }

    public void setScore(double score) {
        this.score = score;
    }

    public void setNotes(String notes) {
        this.notes = notes;
    }
}

View File

@@ -0,0 +1,54 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.suggestion;
import java.util.List;
import java.util.UUID;
import org.dspace.core.Context;
import org.dspace.external.model.ExternalDataObject;
/**
 * Interface for suggestion management like finding and counting.
 *
 * @see org.dspace.app.suggestion.SuggestionTarget
 * @author Francesco Bacchelli (francesco.bacchelli at 4science.com)
 */
public interface SuggestionProvider {

    /**
     * Find all suggestion targets (paged).
     *
     * @see org.dspace.app.suggestion.SuggestionTarget
     */
    List<SuggestionTarget> findAllTargets(Context context, int pageSize, long offset);

    /** Count all suggestion targets. */
    long countAllTargets(Context context);

    /** Find a suggestion target by its UUID. */
    SuggestionTarget findTarget(Context context, UUID target);

    /**
     * Find unprocessed suggestions (paged) by target UUID.
     *
     * @see org.dspace.app.suggestion.Suggestion
     */
    List<Suggestion> findAllUnprocessedSuggestions(Context context, UUID target, int pageSize, long offset,
        boolean ascending);

    /** Count the unprocessed suggestions related to the given target UUID. */
    long countUnprocessedSuggestionByTarget(Context context, UUID target);

    /** Find an unprocessed suggestion by target UUID and suggestion id. */
    Suggestion findUnprocessedSuggestion(Context context, UUID target, String id);

    /** Reject a specific suggestion by target @param target and by suggestion id @param idPart */
    void rejectSuggestion(Context context, UUID target, String idPart);

    /** Flag all the suggestions related to the given external data object as processed. */
    void flagRelatedSuggestionsAsProcessed(Context context, ExternalDataObject externalDataObject);
}

View File

@@ -0,0 +1,61 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.suggestion;
import java.util.List;
import java.util.UUID;
import org.dspace.core.Context;
/**
 * Service that handles {@link Suggestion}.
 *
 * @author Andrea Bollini (andrea.bollini at 4science.it)
 */
public interface SuggestionService {

    /** Find a {@link SuggestionTarget} by source name and target UUID. */
    SuggestionTarget find(Context context, String source, UUID id);

    /** Count all suggestion targets by suggestion source. */
    long countAll(Context context, String source);

    /** Find all suggestion targets by source (paged). */
    List<SuggestionTarget> findAllTargets(Context context, String source, int pageSize, long offset);

    /** Count the sources having at least one unprocessed suggestion for the given target uuid. */
    long countAllByTarget(Context context, UUID target);

    /** Find suggestion targets by targeted item (paged). */
    List<SuggestionTarget> findByTarget(Context context, UUID target, int pageSize, long offset);

    /** Find a suggestion source by source name. */
    SuggestionSource findSource(Context context, String source);

    /** Count all suggestion sources. */
    long countSources(Context context);

    /** Find all suggestion sources (paged). */
    List<SuggestionSource> findAllSources(Context context, int pageSize, long offset);

    /** Find an unprocessed suggestion by its full id (source:targetUUID:idPart). */
    Suggestion findUnprocessedSuggestion(Context context, String id);

    /** Reject a specific suggestion by its full id (source:targetUUID:idPart). */
    void rejectSuggestion(Context context, String id);

    /** Find all unprocessed suggestions by targeted item and external source (paged). */
    List<Suggestion> findByTargetAndSource(Context context, UUID target, String source, int pageSize,
        long offset, boolean ascending);

    /** Count all unprocessed suggestions by source name and targeted item id. */
    long countAllByTargetAndSource(Context context, String source, UUID target);

    /** Returns all the available suggestion providers. */
    List<SuggestionProvider> getSuggestionProviders();
}

View File

@@ -0,0 +1,194 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.suggestion;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.stream.Collectors;
import javax.annotation.Resource;
import org.apache.logging.log4j.Logger;
import org.dspace.core.Context;
import org.springframework.stereotype.Service;
/**
 * Implementation of {@link SuggestionService} that dispatches each request to the
 * {@link SuggestionProvider} registered for the requested source name.
 */
@Service
public class SuggestionServiceImpl implements SuggestionService {

    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SuggestionServiceImpl.class);

    /** Providers keyed by source name, injected from the "suggestionProviders" bean. */
    @Resource(name = "suggestionProviders")
    private Map<String, SuggestionProvider> providersMap;

    @Override
    public List<SuggestionProvider> getSuggestionProviders() {
        // NOTE: returns null (not an empty list) when no map has been injected;
        // kept as-is for backward compatibility with existing callers.
        if (providersMap != null) {
            return providersMap.values().stream().collect(Collectors.toList());
        }
        return null;
    }

    @Override
    public SuggestionTarget find(Context context, String source, UUID id) {
        SuggestionProvider provider = providersMap.get(source);
        return provider != null ? provider.findTarget(context, id) : null;
    }

    @Override
    public long countAll(Context context, String source) {
        SuggestionProvider provider = providersMap.get(source);
        return provider != null ? provider.countAllTargets(context) : 0;
    }

    @Override
    public List<SuggestionTarget> findAllTargets(Context context, String source, int pageSize, long offset) {
        SuggestionProvider provider = providersMap.get(source);
        return provider != null ? provider.findAllTargets(context, pageSize, offset) : null;
    }

    @Override
    public long countAllByTarget(Context context, UUID target) {
        // counts the sources that have at least one unprocessed suggestion for the target
        int count = 0;
        for (String provider : providersMap.keySet()) {
            if (providersMap.get(provider).countUnprocessedSuggestionByTarget(context, target) > 0) {
                count++;
            }
        }
        return count;
    }

    @Override
    public List<SuggestionTarget> findByTarget(Context context, UUID target, int pageSize, long offset) {
        List<SuggestionTarget> fullSourceTargets = new ArrayList<SuggestionTarget>();
        for (String source : providersMap.keySet()) {
            // all the suggestion target will be related to the same target (i.e. the same researcher - person item)
            SuggestionTarget sTarget = providersMap.get(source).findTarget(context, target);
            if (sTarget != null && sTarget.getTotal() > 0) {
                fullSourceTargets.add(sTarget);
            }
        }
        // sort by total descending; Comparator.comparingInt avoids the integer
        // subtraction overflow of a hand-written (a - b) comparator
        fullSourceTargets.sort(Comparator.comparingInt(SuggestionTarget::getTotal).reversed());
        // this list will be as large as the number of sources available in the repository so it is unlikely that
        // real pagination will occur
        return fullSourceTargets.stream().skip(offset).limit(pageSize).collect(Collectors.toList());
    }

    @Override
    public long countSources(Context context) {
        return providersMap.size();
    }

    @Override
    public SuggestionSource findSource(Context context, String source) {
        SuggestionProvider provider = providersMap.get(source);
        if (provider == null) {
            return null;
        }
        SuggestionSource ssource = new SuggestionSource(source);
        ssource.setTotal((int) provider.countAllTargets(context));
        return ssource;
    }

    @Override
    public List<SuggestionSource> findAllSources(Context context, int pageSize, long offset) {
        return getSources(context).stream().skip(offset).limit(pageSize)
            .collect(Collectors.toList());
    }

    /** Build a {@link SuggestionSource} summary for every configured provider. */
    private List<SuggestionSource> getSources(Context context) {
        List<SuggestionSource> results = new ArrayList<SuggestionSource>();
        for (String source : providersMap.keySet()) {
            SuggestionSource ssource = new SuggestionSource(source);
            ssource.setTotal((int) providersMap.get(source).countAllTargets(context));
            results.add(ssource);
        }
        return results;
    }

    @Override
    public long countAllByTargetAndSource(Context context, String source, UUID target) {
        SuggestionProvider provider = providersMap.get(source);
        return provider != null ? provider.countUnprocessedSuggestionByTarget(context, target) : 0;
    }

    @Override
    public List<Suggestion> findByTargetAndSource(Context context, UUID target, String source, int pageSize,
        long offset, boolean ascending) {
        SuggestionProvider provider = providersMap.get(source);
        return provider != null
            ? provider.findAllUnprocessedSuggestions(context, target, pageSize, offset, ascending)
            : null;
    }

    @Override
    public Suggestion findUnprocessedSuggestion(Context context, String id) {
        String[] parts = parseSuggestionId(id);
        if (parts == null) {
            log.warn("findSuggestion got an invalid id {}, return null", id);
            return null;
        }
        SuggestionProvider provider = providersMap.get(parts[0]);
        return provider != null
            ? provider.findUnprocessedSuggestion(context, UUID.fromString(parts[1]), parts[2])
            : null;
    }

    @Override
    public void rejectSuggestion(Context context, String id) {
        String[] parts = parseSuggestionId(id);
        if (parts == null) {
            log.warn("rejectSuggestion got an invalid id {}, doing nothing", id);
            return;
        }
        SuggestionProvider provider = providersMap.get(parts[0]);
        if (provider != null) {
            provider.rejectSuggestion(context, UUID.fromString(parts[1]), parts[2]);
        }
    }

    /**
     * Split a full suggestion id (source:targetUUID:idPart) into its three parts,
     * validating that the middle part is a UUID. Centralizes the parsing that was
     * previously duplicated (with an unreachable length check) in
     * findUnprocessedSuggestion and rejectSuggestion.
     *
     * @param id the full suggestion id
     * @return a {source, targetUUID, idPart} array, or null when the id is malformed
     */
    private String[] parseSuggestionId(String id) {
        try {
            String[] split = id.split(":", 3);
            if (split.length != 3) {
                return null;
            }
            UUID.fromString(split[1]);
            return split;
        } catch (Exception e) {
            return null;
        }
    }
}

View File

@@ -0,0 +1,49 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.suggestion;
/**
 * This DTO class is used to pass around the number of items interested by suggestion provided by a specific source
 * (i.e. openaire)
 *
 * @author Andrea Bollini (andrea.bollini at 4science.it)
 */
public class SuggestionSource {
    /** source name of the suggestion; it also acts as the DTO identifier */
    private String name;
    /** number of targeted items */
    private int total;

    /** Default constructor, mainly intended for (de)serialization frameworks. */
    public SuggestionSource() {
    }

    /**
     * Summarize the available suggestions from a source.
     *
     * @param name the name must be not null
     */
    public SuggestionSource(String name) {
        this.name = name;
    }

    public String getID() {
        return name;
    }

    public int getTotal() {
        return total;
    }

    public void setTotal(int total) {
        this.total = total;
    }
}

View File

@@ -0,0 +1,75 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.suggestion;
import org.dspace.content.Item;
/**
 * This DTO class is used to pass around the number of suggestions available from a specific source for a target
 * repository item
 *
 * @author Andrea Bollini (andrea.bollini at 4science.it)
 */
public class SuggestionTarget {
    /** the item targeted */
    private Item target;
    /** source name of the suggestion */
    private String source;
    /** total count of suggestions for same target and source */
    private int total;

    /** Default constructor, mainly intended for (de)serialization frameworks. */
    public SuggestionTarget() {
    }

    /**
     * Wrap a target repository item (usually a person item) into a suggestion target.
     *
     * @param item must be not null
     */
    public SuggestionTarget(Item item) {
        this.target = item;
    }

    /**
     * The suggestion target uses the concatenation of the source and target uuid separated by colon as id
     *
     * @return the source:uuid of the wrapped item
     */
    public String getID() {
        String targetPart = target == null ? "" : target.getID().toString();
        return source + ":" + targetPart;
    }

    public Item getTarget() {
        return target;
    }

    public void setTarget(Item target) {
        this.target = target;
    }

    public String getSource() {
        return source;
    }

    public void setSource(String source) {
        this.source = source;
    }

    public int getTotal() {
        return total;
    }

    public void setTotal(int total) {
        this.total = total;
    }
}

View File

@@ -0,0 +1,111 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.suggestion;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.dspace.external.model.ExternalDataObject;
/**
 * This utility class provides convenient methods to deal with the
 * {@link ExternalDataObject} for the purpose of the Suggestion framework
 *
 * @author Andrea Bollini (andrea.bollini at 4science.it)
 */
public class SuggestionUtils {

    private SuggestionUtils() {
    }

    /**
     * Extract from an ExternalDataObject all the values of the metadata field
     * identified by the given schema / element / qualifier triplet.
     *
     * @param record    the ExternalDataObject to extract metadata from
     * @param schema    schema of the metadata field to match
     * @param element   element of the metadata field to match
     * @param qualifier qualifier of the metadata field to match (null matches an unqualified field)
     * @return all values of the matching metadata field, an empty list when nothing matches
     */
    public static List<String> getAllEntriesByMetadatum(ExternalDataObject record, String schema, String element,
            String qualifier) {
        return record.getMetadata().stream()
                .filter(x ->
                    StringUtils.equals(x.getSchema(), schema)
                        && StringUtils.equals(x.getElement(), element)
                        && StringUtils.equals(x.getQualifier(), qualifier))
                .map(x -> x.getValue()).collect(Collectors.toList());
    }

    /**
     * Extract from an ExternalDataObject all the values of the given metadata field.
     *
     * @param record           the ExternalDataObject to extract metadata from
     * @param metadataFieldKey the metadata field key (i.e. dc.title or dc.contributor.author),
     *                         the jolly char is not supported
     * @return all values of the matching metadata field, an empty list when nothing matches
     *         or the key is null or malformed
     */
    public static List<String> getAllEntriesByMetadatum(ExternalDataObject record, String metadataFieldKey) {
        String[] fields = splitMetadataFieldKey(metadataFieldKey);
        if (fields == null) {
            return Collections.emptyList();
        }
        return getAllEntriesByMetadatum(record, fields[0], fields[1], fields[2]);
    }

    /**
     * Extract from an ExternalDataObject the value of the first occurrence of the metadata
     * field identified by the given schema / element / qualifier triplet.
     *
     * @param record    the ExternalDataObject to extract metadata from
     * @param schema    schema of the metadata field to match
     * @param element   element of the metadata field to match
     * @param qualifier qualifier of the metadata field to match (null matches an unqualified field)
     * @return value of the first matching metadata entry, null when nothing matches
     */
    public static String getFirstEntryByMetadatum(ExternalDataObject record, String schema, String element,
            String qualifier) {
        return record.getMetadata().stream()
                .filter(x ->
                    StringUtils.equals(x.getSchema(), schema)
                        && StringUtils.equals(x.getElement(), element)
                        && StringUtils.equals(x.getQualifier(), qualifier))
                .map(x -> x.getValue()).findFirst().orElse(null);
    }

    /**
     * Extract from an ExternalDataObject the value of the first occurrence of the given
     * metadata field.
     *
     * @param record           the ExternalDataObject to extract metadata from
     * @param metadataFieldKey the metadata field key (i.e. dc.title or dc.contributor.author),
     *                         the jolly char is not supported
     * @return value of the first matching metadata entry, null when nothing matches
     *         or the key is null or malformed
     */
    public static String getFirstEntryByMetadatum(ExternalDataObject record, String metadataFieldKey) {
        String[] fields = splitMetadataFieldKey(metadataFieldKey);
        if (fields == null) {
            return null;
        }
        return getFirstEntryByMetadatum(record, fields[0], fields[1], fields[2]);
    }

    /**
     * Parse a dotted metadata field key into its schema / element / qualifier parts.
     *
     * @param metadataFieldKey the key to parse, e.g. dc.title or dc.contributor.author
     * @return a three element array {schema, element, qualifier} where qualifier may be null,
     *         or null when the key is null or not in the schema.element[.qualifier] form
     */
    private static String[] splitMetadataFieldKey(String metadataFieldKey) {
        if (metadataFieldKey == null) {
            return null;
        }
        String[] fields = metadataFieldKey.split("\\.");
        // a key without an element part used to raise an ArrayIndexOutOfBoundsException;
        // treat malformed keys as "no match" instead
        if (fields.length < 2 || fields.length > 3) {
            return null;
        }
        return new String[] { fields[0], fields[1], fields.length == 3 ? fields[2] : null };
    }
}

View File

@@ -0,0 +1,151 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.suggestion.openaire;
import static org.dspace.app.suggestion.SuggestionUtils.getAllEntriesByMetadatum;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Optional;
import java.util.stream.Collectors;
import com.ibm.icu.text.CharsetDetector;
import com.ibm.icu.text.CharsetMatch;
import com.ibm.icu.text.Normalizer;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.suggestion.SuggestionEvidence;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.service.ItemService;
import org.dspace.external.model.ExternalDataObject;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Implementation of {@see org.dspace.app.suggestion.oaire.EvidenceScorer} which evaluate ImportRecords
 * based on Author's name.
 *
 * @author Andrea Bollini (andrea.bollini at 4science dot it)
 * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
 *
 */
public class AuthorNamesScorer implements EvidenceScorer {

    /**
     * metadata keys of the external record that contain the author names to match
     * (e.g. dc.contributor.author)
     */
    private List<String> contributorMetadata;

    /**
     * metadata keys of the researcher item that contain the name variants used for
     * the match
     */
    private List<String> names;

    @Autowired
    private ItemService itemService;

    /**
     * returns the metadata keys of the external record holding the author names
     * @return metadata keys
     */
    public List<String> getContributorMetadata() {
        return contributorMetadata;
    }

    /**
     * set the metadata keys of the external record holding the author names
     */
    public void setContributorMetadata(List<String> contributorMetadata) {
        this.contributorMetadata = contributorMetadata;
    }

    /**
     * return the metadata keys of the researcher item used to extract the name variants
     * @return metadata keys
     */
    public List<String> getNames() {
        return names;
    }

    /**
     * set the metadata keys of the researcher item used to extract the name variants
     */
    public void setNames(List<String> names) {
        this.names = names;
    }

    /**
     * Method which is responsible to evaluate ImportRecord based on authors name.
     * This method extracts the researcher name variants from the Item using the names keys
     * and tries to match them with the author values extracted from the ImportRecord using
     * the contributorMetadata keys.
     * ImportRecords which don't match are discarded (null evidence).
     *
     * @param importRecord the import record to check
     * @param researcher DSpace item
     * @return the generated evidence or null if the record must be discarded
     */
    @Override
    public SuggestionEvidence computeEvidence(Item researcher, ExternalDataObject importRecord) {
        // pairs of [normalized name, original name] from the researcher profile
        List<String[]> researcherNames = searchMetadataValues(researcher);
        // scales the score so that a match on the longest known variant scores 100
        int maxNameLength = researcherNames.stream().mapToInt(n -> n[0].length()).max().orElse(1);
        List<String> metadataAuthors = new ArrayList<>();
        for (String contributorMetadatum : contributorMetadata) {
            metadataAuthors.addAll(getAllEntriesByMetadatum(importRecord, contributorMetadatum));
        }
        List<String> normalizedMetadataAuthors = metadataAuthors.stream().map(x -> normalize(x))
                .collect(Collectors.toList());
        int idx = 0;
        for (String nMetadataAuthor : normalizedMetadataAuthors) {
            Optional<String[]> found = researcherNames.stream()
                    .filter(a -> StringUtils.equalsIgnoreCase(a[0], nMetadataAuthor)).findFirst();
            if (found.isPresent()) {
                return new SuggestionEvidence(this.getClass().getSimpleName(),
                        100 * ((double) nMetadataAuthor.length() / (double) maxNameLength),
                        "The author " + metadataAuthors.get(idx) + " at position " + (idx + 1)
                                + " in the authors list matches the name " + found.get()[1]
                                + " in the researcher profile");
            }
            idx++;
        }
        return null;
    }

    /**
     * Return list of [normalized value, original value] pairs built from the metadata keys
     * defined in the class level variable names.
     *
     * @param researcher DSpace item
     * @return list of metadata value pairs
     */
    private List<String[]> searchMetadataValues(Item researcher) {
        List<String[]> authors = new ArrayList<String[]>();
        for (String name : names) {
            List<MetadataValue> values = itemService.getMetadataByMetadataString(researcher, name);
            if (values != null) {
                for (MetadataValue v : values) {
                    authors.add(new String[] {normalize(v.getValue()), v.getValue()});
                }
            }
        }
        return authors;
    }

    /**
     * Cleans up undesired characters: the value is unicode-decomposed (NFD), every
     * non-letter character is replaced by a space, the result is lower-cased and its
     * whitespace-separated tokens are sorted and concatenated, making the comparison
     * insensitive to token order, case, accents and punctuation.
     *
     * @param value the string to clean up
     * @return cleaned up string
     */
    private String normalize(String value) {
        String norm = Normalizer.normalize(value, Normalizer.NFD);
        CharsetDetector cd = new CharsetDetector();
        // NOTE(review): value.getBytes() uses the platform default charset, which may
        // skew the charset/language detection on non-UTF-8 platforms -- confirm intended
        cd.setText(value.getBytes());
        CharsetMatch detect = cd.detect();
        if (detect != null && detect.getLanguage() != null) {
            norm = norm.replaceAll("[^\\p{L}]", " ").toLowerCase(new Locale(detect.getLanguage()));
        } else {
            norm = norm.replaceAll("[^\\p{L}]", " ").toLowerCase();
        }
        return Arrays.asList(norm.split("\\s+")).stream().sorted().collect(Collectors.joining());
    }
}

View File

@@ -0,0 +1,214 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.suggestion.openaire;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
import org.dspace.app.suggestion.SuggestionEvidence;
import org.dspace.app.suggestion.SuggestionUtils;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.service.ItemService;
import org.dspace.external.model.ExternalDataObject;
import org.dspace.util.MultiFormatDateParser;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Implementation of {@see org.dspace.app.suggestion.oaire.EvidenceScorer} which evaluate ImportRecords
 * based on the distance from a date extracted from the ResearcherProfile (birthday / graduation date)
 *
 * @author Andrea Bollini (andrea.bollini at 4science dot it)
 *
 */
public class DateScorer implements EvidenceScorer {

    /**
     * if available it should contain the metadata field key in the form (schema.element[.qualifier]) that contains
     * the birth date of the researcher
     */
    private String birthDateMetadata;

    /**
     * if available it should contain the metadata field key in the form (schema.element[.qualifier]) that contains
     * the date of graduation of the researcher. If the metadata has multiple values the min will be used
     */
    private String educationDateMetadata;

    /**
     * The minimal age that is expected for a researcher to be a potential author of a scholarly contribution
     * (i.e. the minimum delta from the publication date and the birth date)
     */
    private int birthDateDelta = 20;

    /**
     * The maximum age that is expected for a researcher to be a potential author of a scholarly contribution
     * (i.e. the maximum delta from the publication date and the birth date)
     */
    private int birthDateRange = 50;

    /**
     * The number of year from/before the graduation that is expected for a researcher to be a potential
     * author of a scholarly contribution (i.e. the minimum delta from the publication date and the first
     * graduation date)
     */
    private int educationDateDelta = -3;

    /**
     * The maximum scientific longevity that is expected for a researcher from its graduation to be a potential
     * author of a scholarly contribution (i.e. the maximum delta from the publication date and the first
     * graduation date)
     */
    private int educationDateRange = 50;

    @Autowired
    private ItemService itemService;

    /**
     * the metadata used in the publication to track the publication date (i.e. dc.date.issued)
     */
    private String publicationDateMetadata;

    public void setItemService(ItemService itemService) {
        this.itemService = itemService;
    }

    public void setBirthDateMetadata(String birthDate) {
        this.birthDateMetadata = birthDate;
    }

    public String getBirthDateMetadata() {
        return birthDateMetadata;
    }

    public void setEducationDateMetadata(String educationDate) {
        this.educationDateMetadata = educationDate;
    }

    public String getEducationDateMetadata() {
        return educationDateMetadata;
    }

    public void setBirthDateDelta(int birthDateDelta) {
        this.birthDateDelta = birthDateDelta;
    }

    public void setBirthDateRange(int birthDateRange) {
        this.birthDateRange = birthDateRange;
    }

    public void setEducationDateDelta(int educationDateDelta) {
        this.educationDateDelta = educationDateDelta;
    }

    public void setEducationDateRange(int educationDateRange) {
        this.educationDateRange = educationDateRange;
    }

    public void setPublicationDateMetadata(String publicationDateMetadata) {
        this.publicationDateMetadata = publicationDateMetadata;
    }

    /**
     * Method which is responsible to evaluate ImportRecord based on the publication date.
     * ImportRecords which have a date outside the defined or calculated expected range will be discarded.
     * {@link DateScorer#birthDateMetadata}, {@link DateScorer#educationDateMetadata}
     *
     * @param importRecord the ExternalDataObject to check
     * @param researcher DSpace item
     * @return the generated evidence or null if the record must be discarded
     */
    @Override
    public SuggestionEvidence computeEvidence(Item researcher, ExternalDataObject importRecord) {
        Integer[] range = calculateRange(researcher);
        if (range == null) {
            // no usable profile dates: keep the record but contribute no score
            return new SuggestionEvidence(this.getClass().getSimpleName(),
                    0,
                    "No assumption was possible about the publication year range. "
                        + "Please consider setting your birthday in your profile.");
        } else {
            String optDate = SuggestionUtils.getFirstEntryByMetadatum(importRecord, publicationDateMetadata);
            int year = getYear(optDate);
            if (year > 0) {
                if ((range[0] == null || year >= range[0]) &&
                    (range[1] == null || year <= range[1])) {
                    return new SuggestionEvidence(this.getClass().getSimpleName(),
                            10,
                            "The publication date is within the expected range [" + range[0] + ", "
                                + range[1] + "]");
                } else {
                    // outside the range, discard the suggestion
                    return null;
                }
            } else {
                // unparsable or missing publication date: keep the record, no score
                return new SuggestionEvidence(this.getClass().getSimpleName(),
                        0,
                        "No assumption was possible as the publication date is " + (optDate != null
                                ? "unprocessable [" + optDate + "]"
                                : "unknown"));
            }
        }
    }

    /**
     * returns min and max year interval in between it's probably that the researcher
     * actually contributed to the suggested item. The education date, when available,
     * takes precedence over the birth date.
     *
     * @param researcher the researcher item
     * @return a two element [min, max] array or null when no date is available
     */
    private Integer[] calculateRange(Item researcher) {
        String birthDateStr = getSingleValue(researcher, birthDateMetadata);
        int birthDateYear = getYear(birthDateStr);
        int educationDateYear = getListMetadataValues(researcher, educationDateMetadata).stream()
                .mapToInt(x -> getYear(x.getValue())).filter(d -> d > 0).min().orElse(-1);
        if (educationDateYear > 0) {
            return new Integer[] {
                educationDateYear + educationDateDelta,
                educationDateYear + educationDateDelta + educationDateRange
            };
        } else if (birthDateYear > 0) {
            return new Integer[] {
                birthDateYear + birthDateDelta,
                birthDateYear + birthDateDelta + birthDateRange
            };
        } else {
            return null;
        }
    }

    /** Return all values of the given metadata key, or an empty list when the key is null. */
    private List<MetadataValue> getListMetadataValues(Item researcher, String metadataKey) {
        if (metadataKey != null) {
            return itemService.getMetadataByMetadataString(researcher, metadataKey);
        } else {
            return Collections.emptyList();
        }
    }

    /** Return the single value of the given metadata key, or null when the key is null. */
    private String getSingleValue(Item researcher, String metadataKey) {
        if (metadataKey != null) {
            return itemService.getMetadata(researcher, metadataKey);
        }
        return null;
    }

    /**
     * Extract the year from a date string in any of the supported formats.
     *
     * @param dateStr the date string, may be null
     * @return the year, or -1 when the string is null or unparsable
     */
    private int getYear(String dateStr) {
        int year = -1;
        if (dateStr != null) {
            Date date = MultiFormatDateParser.parse(dateStr);
            if (date != null) {
                Calendar calendar = new GregorianCalendar();
                calendar.setTime(date);
                year = calendar.get(Calendar.YEAR);
            }
        }
        return year;
    }
}

View File

@@ -0,0 +1,37 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.suggestion.openaire;
import org.dspace.app.suggestion.SuggestionEvidence;
import org.dspace.content.Item;
import org.dspace.external.model.ExternalDataObject;
/**
 * Interface used in {@see org.dspace.app.suggestion.oaire.PublicationApproverServiceImpl}
 * to construct the filtering pipeline.
 *
 * For each EvidenceScorer in the pipeline, the service calls the computeEvidence method;
 * a null evidence discards the record.
 *
 * @author Andrea Bollini (andrea.bollini at 4science dot it)
 * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
 *
 */
public interface EvidenceScorer {

    /**
     * Method to compute the suggestion evidence of an ImportRecord, a null evidence
     * would lead the record to be discarded.
     *
     * @param researcher   the DSpace item representing the researcher profile
     * @param importRecord the external record to evaluate
     * @return the generated suggestion evidence or null if the record should be
     *         discarded
     */
    public SuggestionEvidence computeEvidence(Item researcher, ExternalDataObject importRecord);
}

View File

@@ -0,0 +1,256 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.suggestion.openaire;
import static org.dspace.app.suggestion.SuggestionUtils.getAllEntriesByMetadatum;
import static org.dspace.app.suggestion.SuggestionUtils.getFirstEntryByMetadatum;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.commons.lang3.StringUtils;
import org.apache.solr.client.solrj.SolrServerException;
import org.dspace.app.suggestion.SolrSuggestionProvider;
import org.dspace.app.suggestion.Suggestion;
import org.dspace.app.suggestion.SuggestionEvidence;
import org.dspace.content.Item;
import org.dspace.content.dto.MetadataValueDTO;
import org.dspace.core.Context;
import org.dspace.external.model.ExternalDataObject;
import org.dspace.external.provider.ExternalDataProvider;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Class responsible to load and manage ImportRecords from OpenAIRE
 *
 * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
 *
 */
public class PublicationLoader extends SolrSuggestionProvider {

    /** metadata keys of the researcher item whose values are used as search terms */
    private List<String> names;

    /** provider used to query the external source and to build the external source uri */
    private ExternalDataProvider primaryProvider;

    /** additional providers whose records are also accepted as potential suggestions */
    private List<ExternalDataProvider> otherProviders;

    @Autowired
    private ConfigurationService configurationService;

    /** scorers applied in order to each record; a null evidence discards the record */
    private List<EvidenceScorer> pipeline;

    public void setPrimaryProvider(ExternalDataProvider primaryProvider) {
        this.primaryProvider = primaryProvider;
    }

    public void setOtherProviders(List<ExternalDataProvider> otherProviders) {
        this.otherProviders = otherProviders;
    }

    /**
     * Set the pipeline of Approver
     * @param pipeline list Approver
     */
    public void setPipeline(List<EvidenceScorer> pipeline) {
        this.pipeline = pipeline;
    }

    /**
     * This method filter a list of ImportRecords using a pipeline of AuthorNamesApprover
     * and return a filtered list of ImportRecords.
     *
     * @see org.dspace.app.suggestion.openaire.AuthorNamesScorer
     * @param researcher the researcher Item
     * @param importRecords List of import record
     * @return a list of filtered import records
     */
    public List<Suggestion> reduceAndTransform(Item researcher, List<ExternalDataObject> importRecords) {
        List<Suggestion> results = new ArrayList<>();
        for (ExternalDataObject r : importRecords) {
            boolean skip = false;
            List<SuggestionEvidence> evidences = new ArrayList<SuggestionEvidence>();
            for (EvidenceScorer authorNameApprover : pipeline) {
                SuggestionEvidence evidence = authorNameApprover.computeEvidence(researcher, r);
                if (evidence != null) {
                    evidences.add(evidence);
                } else {
                    // a null evidence means the record must be discarded
                    skip = true;
                    break;
                }
            }
            if (!skip) {
                Suggestion suggestion = translateImportRecordToSuggestion(researcher, r);
                suggestion.getEvidences().addAll(evidences);
                results.add(suggestion);
            }
        }
        return results;
    }

    /**
     * Save a List of ImportRecord into Solr.
     * ImportRecord will be translate into a SolrDocument by the method translateImportRecordToSolrDocument.
     * Records are fetched page by page until the external source returns no more results.
     *
     * @param context the DSpace Context
     * @param researcher a DSpace Item
     * @throws SolrServerException
     * @throws IOException
     */
    public void importAuthorRecords(Context context, Item researcher)
        throws SolrServerException, IOException {
        int offset = 0;
        int limit = 10;
        List<String> searchValues = searchMetadataValues(researcher);
        while (true) {
            List<ExternalDataObject> metadata = getImportRecords(searchValues, researcher, offset, limit);
            if (metadata.isEmpty()) {
                break;
            }
            offset += limit;
            List<Suggestion> records = reduceAndTransform(researcher, metadata);
            for (Suggestion record : records) {
                solrSuggestionStorageService.addSuggestion(record, false, false);
            }
        }
        solrSuggestionStorageService.commit();
    }

    /**
     * Translate an ImportRecord into a Suggestion
     * @param item DSpace item
     * @param record ImportRecord
     * @return Suggestion
     */
    private Suggestion translateImportRecordToSuggestion(Item item, ExternalDataObject record) {
        String openAireId = record.getId();
        Suggestion suggestion = new Suggestion(getSourceName(), item, openAireId);
        suggestion.setDisplay(getFirstEntryByMetadatum(record, "dc", "title", null));
        suggestion.getMetadata().add(
            new MetadataValueDTO("dc", "title", null, null, getFirstEntryByMetadatum(record, "dc", "title", null)));
        suggestion.getMetadata().add(new MetadataValueDTO("dc", "date", "issued", null,
            getFirstEntryByMetadatum(record, "dc", "date", "issued")));
        suggestion.getMetadata().add(new MetadataValueDTO("dc", "description", "abstract", null,
            getFirstEntryByMetadatum(record, "dc", "description", "abstract")));
        suggestion.setExternalSourceUri(configurationService.getProperty("dspace.server.url")
            + "/api/integration/externalsources/" + primaryProvider.getSourceIdentifier() + "/entryValues/"
            + openAireId);
        for (String o : getAllEntriesByMetadatum(record, "dc", "source", null)) {
            suggestion.getMetadata().add(new MetadataValueDTO("dc", "source", null, null, o));
        }
        for (String o : getAllEntriesByMetadatum(record, "dc", "contributor", "author")) {
            suggestion.getMetadata().add(new MetadataValueDTO("dc", "contributor", "author", null, o));
        }
        return suggestion;
    }

    public List<String> getNames() {
        return names;
    }

    public void setNames(List<String> names) {
        this.names = names;
    }

    /**
     * Load metadata from OpenAIRE using the import service. The service use the value
     * get from metadata key defined in class level variable names as author to query OpenAIRE.
     *
     * @see org.dspace.importer.external.openaire.service.OpenAireImportMetadataSourceServiceImpl
     * @param searchValues query
     * @param researcher item to extract metadata from; NOTE(review): currently unused here,
     *                   the search terms come entirely from searchValues -- confirm intended
     * @param limit for pagination purpose
     * @param offset for pagination purpose
     * @return list of ImportRecord
     */
    private List<ExternalDataObject> getImportRecords(List<String> searchValues,
            Item researcher, int offset, int limit) {
        List<ExternalDataObject> matchingRecords = new ArrayList<>();
        for (String searchValue : searchValues) {
            matchingRecords.addAll(
                primaryProvider.searchExternalDataObjects(searchValue, offset, limit));
        }
        return removeDuplicates(matchingRecords);
    }

    /**
     * This method remove duplicates from importRecords list.
     * An element is a duplicate if in the list exist another element
     * with the same id. Records without an id are dropped as well
     * (they were treated as duplicates by the previous implementation too).
     *
     * @param importRecords list of ImportRecord
     * @return list of ImportRecords without duplicates, in the original order
     */
    private List<ExternalDataObject> removeDuplicates(List<ExternalDataObject> importRecords) {
        // Set-based dedupe keeps this linear instead of scanning the result list per record
        Set<String> seenIds = new HashSet<>();
        List<ExternalDataObject> filteredRecords = new ArrayList<>();
        for (ExternalDataObject currentRecord : importRecords) {
            String id = currentRecord.getId();
            if (id != null && seenIds.add(id)) {
                filteredRecords.add(currentRecord);
            }
        }
        return filteredRecords;
    }

    /**
     * Return list of Item metadata values starting from metadata keys defined in class level variable names.
     *
     * @param researcher DSpace item
     * @return list of metadata values
     */
    private List<String> searchMetadataValues(Item researcher) {
        List<String> authors = new ArrayList<String>();
        for (String name : names) {
            String value = itemService.getMetadata(researcher, name);
            if (value != null) {
                authors.add(value);
            }
        }
        return authors;
    }

    @Override
    protected boolean isExternalDataObjectPotentiallySuggested(Context context, ExternalDataObject externalDataObject) {
        // accept records coming from the primary provider or any configured extra provider
        if (StringUtils.equals(externalDataObject.getSource(), primaryProvider.getSourceIdentifier())) {
            return true;
        } else if (otherProviders != null) {
            return otherProviders.stream()
                .anyMatch(x -> StringUtils.equals(externalDataObject.getSource(), x.getSourceIdentifier()));
        } else {
            return false;
        }
    }
}

View File

@@ -0,0 +1,29 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.suggestion.openaire;
import org.apache.commons.cli.Options;
/**
 * Extension of {@link PublicationLoaderScriptConfiguration} for CLI.
 *
 * @author Alessandro Martelli (alessandro.martelli at 4science.it)
 */
public class PublicationLoaderCliScriptConfiguration<T extends PublicationLoaderRunnable>
        extends PublicationLoaderScriptConfiguration<T> {

    @Override
    public Options getOptions() {
        // extend the base option set with the CLI-only help flag
        Options cliOptions = super.getOptions();
        cliOptions.addOption("h", "help", false, "help");
        cliOptions.getOption("h").setType(boolean.class);
        super.options = cliOptions;
        return cliOptions;
    }
}

View File

@@ -0,0 +1,115 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.suggestion.openaire;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

import org.apache.commons.cli.ParseException;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.utils.DiscoverQueryBuilder;
import org.dspace.discovery.utils.parameter.QueryBuilderSearchFilter;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.sort.SortOption;
import org.dspace.utils.DSpace;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Runner responsible to import metadata about authors from OpenAIRE to Solr.
 * This runner works in two ways:
 * If -s parameter with a valid UUID is received, then the specific researcher
 * with this UUID will be used.
 * Invocation without any parameter results in massive import, processing all
 * authors registered in DSpace.
 *
 * @author Alessandro Martelli (alessandro.martelli at 4science.it)
 */
public class PublicationLoaderRunnable
    extends DSpaceRunnable<PublicationLoaderScriptConfiguration<PublicationLoaderRunnable>> {

    private static final Logger LOGGER = LoggerFactory.getLogger(PublicationLoaderRunnable.class);

    /** loader used to fetch and store the suggestions, resolved in setup() */
    private PublicationLoader oairePublicationLoader = null;

    protected Context context;

    /** UUID of the single researcher to process; null means process all researchers */
    protected String profile;

    @Override
    @SuppressWarnings({ "rawtypes", "unchecked" })
    public PublicationLoaderScriptConfiguration<PublicationLoaderRunnable> getScriptConfiguration() {
        PublicationLoaderScriptConfiguration configuration = new DSpace().getServiceManager()
            .getServiceByName("import-openaire-suggestions", PublicationLoaderScriptConfiguration.class);
        return configuration;
    }

    @Override
    public void setup() throws ParseException {
        oairePublicationLoader = new DSpace().getServiceManager().getServiceByName(
                "OpenairePublicationLoader", PublicationLoader.class);
        profile = commandLine.getOptionValue("s");
        if (profile == null) {
            LOGGER.info("No argument for -s, process all profile");
        } else {
            LOGGER.info("Process eperson item with UUID {}", profile);
        }
    }

    @Override
    public void internalRun() throws Exception {
        context = new Context();
        Iterator<Item> researchers = getResearchers(profile);
        while (researchers.hasNext()) {
            Item researcher = researchers.next();
            oairePublicationLoader.importAuthorRecords(context, researcher);
        }
    }

    /**
     * Get the Item(s) which map a researcher from Solr. If the uuid is specified,
     * the researcher with this UUID will be chosen. If the uuid doesn't match any
     * researcher, or the search fails, an empty iterator is returned. If uuid is
     * null, all researchers will be returned.
     *
     * @param profileUUID uuid of the researcher. If null, all researchers will be
     *                    returned.
     * @return the researcher with specified UUID or all researchers
     */
    @SuppressWarnings("rawtypes")
    private Iterator<Item> getResearchers(String profileUUID) {
        SearchService searchService = new DSpace().getSingletonService(SearchService.class);
        DiscoverQueryBuilder queryBuilder = SearchUtils.getQueryBuilder();
        List<QueryBuilderSearchFilter> filters = new ArrayList<QueryBuilderSearchFilter>();
        String query = "*:*";
        if (profileUUID != null) {
            query = "search.resourceid:" + profileUUID;
        }
        try {
            // BUGFIX: the offset was previously passed as Long.getLong("0"), which reads
            // the system property named "0" and returns null; pass the literal offset 0
            DiscoverQuery discoverQuery = queryBuilder.buildQuery(context, null,
                    SearchUtils.getDiscoveryConfigurationByName("person"),
                    query, filters,
                    "Item", 10, 0L, null, SortOption.DESCENDING);
            return searchService.iteratorSearch(context, null, discoverQuery);
        } catch (SearchServiceException e) {
            LOGGER.error("Unable to read researcher on solr", e);
        }
        // empty iterator instead of null so that internalRun does not NPE on failure
        return Collections.emptyIterator();
    }
}

View File

@@ -0,0 +1,36 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.suggestion.openaire;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.ParseException;
import org.dspace.utils.DSpace;
/**
 * CLI variant of {@link PublicationLoaderRunnable} which prints the help prompt
 * when the -h option is present.
 */
public class PublicationLoaderRunnableCli extends PublicationLoaderRunnable {

    @Override
    @SuppressWarnings({ "rawtypes", "unchecked" })
    public PublicationLoaderCliScriptConfiguration getScriptConfiguration() {
        return new DSpace().getServiceManager()
                .getServiceByName("import-openaire-suggestions", PublicationLoaderCliScriptConfiguration.class);
    }

    @Override
    public void setup() throws ParseException {
        super.setup();
        // in case of CLI we show the help prompt
        if (commandLine.hasOption('h')) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("Import Researchers Suggestions", getScriptConfiguration().getOptions());
            System.exit(0);
        }
    }
}

View File

@@ -0,0 +1,56 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.suggestion.openaire;
import org.apache.commons.cli.Options;
import org.dspace.scripts.configuration.ScriptConfiguration;
/**
 * Script configuration for the OpenAIRE publication suggestions import
 * ({@link PublicationLoaderRunnable}).
 */
public class PublicationLoaderScriptConfiguration<T extends PublicationLoaderRunnable>
        extends ScriptConfiguration<T> {

    private Class<T> dspaceRunnableClass;

    @Override
    public Class<T> getDspaceRunnableClass() {
        return dspaceRunnableClass;
    }

    /**
     * Generic setter for the dspaceRunnableClass
     * @param dspaceRunnableClass The dspaceRunnableClass to be set on this PublicationLoaderScriptConfiguration
     */
    @Override
    public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
        this.dspaceRunnableClass = dspaceRunnableClass;
    }

    @Override
    public Options getOptions() {
        // lazily build the option set; the local variable no longer shadows the
        // inherited "options" field
        if (options == null) {
            Options opts = new Options();
            opts.addOption("s", "single-researcher", true, "Single researcher UUID");
            opts.getOption("s").setType(String.class);
            super.options = opts;
        }
        return options;
    }
}

View File

@@ -628,12 +628,23 @@ public class AuthorizeUtil {
// actually expected to be returning true. // actually expected to be returning true.
// For example the LDAP canSelfRegister will return true due to auto-register, while that // For example the LDAP canSelfRegister will return true due to auto-register, while that
// does not imply a new user can register explicitly // does not imply a new user can register explicitly
return AuthenticateServiceFactory.getInstance().getAuthenticationService() return authorizePasswordChange(context, request);
.allowSetPassword(context, request, null);
} }
return false; return false;
} }
/**
* This method will return a boolean indicating whether the current user is allowed to reset the password
* or not
*
* @return A boolean indicating whether the current user can reset its password or not
* @throws SQLException If something goes wrong
*/
public static boolean authorizeForgotPassword() {
return DSpaceServicesFactory.getInstance().getConfigurationService()
.getBooleanProperty("user.forgot-password", true);
}
/** /**
* This method will return a boolean indicating whether it's allowed to update the password for the EPerson * This method will return a boolean indicating whether it's allowed to update the password for the EPerson
* with the given email and canLogin property * with the given email and canLogin property
@@ -647,8 +658,7 @@ public class AuthorizeUtil {
if (eperson != null && eperson.canLogIn()) { if (eperson != null && eperson.canLogIn()) {
HttpServletRequest request = new DSpace().getRequestService().getCurrentRequest() HttpServletRequest request = new DSpace().getRequestService().getCurrentRequest()
.getHttpServletRequest(); .getHttpServletRequest();
return AuthenticateServiceFactory.getInstance().getAuthenticationService() return authorizePasswordChange(context, request);
.allowSetPassword(context, request, null);
} }
} catch (SQLException e) { } catch (SQLException e) {
log.error("Something went wrong trying to retrieve EPerson for email: " + email, e); log.error("Something went wrong trying to retrieve EPerson for email: " + email, e);
@@ -656,6 +666,19 @@ public class AuthorizeUtil {
return false; return false;
} }
/**
* Checks if the current configuration has at least one password based authentication method
*
* @param context Dspace Context
* @param request Current Request
* @return True if the password change is enabled
* @throws SQLException
*/
protected static boolean authorizePasswordChange(Context context, HttpServletRequest request) throws SQLException {
return AuthenticateServiceFactory.getInstance().getAuthenticationService()
.allowSetPassword(context, request, null);
}
/** /**
* This method checks if the community Admin can manage accounts * This method checks if the community Admin can manage accounts
* *

View File

@@ -14,7 +14,6 @@ import java.util.Iterator;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import javax.servlet.ServletException;
import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.FactoryConfigurationError; import javax.xml.parsers.FactoryConfigurationError;
@@ -24,6 +23,7 @@ import org.dspace.content.Collection;
import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.MetadataSchemaEnum;
import org.dspace.core.Utils; import org.dspace.core.Utils;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.submit.factory.SubmissionServiceFactory;
import org.w3c.dom.Document; import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap; import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node; import org.w3c.dom.Node;
@@ -149,16 +149,16 @@ public class DCInputsReader {
* Returns the set of DC inputs used for a particular collection, or the * Returns the set of DC inputs used for a particular collection, or the
* default set if no inputs defined for the collection * default set if no inputs defined for the collection
* *
* @param collectionHandle collection's unique Handle * @param collection collection for which search the set of DC inputs
* @return DC input set * @return DC input set
* @throws DCInputsReaderException if no default set defined * @throws DCInputsReaderException if no default set defined
* @throws ServletException
*/ */
public List<DCInputSet> getInputsByCollectionHandle(String collectionHandle) public List<DCInputSet> getInputsByCollection(Collection collection)
throws DCInputsReaderException { throws DCInputsReaderException {
SubmissionConfig config; SubmissionConfig config;
try { try {
config = new SubmissionConfigReader().getSubmissionConfigByCollection(collectionHandle); config = SubmissionServiceFactory.getInstance().getSubmissionConfigService()
.getSubmissionConfigByCollection(collection);
String formName = config.getSubmissionName(); String formName = config.getSubmissionName();
if (formName == null) { if (formName == null) {
throw new DCInputsReaderException("No form designated as default"); throw new DCInputsReaderException("No form designated as default");
@@ -180,7 +180,8 @@ public class DCInputsReader {
throws DCInputsReaderException { throws DCInputsReaderException {
SubmissionConfig config; SubmissionConfig config;
try { try {
config = new SubmissionConfigReader().getSubmissionConfigByName(name); config = SubmissionServiceFactory.getInstance().getSubmissionConfigService()
.getSubmissionConfigByName(name);
String formName = config.getSubmissionName(); String formName = config.getSubmissionName();
if (formName == null) { if (formName == null) {
throw new DCInputsReaderException("No form designated as default"); throw new DCInputsReaderException("No form designated as default");
@@ -688,7 +689,7 @@ public class DCInputsReader {
public String getInputFormNameByCollectionAndField(Collection collection, String field) public String getInputFormNameByCollectionAndField(Collection collection, String field)
throws DCInputsReaderException { throws DCInputsReaderException {
List<DCInputSet> inputSets = getInputsByCollectionHandle(collection.getHandle()); List<DCInputSet> inputSets = getInputsByCollection(collection);
for (DCInputSet inputSet : inputSets) { for (DCInputSet inputSet : inputSets) {
String[] tokenized = Utils.tokenize(field); String[] tokenized = Utils.tokenize(field);
String schema = tokenized[0]; String schema = tokenized[0];

View File

@@ -11,6 +11,7 @@ import java.io.File;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@@ -21,6 +22,7 @@ import javax.xml.parsers.FactoryConfigurationError;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject; import org.dspace.content.DSpaceObject;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService; import org.dspace.content.service.CollectionService;
@@ -90,6 +92,13 @@ public class SubmissionConfigReader {
*/ */
private Map<String, String> collectionToSubmissionConfig = null; private Map<String, String> collectionToSubmissionConfig = null;
/**
* Hashmap which stores which submission process configuration is used by
* which community, computed from the item submission config file
* (specifically, the 'submission-map' tag)
*/
private Map<String, String> communityToSubmissionConfig = null;
/** /**
* Reference to the global submission step definitions defined in the * Reference to the global submission step definitions defined in the
* "step-definitions" section * "step-definitions" section
@@ -127,6 +136,7 @@ public class SubmissionConfigReader {
public void reload() throws SubmissionConfigReaderException { public void reload() throws SubmissionConfigReaderException {
collectionToSubmissionConfig = null; collectionToSubmissionConfig = null;
communityToSubmissionConfig = null;
stepDefns = null; stepDefns = null;
submitDefns = null; submitDefns = null;
buildInputs(configDir + SUBMIT_DEF_FILE_PREFIX + SUBMIT_DEF_FILE_SUFFIX); buildInputs(configDir + SUBMIT_DEF_FILE_PREFIX + SUBMIT_DEF_FILE_SUFFIX);
@@ -145,7 +155,8 @@ public class SubmissionConfigReader {
*/ */
private void buildInputs(String fileName) throws SubmissionConfigReaderException { private void buildInputs(String fileName) throws SubmissionConfigReaderException {
collectionToSubmissionConfig = new HashMap<String, String>(); collectionToSubmissionConfig = new HashMap<String, String>();
submitDefns = new HashMap<String, List<Map<String, String>>>(); communityToSubmissionConfig = new HashMap<String, String>();
submitDefns = new LinkedHashMap<String, List<Map<String, String>>>();
String uri = "file:" + new File(fileName).getAbsolutePath(); String uri = "file:" + new File(fileName).getAbsolutePath();
@@ -210,18 +221,41 @@ public class SubmissionConfigReader {
* Returns the Item Submission process config used for a particular * Returns the Item Submission process config used for a particular
* collection, or the default if none is defined for the collection * collection, or the default if none is defined for the collection
* *
* @param collectionHandle collection's unique Handle * @param col collection for which search Submission process config
* @return the SubmissionConfig representing the item submission config * @return the SubmissionConfig representing the item submission config
* @throws SubmissionConfigReaderException if no default submission process configuration defined * @throws IllegalStateException if no default submission process configuration defined
*/ */
public SubmissionConfig getSubmissionConfigByCollection(String collectionHandle) { public SubmissionConfig getSubmissionConfigByCollection(Collection col) {
String submitName;
if (col != null) {
// get the name of the submission process config for this collection // get the name of the submission process config for this collection
String submitName = collectionToSubmissionConfig
.get(collectionHandle);
if (submitName == null) {
submitName = collectionToSubmissionConfig submitName = collectionToSubmissionConfig
.get(DEFAULT_COLLECTION); .get(col.getHandle());
if (submitName != null) {
return getSubmissionConfigByName(submitName);
} }
if (!communityToSubmissionConfig.isEmpty()) {
try {
List<Community> communities = col.getCommunities();
for (Community com : communities) {
submitName = getSubmissionConfigByCommunity(com);
if (submitName != null) {
return getSubmissionConfigByName(submitName);
}
}
} catch (SQLException sqle) {
throw new IllegalStateException("Error occurred while getting item submission configured " +
"by community", sqle);
}
}
}
submitName = collectionToSubmissionConfig.get(DEFAULT_COLLECTION);
if (submitName == null) { if (submitName == null) {
throw new IllegalStateException( throw new IllegalStateException(
"No item submission process configuration designated as 'default' in 'submission-map' section of " + "No item submission process configuration designated as 'default' in 'submission-map' section of " +
@@ -230,6 +264,30 @@ public class SubmissionConfigReader {
return getSubmissionConfigByName(submitName); return getSubmissionConfigByName(submitName);
} }
/**
* Recursive function to return the Item Submission process config
* used for a community or the closest community parent, or null
* if none is defined
*
* @param com community for which search Submission process config
* @return the SubmissionConfig representing the item submission config
*/
private String getSubmissionConfigByCommunity(Community com) {
String submitName = communityToSubmissionConfig
.get(com.getHandle());
if (submitName != null) {
return submitName;
}
List<Community> communities = com.getParentCommunities();
for (Community parentCom : communities) {
submitName = getSubmissionConfigByCommunity(parentCom);
if (submitName != null) {
return submitName;
}
}
return null;
}
/** /**
* Returns the Item Submission process config * Returns the Item Submission process config
* *
@@ -357,13 +415,14 @@ public class SubmissionConfigReader {
Node nd = nl.item(i); Node nd = nl.item(i);
if (nd.getNodeName().equals("name-map")) { if (nd.getNodeName().equals("name-map")) {
String id = getAttribute(nd, "collection-handle"); String id = getAttribute(nd, "collection-handle");
String communityId = getAttribute(nd, "community-handle");
String entityType = getAttribute(nd, "collection-entity-type"); String entityType = getAttribute(nd, "collection-entity-type");
String value = getAttribute(nd, "submission-name"); String value = getAttribute(nd, "submission-name");
String content = getValue(nd); String content = getValue(nd);
if (id == null && entityType == null) { if (id == null && communityId == null && entityType == null) {
throw new SAXException( throw new SAXException(
"name-map element is missing collection-handle or collection-entity-type attribute " + "name-map element is missing collection-handle or community-handle or collection-entity-type " +
"in 'item-submission.xml'"); "attribute in 'item-submission.xml'");
} }
if (value == null) { if (value == null) {
throw new SAXException( throw new SAXException(
@@ -375,7 +434,8 @@ public class SubmissionConfigReader {
} }
if (id != null) { if (id != null) {
collectionToSubmissionConfig.put(id, value); collectionToSubmissionConfig.put(id, value);
} else if (communityId != null) {
communityToSubmissionConfig.put(communityId, value);
} else { } else {
// get all collections for this entity-type // get all collections for this entity-type
List<Collection> collections = collectionService.findAllCollectionsByEntityType( context, List<Collection> collections = collectionService.findAllCollectionsByEntityType( context,

View File

@@ -405,21 +405,13 @@ public class Util {
DCInput myInputs = null; DCInput myInputs = null;
boolean myInputsFound = false; boolean myInputsFound = false;
String formFileName = I18nUtil.getInputFormsFileName(locale); String formFileName = I18nUtil.getInputFormsFileName(locale);
String col_handle = "";
Collection collection = item.getOwningCollection(); Collection collection = item.getOwningCollection();
if (collection == null) {
// set an empty handle so to get the default input set
col_handle = "";
} else {
col_handle = collection.getHandle();
}
// Read the input form file for the specific collection // Read the input form file for the specific collection
DCInputsReader inputsReader = new DCInputsReader(formFileName); DCInputsReader inputsReader = new DCInputsReader(formFileName);
List<DCInputSet> inputSets = inputsReader.getInputsByCollectionHandle(col_handle); List<DCInputSet> inputSets = inputsReader.getInputsByCollection(collection);
// Replace the values of Metadatum[] with the correct ones in case // Replace the values of Metadatum[] with the correct ones in case
// of // of
@@ -500,8 +492,8 @@ public class Util {
public static List<String> differenceInSubmissionFields(Collection fromCollection, Collection toCollection) public static List<String> differenceInSubmissionFields(Collection fromCollection, Collection toCollection)
throws DCInputsReaderException { throws DCInputsReaderException {
DCInputsReader reader = new DCInputsReader(); DCInputsReader reader = new DCInputsReader();
List<DCInputSet> from = reader.getInputsByCollectionHandle(fromCollection.getHandle()); List<DCInputSet> from = reader.getInputsByCollection(fromCollection);
List<DCInputSet> to = reader.getInputsByCollectionHandle(toCollection.getHandle()); List<DCInputSet> to = reader.getInputsByCollection(toCollection);
Set<String> fromFieldName = new HashSet<>(); Set<String> fromFieldName = new HashSet<>();
Set<String> toFieldName = new HashSet<>(); Set<String> toFieldName = new HashSet<>();

View File

@@ -153,6 +153,22 @@ public interface AuthenticationMethod {
public List<Group> getSpecialGroups(Context context, HttpServletRequest request) public List<Group> getSpecialGroups(Context context, HttpServletRequest request)
throws SQLException; throws SQLException;
/**
* Returns true if the special groups returned by
* {@link org.dspace.authenticate.AuthenticationMethod#getSpecialGroups(Context, HttpServletRequest)}
* should be implicitly be added to the groups related to the current user. By
* default this is true if the authentication method is the actual
* authentication mechanism used by the user.
* @param context A valid DSpace context.
* @param request The request that started this operation, or null if not
* applicable.
* @return true is the special groups must be considered, false
* otherwise
*/
public default boolean areSpecialGroupsApplicable(Context context, HttpServletRequest request) {
return getName().equals(context.getAuthenticationMethod());
}
/** /**
* Authenticate the given or implicit credentials. * Authenticate the given or implicit credentials.
* This is the heart of the authentication method: test the * This is the heart of the authentication method: test the

View File

@@ -179,11 +179,16 @@ public class AuthenticationServiceImpl implements AuthenticationService {
int totalLen = 0; int totalLen = 0;
for (AuthenticationMethod method : getAuthenticationMethodStack()) { for (AuthenticationMethod method : getAuthenticationMethodStack()) {
if (method.areSpecialGroupsApplicable(context, request)) {
List<Group> gl = method.getSpecialGroups(context, request); List<Group> gl = method.getSpecialGroups(context, request);
if (gl.size() > 0) { if (gl.size() > 0) {
result.addAll(gl); result.addAll(gl);
totalLen += gl.size(); totalLen += gl.size();
} }
}
} }
return result; return result;

View File

@@ -252,6 +252,11 @@ public class IPAuthentication implements AuthenticationMethod {
return groups; return groups;
} }
@Override
public boolean areSpecialGroupsApplicable(Context context, HttpServletRequest request) {
return true;
}
@Override @Override
public int authenticate(Context context, String username, String password, public int authenticate(Context context, String username, String password,
String realm, HttpServletRequest request) throws SQLException { String realm, HttpServletRequest request) throws SQLException {

View File

@@ -494,6 +494,8 @@ public class LDAPAuthentication
try { try {
SearchControls ctrls = new SearchControls(); SearchControls ctrls = new SearchControls();
ctrls.setSearchScope(ldap_search_scope_value); ctrls.setSearchScope(ldap_search_scope_value);
// Fetch both user attributes '*' (eg. uid, cn) and operational attributes '+' (eg. memberOf)
ctrls.setReturningAttributes(new String[] {"*", "+"});
String searchName; String searchName;
if (useTLS) { if (useTLS) {
@@ -713,8 +715,8 @@ public class LDAPAuthentication
private void assignGroups(String dn, ArrayList<String> group, Context context) { private void assignGroups(String dn, ArrayList<String> group, Context context) {
if (StringUtils.isNotBlank(dn)) { if (StringUtils.isNotBlank(dn)) {
System.out.println("dn:" + dn); System.out.println("dn:" + dn);
int i = 1; int groupmapIndex = 1;
String groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + i); String groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + groupmapIndex);
boolean cmp; boolean cmp;
@@ -725,6 +727,13 @@ public class LDAPAuthentication
String ldapSearchString = t[0]; String ldapSearchString = t[0];
String dspaceGroupName = t[1]; String dspaceGroupName = t[1];
if (group == null) {
cmp = StringUtils.containsIgnoreCase(dn, ldapSearchString + ",");
if (cmp) {
assignGroup(context, groupmapIndex, dspaceGroupName);
}
} else {
// list of strings with dn from LDAP groups // list of strings with dn from LDAP groups
// inner loop // inner loop
Iterator<String> groupIterator = group.iterator(); Iterator<String> groupIterator = group.iterator();
@@ -741,7 +750,29 @@ public class LDAPAuthentication
} }
if (cmp) { if (cmp) {
// assign user to this group assignGroup(context, groupmapIndex, dspaceGroupName);
}
}
}
groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + ++groupmapIndex);
}
}
}
/**
* Add the current authenticated user to the specified group
*
* @param context
* DSpace context
*
* @param groupmapIndex
* authentication-ldap.login.groupmap.* key index defined in dspace.cfg
*
* @param dspaceGroupName
* The DSpace group to add the user to
*/
private void assignGroup(Context context, int groupmapIndex, String dspaceGroupName) {
try { try {
Group ldapGroup = groupService.findByName(context, dspaceGroupName); Group ldapGroup = groupService.findByName(context, dspaceGroupName);
if (ldapGroup != null) { if (ldapGroup != null) {
@@ -751,7 +782,7 @@ public class LDAPAuthentication
// The group does not exist // The group does not exist
log.warn(LogHelper.getHeader(context, log.warn(LogHelper.getHeader(context,
"ldap_assignGroupsBasedOnLdapDn", "ldap_assignGroupsBasedOnLdapDn",
"Group defined in authentication-ldap.login.groupmap." + i "Group defined in authentication-ldap.login.groupmap." + groupmapIndex
+ " does not exist :: " + dspaceGroupName)); + " does not exist :: " + dspaceGroupName));
} }
} catch (AuthorizeException ae) { } catch (AuthorizeException ae) {
@@ -764,12 +795,6 @@ public class LDAPAuthentication
dspaceGroupName)); dspaceGroupName));
} }
} }
}
groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + ++i);
}
}
}
@Override @Override
public boolean isUsed(final Context context, final HttpServletRequest request) { public boolean isUsed(final Context context, final HttpServletRequest request) {

View File

@@ -451,7 +451,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
if (e == null) { if (e == null) {
return false; // anonymous users can't be admins.... return false; // anonymous users can't be admins....
} else { } else {
return groupService.isMember(c, e, Group.ADMIN); return groupService.isMember(c, e, c.getAdminGroup());
} }
} }
@@ -895,7 +895,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
return true; return true;
} }
} catch (SearchServiceException e) { } catch (SearchServiceException e) {
log.error("Failed getting getting community/collection admin status for " log.error("Failed getting community/collection admin status for "
+ context.getCurrentUser().getEmail() + " The search error is: " + e.getMessage() + context.getCurrentUser().getEmail() + " The search error is: " + e.getMessage()
+ " The search resourceType filter was: " + query); + " The search resourceType filter was: " + query);
} }

View File

@@ -276,6 +276,11 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
//Remove our bitstream from all our bundles //Remove our bitstream from all our bundles
final List<Bundle> bundles = bitstream.getBundles(); final List<Bundle> bundles = bitstream.getBundles();
for (Bundle bundle : bundles) { for (Bundle bundle : bundles) {
authorizeService.authorizeAction(context, bundle, Constants.REMOVE);
//We also need to remove the bitstream id when it's set as bundle's primary bitstream
if (bitstream.equals(bundle.getPrimaryBitstream())) {
bundle.unsetPrimaryBitstreamID();
}
bundle.removeBitstream(bitstream); bundle.removeBitstream(bitstream);
} }
@@ -403,7 +408,7 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
@Override @Override
public Bitstream getThumbnail(Context context, Bitstream bitstream) throws SQLException { public Bitstream getThumbnail(Context context, Bitstream bitstream) throws SQLException {
Pattern pattern = Pattern.compile("^" + bitstream.getName() + ".([^.]+)$"); Pattern pattern = getBitstreamNamePattern(bitstream);
for (Bundle bundle : bitstream.getBundles()) { for (Bundle bundle : bitstream.getBundles()) {
for (Item item : bundle.getItems()) { for (Item item : bundle.getItems()) {
@@ -420,6 +425,13 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
return null; return null;
} }
protected Pattern getBitstreamNamePattern(Bitstream bitstream) {
if (bitstream.getName() != null) {
return Pattern.compile("^" + Pattern.quote(bitstream.getName()) + ".([^.]+)$");
}
return Pattern.compile("^" + bitstream.getName() + ".([^.]+)$");
}
@Override @Override
public BitstreamFormat getFormat(Context context, Bitstream bitstream) throws SQLException { public BitstreamFormat getFormat(Context context, Bitstream bitstream) throws SQLException {
if (bitstream.getBitstreamFormat() == null) { if (bitstream.getBitstreamFormat() == null) {
@@ -446,11 +458,16 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
@Override @Override
public Bitstream findByIdOrLegacyId(Context context, String id) throws SQLException { public Bitstream findByIdOrLegacyId(Context context, String id) throws SQLException {
try {
if (StringUtils.isNumeric(id)) { if (StringUtils.isNumeric(id)) {
return findByLegacyId(context, Integer.parseInt(id)); return findByLegacyId(context, Integer.parseInt(id));
} else { } else {
return find(context, UUID.fromString(id)); return find(context, UUID.fromString(id));
} }
} catch (IllegalArgumentException e) {
// Not a valid legacy ID or valid UUID
return null;
}
} }
@Override @Override

View File

@@ -126,7 +126,7 @@ public class Bundle extends DSpaceObject implements DSpaceObjectLegacySupport {
* Unset the primary bitstream ID of the bundle * Unset the primary bitstream ID of the bundle
*/ */
public void unsetPrimaryBitstreamID() { public void unsetPrimaryBitstreamID() {
primaryBitstream = null; setPrimaryBitstreamID(null);
} }
/** /**

View File

@@ -562,11 +562,16 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
@Override @Override
public Bundle findByIdOrLegacyId(Context context, String id) throws SQLException { public Bundle findByIdOrLegacyId(Context context, String id) throws SQLException {
try {
if (StringUtils.isNumeric(id)) { if (StringUtils.isNumeric(id)) {
return findByLegacyId(context, Integer.parseInt(id)); return findByLegacyId(context, Integer.parseInt(id));
} else { } else {
return find(context, UUID.fromString(id)); return find(context, UUID.fromString(id));
} }
} catch (IllegalArgumentException e) {
// Not a valid legacy ID or valid UUID
return null;
}
} }
@Override @Override

View File

@@ -895,11 +895,16 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
@Override @Override
public Collection findByIdOrLegacyId(Context context, String id) throws SQLException { public Collection findByIdOrLegacyId(Context context, String id) throws SQLException {
try {
if (StringUtils.isNumeric(id)) { if (StringUtils.isNumeric(id)) {
return findByLegacyId(context, Integer.parseInt(id)); return findByLegacyId(context, Integer.parseInt(id));
} else { } else {
return find(context, UUID.fromString(id)); return find(context, UUID.fromString(id));
} }
} catch (IllegalArgumentException e) {
// Not a valid legacy ID or valid UUID
return null;
}
} }
@Override @Override
@@ -1021,6 +1026,61 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
return resp; return resp;
} }
@Override
public Collection retrieveCollectionWithSubmitByEntityType(Context context, Item item,
String entityType) throws SQLException {
Collection ownCollection = item.getOwningCollection();
return retrieveWithSubmitCollectionByEntityType(context, ownCollection.getCommunities(), entityType);
}
private Collection retrieveWithSubmitCollectionByEntityType(Context context, List<Community> communities,
String entityType) {
for (Community community : communities) {
Collection collection = retrieveCollectionWithSubmitByCommunityAndEntityType(context, community,
entityType);
if (collection != null) {
return collection;
}
}
for (Community community : communities) {
List<Community> parentCommunities = community.getParentCommunities();
Collection collection = retrieveWithSubmitCollectionByEntityType(context, parentCommunities, entityType);
if (collection != null) {
return collection;
}
}
return retrieveCollectionWithSubmitByCommunityAndEntityType(context, null, entityType);
}
@Override
public Collection retrieveCollectionWithSubmitByCommunityAndEntityType(Context context, Community community,
String entityType) {
context.turnOffAuthorisationSystem();
List<Collection> collections;
try {
collections = findCollectionsWithSubmit(null, context, community, entityType, 0, 1);
} catch (SQLException | SearchServiceException e) {
throw new RuntimeException(e);
}
context.restoreAuthSystemState();
if (collections != null && collections.size() > 0) {
return collections.get(0);
}
if (community != null) {
for (Community subCommunity : community.getSubcommunities()) {
Collection collection = retrieveCollectionWithSubmitByCommunityAndEntityType(context,
subCommunity, entityType);
if (collection != null) {
return collection;
}
}
}
return null;
}
@Override @Override
public List<Collection> findCollectionsWithSubmit(String q, Context context, Community community, String entityType, public List<Collection> findCollectionsWithSubmit(String q, Context context, Community community, String entityType,
int offset, int limit) throws SQLException, SearchServiceException { int offset, int limit) throws SQLException, SearchServiceException {

View File

@@ -694,11 +694,16 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
@Override @Override
public Community findByIdOrLegacyId(Context context, String id) throws SQLException { public Community findByIdOrLegacyId(Context context, String id) throws SQLException {
try {
if (StringUtils.isNumeric(id)) { if (StringUtils.isNumeric(id)) {
return findByLegacyId(context, Integer.parseInt(id)); return findByLegacyId(context, Integer.parseInt(id));
} else { } else {
return find(context, UUID.fromString(id)); return find(context, UUID.fromString(id));
} }
} catch (IllegalArgumentException e) {
// Not a valid legacy ID or valid UUID
return null;
}
} }
@Override @Override

View File

@@ -93,7 +93,7 @@ public class InstallItemServiceImpl implements InstallItemService {
// As this is a BRAND NEW item, as a final step we need to remove the // As this is a BRAND NEW item, as a final step we need to remove the
// submitter item policies created during deposit and replace them with // submitter item policies created during deposit and replace them with
// the default policies from the collection. // the default policies from the collection.
itemService.inheritCollectionDefaultPolicies(c, item, collection); itemService.inheritCollectionDefaultPolicies(c, item, collection, false);
return item; return item;
} }
@@ -150,7 +150,6 @@ public class InstallItemServiceImpl implements InstallItemService {
return finishItem(c, item, is); return finishItem(c, item, is);
} }
protected void populateMetadata(Context c, Item item) protected void populateMetadata(Context c, Item item)
throws SQLException, AuthorizeException { throws SQLException, AuthorizeException {
// create accession date // create accession date
@@ -158,15 +157,6 @@ public class InstallItemServiceImpl implements InstallItemService {
itemService.addMetadata(c, item, MetadataSchemaEnum.DC.getName(), itemService.addMetadata(c, item, MetadataSchemaEnum.DC.getName(),
"date", "accessioned", null, now.toString()); "date", "accessioned", null, now.toString());
// add date available if not under embargo, otherwise it will
// be set when the embargo is lifted.
// this will flush out fatal embargo metadata
// problems before we set inArchive.
if (embargoService.getEmbargoTermsAsDate(c, item) == null) {
itemService.addMetadata(c, item, MetadataSchemaEnum.DC.getName(),
"date", "available", null, now.toString());
}
// If issue date is set as "today" (literal string), then set it to current date // If issue date is set as "today" (literal string), then set it to current date
// In the below loop, we temporarily clear all issued dates and re-add, one-by-one, // In the below loop, we temporarily clear all issued dates and re-add, one-by-one,
// replacing "today" with today's date. // replacing "today" with today's date.

View File

@@ -77,6 +77,7 @@ import org.dspace.orcid.service.OrcidQueueService;
import org.dspace.orcid.service.OrcidSynchronizationService; import org.dspace.orcid.service.OrcidSynchronizationService;
import org.dspace.orcid.service.OrcidTokenService; import org.dspace.orcid.service.OrcidTokenService;
import org.dspace.profile.service.ResearcherProfileService; import org.dspace.profile.service.ResearcherProfileService;
import org.dspace.qaevent.dao.QAEventsDAO;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.versioning.service.VersioningService; import org.dspace.versioning.service.VersioningService;
import org.dspace.workflow.WorkflowItemService; import org.dspace.workflow.WorkflowItemService;
@@ -170,6 +171,9 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
@Autowired(required = true) @Autowired(required = true)
protected SubscribeService subscribeService; protected SubscribeService subscribeService;
@Autowired
private QAEventsDAO qaEventsDao;
protected ItemServiceImpl() { protected ItemServiceImpl() {
super(); super();
} }
@@ -819,6 +823,11 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
orcidToken.setProfileItem(null); orcidToken.setProfileItem(null);
} }
List<QAEventProcessed> qaEvents = qaEventsDao.findByItem(context, item);
for (QAEventProcessed qaEvent : qaEvents) {
qaEventsDao.delete(context, qaEvent);
}
//Only clear collections after we have removed everything else from the item //Only clear collections after we have removed everything else from the item
item.clearCollections(); item.clearCollections();
item.setOwningCollection(null); item.setOwningCollection(null);
@@ -920,8 +929,16 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
@Override @Override
public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection) public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection)
throws SQLException, AuthorizeException { throws SQLException, AuthorizeException {
adjustItemPolicies(context, item, collection); inheritCollectionDefaultPolicies(context, item, collection, true);
adjustBundleBitstreamPolicies(context, item, collection); }
@Override
public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection,
boolean replaceReadRPWithCollectionRP)
throws SQLException, AuthorizeException {
adjustItemPolicies(context, item, collection, replaceReadRPWithCollectionRP);
adjustBundleBitstreamPolicies(context, item, collection, replaceReadRPWithCollectionRP);
log.debug(LogHelper.getHeader(context, "item_inheritCollectionDefaultPolicies", log.debug(LogHelper.getHeader(context, "item_inheritCollectionDefaultPolicies",
"item_id=" + item.getID())); "item_id=" + item.getID()));
@@ -930,6 +947,13 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
@Override @Override
public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection) public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection)
throws SQLException, AuthorizeException { throws SQLException, AuthorizeException {
adjustBundleBitstreamPolicies(context, item, collection, true);
}
@Override
public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection,
boolean replaceReadRPWithCollectionRP)
throws SQLException, AuthorizeException {
// Bundles should inherit from DEFAULT_ITEM_READ so that if the item is readable, the files // Bundles should inherit from DEFAULT_ITEM_READ so that if the item is readable, the files
// can be listed (even if they are themselves not readable as per DEFAULT_BITSTREAM_READ or other // can be listed (even if they are themselves not readable as per DEFAULT_BITSTREAM_READ or other
// policies or embargos applied // policies or embargos applied
@@ -948,10 +972,19 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
} }
// TODO: should we also throw an exception if no DEFAULT_ITEM_READ? // TODO: should we also throw an exception if no DEFAULT_ITEM_READ?
boolean removeCurrentReadRPBitstream =
replaceReadRPWithCollectionRP && defaultCollectionBitstreamPolicies.size() > 0;
boolean removeCurrentReadRPBundle =
replaceReadRPWithCollectionRP && defaultCollectionBundlePolicies.size() > 0;
// remove all policies from bundles, add new ones // remove all policies from bundles, add new ones
// Remove bundles // Remove bundles
List<Bundle> bunds = item.getBundles(); List<Bundle> bunds = item.getBundles();
for (Bundle mybundle : bunds) { for (Bundle mybundle : bunds) {
// If collection has default READ policies, remove the bundle's READ policies.
if (removeCurrentReadRPBundle) {
authorizeService.removePoliciesActionFilter(context, mybundle, Constants.READ);
}
// if come from InstallItem: remove all submission/workflow policies // if come from InstallItem: remove all submission/workflow policies
authorizeService.removeAllPoliciesByDSOAndType(context, mybundle, ResourcePolicy.TYPE_SUBMISSION); authorizeService.removeAllPoliciesByDSOAndType(context, mybundle, ResourcePolicy.TYPE_SUBMISSION);
@@ -960,6 +993,11 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
addDefaultPoliciesNotInPlace(context, mybundle, defaultCollectionBundlePolicies); addDefaultPoliciesNotInPlace(context, mybundle, defaultCollectionBundlePolicies);
for (Bitstream bitstream : mybundle.getBitstreams()) { for (Bitstream bitstream : mybundle.getBitstreams()) {
// If collection has default READ policies, remove the bundle's READ policies.
if (removeCurrentReadRPBitstream) {
authorizeService.removePoliciesActionFilter(context, bitstream, Constants.READ);
}
// if come from InstallItem: remove all submission/workflow policies // if come from InstallItem: remove all submission/workflow policies
removeAllPoliciesAndAddDefault(context, bitstream, defaultItemPolicies, removeAllPoliciesAndAddDefault(context, bitstream, defaultItemPolicies,
defaultCollectionBitstreamPolicies); defaultCollectionBitstreamPolicies);
@@ -968,7 +1006,14 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
} }
@Override @Override
public void adjustBitstreamPolicies(Context context, Item item, Collection collection , Bitstream bitstream) public void adjustBitstreamPolicies(Context context, Item item, Collection collection, Bitstream bitstream)
throws SQLException, AuthorizeException {
adjustBitstreamPolicies(context, item, collection, bitstream, true);
}
@Override
public void adjustBitstreamPolicies(Context context, Item item, Collection collection , Bitstream bitstream,
boolean replaceReadRPWithCollectionRP)
throws SQLException, AuthorizeException { throws SQLException, AuthorizeException {
List<ResourcePolicy> defaultCollectionPolicies = authorizeService List<ResourcePolicy> defaultCollectionPolicies = authorizeService
.getPoliciesActionFilter(context, collection, Constants.DEFAULT_BITSTREAM_READ); .getPoliciesActionFilter(context, collection, Constants.DEFAULT_BITSTREAM_READ);
@@ -998,10 +1043,22 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
@Override @Override
public void adjustItemPolicies(Context context, Item item, Collection collection) public void adjustItemPolicies(Context context, Item item, Collection collection)
throws SQLException, AuthorizeException { throws SQLException, AuthorizeException {
adjustItemPolicies(context, item, collection, true);
}
@Override
public void adjustItemPolicies(Context context, Item item, Collection collection,
boolean replaceReadRPWithCollectionRP)
throws SQLException, AuthorizeException {
// read collection's default READ policies // read collection's default READ policies
List<ResourcePolicy> defaultCollectionPolicies = authorizeService List<ResourcePolicy> defaultCollectionPolicies = authorizeService
.getPoliciesActionFilter(context, collection, Constants.DEFAULT_ITEM_READ); .getPoliciesActionFilter(context, collection, Constants.DEFAULT_ITEM_READ);
// If collection has defaultREAD policies, remove the item's READ policies.
if (replaceReadRPWithCollectionRP && defaultCollectionPolicies.size() > 0) {
authorizeService.removePoliciesActionFilter(context, item, Constants.READ);
}
// MUST have default policies // MUST have default policies
if (defaultCollectionPolicies.size() < 1) { if (defaultCollectionPolicies.size() < 1) {
throw new SQLException("Collection " + collection.getID() throw new SQLException("Collection " + collection.getID()
@@ -1378,16 +1435,6 @@ prevent the generation of resource policy entry values with null dspace_object a
} }
} }
@Override
public Iterator<Item> findByMetadataQuery(Context context, List<List<MetadataField>> listFieldList,
List<String> query_op, List<String> query_val, List<UUID> collectionUuids,
String regexClause, int offset, int limit)
throws SQLException, AuthorizeException, IOException {
return itemDAO
.findByMetadataQuery(context, listFieldList, query_op, query_val, collectionUuids, regexClause, offset,
limit);
}
@Override @Override
public DSpaceObject getAdminObject(Context context, Item item, int action) throws SQLException { public DSpaceObject getAdminObject(Context context, Item item, int action) throws SQLException {
DSpaceObject adminObject = null; DSpaceObject adminObject = null;
@@ -1561,11 +1608,16 @@ prevent the generation of resource policy entry values with null dspace_object a
@Override @Override
public Item findByIdOrLegacyId(Context context, String id) throws SQLException { public Item findByIdOrLegacyId(Context context, String id) throws SQLException {
try {
if (StringUtils.isNumeric(id)) { if (StringUtils.isNumeric(id)) {
return findByLegacyId(context, Integer.parseInt(id)); return findByLegacyId(context, Integer.parseInt(id));
} else { } else {
return find(context, UUID.fromString(id)); return find(context, UUID.fromString(id));
} }
} catch (IllegalArgumentException e) {
// Not a valid legacy ID or valid UUID
return null;
}
} }
@Override @Override

View File

@@ -0,0 +1,213 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Date;

import com.fasterxml.jackson.databind.annotation.JsonDeserialize;

import org.dspace.qaevent.service.dto.OpenaireMessageDTO;
import org.dspace.qaevent.service.dto.QAMessageDTO;
import org.dspace.util.RawJsonDeserializer;
/**
* This class represent the Quality Assurance broker data as loaded in our solr
* qaevent core
*
*/
/**
 * Represents Quality Assurance (QA) broker data as loaded in the {@code qaevent}
 * Solr core. Instances may be built directly (constructor) or deserialized by
 * Jackson via the empty constructor, in which case the event id is computed
 * lazily on first access.
 *
 * NOTE(review): lazy id computation in {@link #getEventId()} is not
 * thread-safe; confirm events are confined to a single thread before sharing.
 */
public class QAEvent {
    /** Lookup table used to render the event signature as lowercase hex. */
    public static final char[] HEX_DIGITS = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e',
        'f' };
    public static final String ACCEPTED = "accepted";
    public static final String REJECTED = "rejected";
    public static final String DISCARDED = "discarded";
    public static final String OPENAIRE_SOURCE = "openaire";

    private String source;
    private String eventId;
    /**
     * Identifier of the targeted DSpace object on the source side,
     * i.e. oai:www.openstarts.units.it:123456789/1120 contains the handle
     * of the DSpace object (123456789/1120) in its final part.
     */
    private String originalId;
    /**
     * Evaluated with the targeted DSpace object id.
     */
    private String target;
    private String related;
    private String title;
    private String topic;
    private double trust;
    /** Raw JSON payload of the event, kept verbatim by {@link RawJsonDeserializer}. */
    @JsonDeserialize(using = RawJsonDeserializer.class)
    private String message;
    private Date lastUpdate;
    /** Workflow status of the event; newly created events start as PENDING. */
    private String status = "PENDING";

    public QAEvent() {
    }

    /**
     * Builds a fully populated event and computes its signature-based id.
     *
     * @throws IllegalStateException if the MD5 digest algorithm is unavailable
     *         (should never happen on a compliant JVM)
     */
    public QAEvent(String source, String originalId, String target, String title,
        String topic, double trust, String message, Date lastUpdate) {
        super();
        this.source = source;
        this.originalId = originalId;
        this.target = target;
        this.title = title;
        this.topic = topic;
        this.trust = trust;
        this.message = message;
        this.lastUpdate = lastUpdate;
        try {
            computedEventId();
        } catch (NoSuchAlgorithmException e) {
            throw new IllegalStateException(e);
        }
    }

    public String getOriginalId() {
        return originalId;
    }

    public void setOriginalId(String originalId) {
        this.originalId = originalId;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getTopic() {
        return topic;
    }

    public void setTopic(String topic) {
        this.topic = topic;
    }

    public double getTrust() {
        return trust;
    }

    public void setTrust(double trust) {
        this.trust = trust;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    /**
     * Returns the event id, computing it on demand when the instance was
     * created through the empty (Jackson) constructor.
     *
     * @throws RuntimeException if the MD5 digest algorithm is unavailable
     */
    public String getEventId() {
        if (eventId == null) {
            try {
                computedEventId();
            } catch (NoSuchAlgorithmException e) {
                throw new RuntimeException(e);
            }
        }
        return eventId;
    }

    public void setEventId(String eventId) {
        this.eventId = eventId;
    }

    public String getTarget() {
        return target;
    }

    public void setTarget(String target) {
        this.target = target;
    }

    public Date getLastUpdate() {
        return lastUpdate;
    }

    public void setLastUpdate(Date lastUpdate) {
        this.lastUpdate = lastUpdate;
    }

    public void setRelated(String related) {
        this.related = related;
    }

    public String getRelated() {
        return related;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public String getStatus() {
        return status;
    }

    /** Returns the event source, defaulting to {@link #OPENAIRE_SOURCE} when unset. */
    public String getSource() {
        return source != null ? source : OPENAIRE_SOURCE;
    }

    public void setSource(String source) {
        this.source = source;
    }

    /*
     * DTO constructed via Jackson use empty constructor. In this case, the eventId
     * must be computed on the get method. This method creates a signature based on
     * the event fields and stores it in the eventId attribute.
     *
     * MD5 is used as a content fingerprint only (not for security), so its
     * cryptographic weakness is not a concern here.
     */
    private void computedEventId() throws NoSuchAlgorithmException {
        MessageDigest digester = MessageDigest.getInstance("MD5");
        String dataToString = "source=" + source + ",originalId=" + originalId + ", title=" + title + ", topic="
            + topic + ", trust=" + trust + ", message=" + message;
        // StandardCharsets.UTF_8 avoids the impossible UnsupportedEncodingException
        // that the string form getBytes("UTF-8") would force callers to handle.
        digester.update(dataToString.getBytes(StandardCharsets.UTF_8));
        byte[] signature = digester.digest();
        // Render the 16-byte digest as 32 lowercase hex characters.
        char[] arr = new char[signature.length << 1];
        for (int i = 0; i < signature.length; i++) {
            int b = signature[i];
            int idx = i << 1;
            arr[idx] = HEX_DIGITS[(b >> 4) & 0xf];
            arr[idx + 1] = HEX_DIGITS[b & 0xf];
        }
        eventId = new String(arr);
    }

    /**
     * Maps the event source to the DTO class used to parse its message payload.
     *
     * @throws IllegalArgumentException for unknown sources
     */
    public Class<? extends QAMessageDTO> getMessageDtoClass() {
        switch (getSource()) {
            case OPENAIRE_SOURCE:
                return OpenaireMessageDTO.class;
            default:
                throw new IllegalArgumentException("Unknown event's source: " + getSource());
        }
    }
}

View File

@@ -0,0 +1,82 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.io.Serializable;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import org.dspace.eperson.EPerson;
/**
* This class represent the stored information about processed notification
* broker events
*
*/
/**
 * JPA entity mapping the {@code qaevent_processed} table, which stores the
 * information kept about Quality Assurance (QA) broker events that have
 * already been handled.
 */
@Entity
@Table(name = "qaevent_processed")
public class QAEventProcessed implements Serializable {

    private static final long serialVersionUID = 3427340199132007814L;

    // Identifier of the processed QA event (primary key).
    @Id
    @Column(name = "qaevent_id")
    private String eventId;

    // When the event was processed.
    @Temporal(TemporalType.TIMESTAMP)
    @Column(name = "qaevent_timestamp")
    private Date eventTimestamp;

    // EPerson linked to the processed event — presumably the user who handled
    // it; TODO confirm against the code that writes this table.
    @JoinColumn(name = "eperson_uuid")
    @ManyToOne
    private EPerson eperson;

    // Item targeted by the event.
    @JoinColumn(name = "item_uuid")
    @ManyToOne
    private Item item;

    public void setEventId(String eventId) {
        this.eventId = eventId;
    }

    public String getEventId() {
        return eventId;
    }

    public void setEventTimestamp(Date eventTimestamp) {
        this.eventTimestamp = eventTimestamp;
    }

    public Date getEventTimestamp() {
        return eventTimestamp;
    }

    public void setEperson(EPerson eperson) {
        this.eperson = eperson;
    }

    public EPerson getEperson() {
        return eperson;
    }

    public void setItem(Item item) {
        this.item = item;
    }

    public Item getItem() {
        return item;
    }
}

View File

@@ -25,7 +25,6 @@ import org.dspace.app.util.DCInputSet;
import org.dspace.app.util.DCInputsReader; import org.dspace.app.util.DCInputsReader;
import org.dspace.app.util.DCInputsReaderException; import org.dspace.app.util.DCInputsReaderException;
import org.dspace.app.util.SubmissionConfig; import org.dspace.app.util.SubmissionConfig;
import org.dspace.app.util.SubmissionConfigReader;
import org.dspace.app.util.SubmissionConfigReaderException; import org.dspace.app.util.SubmissionConfigReaderException;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.MetadataValue; import org.dspace.content.MetadataValue;
@@ -35,6 +34,8 @@ import org.dspace.core.service.PluginService;
import org.dspace.discovery.configuration.DiscoveryConfigurationService; import org.dspace.discovery.configuration.DiscoveryConfigurationService;
import org.dspace.discovery.configuration.DiscoverySearchFilterFacet; import org.dspace.discovery.configuration.DiscoverySearchFilterFacet;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.submit.factory.SubmissionServiceFactory;
import org.dspace.submit.service.SubmissionConfigService;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
/** /**
@@ -88,7 +89,7 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
protected Map<String, DSpaceControlledVocabularyIndex> vocabularyIndexMap = new HashMap<>(); protected Map<String, DSpaceControlledVocabularyIndex> vocabularyIndexMap = new HashMap<>();
// the item submission reader // the item submission reader
private SubmissionConfigReader itemSubmissionConfigReader; private SubmissionConfigService submissionConfigService;
@Autowired(required = true) @Autowired(required = true)
protected ConfigurationService configurationService; protected ConfigurationService configurationService;
@@ -135,7 +136,7 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
private synchronized void init() { private synchronized void init() {
if (!initialized) { if (!initialized) {
try { try {
itemSubmissionConfigReader = new SubmissionConfigReader(); submissionConfigService = SubmissionServiceFactory.getInstance().getSubmissionConfigService();
} catch (SubmissionConfigReaderException e) { } catch (SubmissionConfigReaderException e) {
// the system is in an illegal state as the submission definition is not valid // the system is in an illegal state as the submission definition is not valid
throw new IllegalStateException("Error reading the item submission configuration: " + e.getMessage(), throw new IllegalStateException("Error reading the item submission configuration: " + e.getMessage(),
@@ -240,8 +241,8 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
// there is an authority configured for the metadata valid for some collections, // there is an authority configured for the metadata valid for some collections,
// check if it is the requested collection // check if it is the requested collection
Map<String, ChoiceAuthority> controllerFormDef = controllerFormDefinitions.get(fieldKey); Map<String, ChoiceAuthority> controllerFormDef = controllerFormDefinitions.get(fieldKey);
SubmissionConfig submissionConfig = itemSubmissionConfigReader SubmissionConfig submissionConfig = submissionConfigService
.getSubmissionConfigByCollection(collection.getHandle()); .getSubmissionConfigByCollection(collection);
String submissionName = submissionConfig.getSubmissionName(); String submissionName = submissionConfig.getSubmissionName();
// check if the requested collection has a submission definition that use an authority for the metadata // check if the requested collection has a submission definition that use an authority for the metadata
if (controllerFormDef.containsKey(submissionName)) { if (controllerFormDef.containsKey(submissionName)) {
@@ -262,14 +263,14 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
} }
@Override @Override
public void clearCache() { public void clearCache() throws SubmissionConfigReaderException {
controller.clear(); controller.clear();
authorities.clear(); authorities.clear();
presentation.clear(); presentation.clear();
closed.clear(); closed.clear();
controllerFormDefinitions.clear(); controllerFormDefinitions.clear();
authoritiesFormDefinitions.clear(); authoritiesFormDefinitions.clear();
itemSubmissionConfigReader = null; submissionConfigService.reload();
initialized = false; initialized = false;
} }
@@ -319,7 +320,7 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
*/ */
private void autoRegisterChoiceAuthorityFromInputReader() { private void autoRegisterChoiceAuthorityFromInputReader() {
try { try {
List<SubmissionConfig> submissionConfigs = itemSubmissionConfigReader List<SubmissionConfig> submissionConfigs = submissionConfigService
.getAllSubmissionConfigs(Integer.MAX_VALUE, 0); .getAllSubmissionConfigs(Integer.MAX_VALUE, 0);
DCInputsReader dcInputsReader = new DCInputsReader(); DCInputsReader dcInputsReader = new DCInputsReader();
@@ -490,10 +491,11 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
init(); init();
ChoiceAuthority ma = controller.get(fieldKey); ChoiceAuthority ma = controller.get(fieldKey);
if (ma == null && collection != null) { if (ma == null && collection != null) {
SubmissionConfigReader configReader; SubmissionConfigService configReaderService;
try { try {
configReader = new SubmissionConfigReader(); configReaderService = SubmissionServiceFactory.getInstance().getSubmissionConfigService();
SubmissionConfig submissionName = configReader.getSubmissionConfigByCollection(collection.getHandle()); SubmissionConfig submissionName = configReaderService
.getSubmissionConfigByCollection(collection);
ma = controllerFormDefinitions.get(fieldKey).get(submissionName.getSubmissionName()); ma = controllerFormDefinitions.get(fieldKey).get(submissionName.getSubmissionName());
} catch (SubmissionConfigReaderException e) { } catch (SubmissionConfigReaderException e) {
// the system is in an illegal state as the submission definition is not valid // the system is in an illegal state as the submission definition is not valid

View File

@@ -156,7 +156,8 @@ public class DCInputAuthority extends SelfNamedPlugin implements ChoiceAuthority
int found = 0; int found = 0;
List<Choice> v = new ArrayList<Choice>(); List<Choice> v = new ArrayList<Choice>();
for (int i = 0; i < valuesLocale.length; ++i) { for (int i = 0; i < valuesLocale.length; ++i) {
if (query == null || StringUtils.containsIgnoreCase(valuesLocale[i], query)) { // In a DCInputAuthority context, a user will want to query the labels, not the values
if (query == null || StringUtils.containsIgnoreCase(labelsLocale[i], query)) {
if (found >= start && v.size() < limit) { if (found >= start && v.size() < limit) {
v.add(new Choice(null, valuesLocale[i], labelsLocale[i])); v.add(new Choice(null, valuesLocale[i], labelsLocale[i]));
if (valuesLocale[i].equalsIgnoreCase(query)) { if (valuesLocale[i].equalsIgnoreCase(query)) {

View File

@@ -10,6 +10,7 @@ package org.dspace.content.authority.service;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import org.dspace.app.util.SubmissionConfigReaderException;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.MetadataValue; import org.dspace.content.MetadataValue;
import org.dspace.content.authority.Choice; import org.dspace.content.authority.Choice;
@@ -174,7 +175,7 @@ public interface ChoiceAuthorityService {
/** /**
* This method has been created to have a way of clearing the cache kept inside the service * This method has been created to have a way of clearing the cache kept inside the service
*/ */
public void clearCache(); public void clearCache() throws SubmissionConfigReaderException;
/** /**
* Should we store the authority key (if any) for such field key and collection? * Should we store the authority key (if any) for such field key and collection?

View File

@@ -11,7 +11,6 @@ import java.sql.SQLException;
import java.util.Date; import java.util.Date;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.UUID;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.Community; import org.dspace.content.Community;
@@ -80,10 +79,6 @@ public interface ItemDAO extends DSpaceObjectLegacySupportDAO<Item> {
public Iterator<Item> findByMetadataField(Context context, MetadataField metadataField, String value, public Iterator<Item> findByMetadataField(Context context, MetadataField metadataField, String value,
boolean inArchive) throws SQLException; boolean inArchive) throws SQLException;
public Iterator<Item> findByMetadataQuery(Context context, List<List<MetadataField>> listFieldList,
List<String> query_op, List<String> query_val, List<UUID> collectionUuids,
String regexClause, int offset, int limit) throws SQLException;
public Iterator<Item> findByAuthorityValue(Context context, MetadataField metadataField, String authority, public Iterator<Item> findByAuthorityValue(Context context, MetadataField metadataField, String authority,
boolean inArchive) throws SQLException; boolean inArchive) throws SQLException;

View File

@@ -68,9 +68,9 @@ public class BitstreamDAOImpl extends AbstractHibernateDSODAO<Bitstream> impleme
@Override @Override
public List<Bitstream> findBitstreamsWithNoRecentChecksum(Context context) throws SQLException { public List<Bitstream> findBitstreamsWithNoRecentChecksum(Context context) throws SQLException {
Query query = createQuery(context, Query query = createQuery(context, "SELECT b FROM MostRecentChecksum c RIGHT JOIN Bitstream b " +
"select b from Bitstream b where b not in (select c.bitstream from " + "ON c.bitstream = b WHERE c IS NULL" );
"MostRecentChecksum c)");
return query.getResultList(); return query.getResultList();
} }

View File

@@ -12,7 +12,6 @@ import java.util.Collections;
import java.util.Date; import java.util.Date;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.UUID;
import javax.persistence.Query; import javax.persistence.Query;
import javax.persistence.TemporalType; import javax.persistence.TemporalType;
import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaBuilder;
@@ -24,20 +23,10 @@ import org.dspace.content.Collection;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.Item_; import org.dspace.content.Item_;
import org.dspace.content.MetadataField; import org.dspace.content.MetadataField;
import org.dspace.content.MetadataValue;
import org.dspace.content.dao.ItemDAO; import org.dspace.content.dao.ItemDAO;
import org.dspace.core.AbstractHibernateDSODAO; import org.dspace.core.AbstractHibernateDSODAO;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.hibernate.Criteria;
import org.hibernate.criterion.Criterion;
import org.hibernate.criterion.DetachedCriteria;
import org.hibernate.criterion.Order;
import org.hibernate.criterion.Projections;
import org.hibernate.criterion.Property;
import org.hibernate.criterion.Restrictions;
import org.hibernate.criterion.Subqueries;
import org.hibernate.type.StandardBasicTypes;
/** /**
* Hibernate implementation of the Database Access Object interface class for the Item object. * Hibernate implementation of the Database Access Object interface class for the Item object.
@@ -174,120 +163,6 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
return iterate(query); return iterate(query);
} }
enum OP {
equals {
public Criterion buildPredicate(String val, String regexClause) {
return Property.forName("mv.value").eq(val);
}
},
not_equals {
public Criterion buildPredicate(String val, String regexClause) {
return OP.equals.buildPredicate(val, regexClause);
}
},
like {
public Criterion buildPredicate(String val, String regexClause) {
return Property.forName("mv.value").like(val);
}
},
not_like {
public Criterion buildPredicate(String val, String regexClause) {
return OP.like.buildPredicate(val, regexClause);
}
},
contains {
public Criterion buildPredicate(String val, String regexClause) {
return Property.forName("mv.value").like("%" + val + "%");
}
},
doesnt_contain {
public Criterion buildPredicate(String val, String regexClause) {
return OP.contains.buildPredicate(val, regexClause);
}
},
exists {
public Criterion buildPredicate(String val, String regexClause) {
return Property.forName("mv.value").isNotNull();
}
},
doesnt_exist {
public Criterion buildPredicate(String val, String regexClause) {
return OP.exists.buildPredicate(val, regexClause);
}
},
matches {
public Criterion buildPredicate(String val, String regexClause) {
return Restrictions.sqlRestriction(regexClause, val, StandardBasicTypes.STRING);
}
},
doesnt_match {
public Criterion buildPredicate(String val, String regexClause) {
return OP.matches.buildPredicate(val, regexClause);
}
};
public abstract Criterion buildPredicate(String val, String regexClause);
}
@Override
@Deprecated
public Iterator<Item> findByMetadataQuery(Context context, List<List<MetadataField>> listFieldList,
List<String> query_op, List<String> query_val, List<UUID> collectionUuids,
String regexClause, int offset, int limit) throws SQLException {
Criteria criteria = getHibernateSession(context).createCriteria(Item.class, "item");
criteria.setFirstResult(offset);
criteria.setMaxResults(limit);
if (!collectionUuids.isEmpty()) {
DetachedCriteria dcollCriteria = DetachedCriteria.forClass(Collection.class, "coll");
dcollCriteria.setProjection(Projections.property("coll.id"));
dcollCriteria.add(Restrictions.eqProperty("coll.id", "item.owningCollection"));
dcollCriteria.add(Restrictions.in("coll.id", collectionUuids));
criteria.add(Subqueries.exists(dcollCriteria));
}
int index = Math.min(listFieldList.size(), Math.min(query_op.size(), query_val.size()));
StringBuilder sb = new StringBuilder();
for (int i = 0; i < index; i++) {
OP op = OP.valueOf(query_op.get(i));
if (op == null) {
log.warn("Skipping Invalid Operator: " + query_op.get(i));
continue;
}
if (op == OP.matches || op == OP.doesnt_match) {
if (regexClause.isEmpty()) {
log.warn("Skipping Unsupported Regex Operator: " + query_op.get(i));
continue;
}
}
DetachedCriteria subcriteria = DetachedCriteria.forClass(MetadataValue.class, "mv");
subcriteria.add(Property.forName("mv.dSpaceObject").eqProperty("item.id"));
subcriteria.setProjection(Projections.property("mv.dSpaceObject"));
if (!listFieldList.get(i).isEmpty()) {
subcriteria.add(Restrictions.in("metadataField", listFieldList.get(i)));
}
subcriteria.add(op.buildPredicate(query_val.get(i), regexClause));
if (op == OP.exists || op == OP.equals || op == OP.like || op == OP.contains || op == OP.matches) {
criteria.add(Subqueries.exists(subcriteria));
} else {
criteria.add(Subqueries.notExists(subcriteria));
}
}
criteria.addOrder(Order.asc("item.id"));
log.debug(String.format("Running custom query with %d filters", index));
return ((List<Item>) criteria.list()).iterator();
}
@Override @Override
public Iterator<Item> findByAuthorityValue(Context context, MetadataField metadataField, String authority, public Iterator<Item> findByAuthorityValue(Context context, MetadataField metadataField, String authority,
boolean inArchive) throws SQLException { boolean inArchive) throws SQLException {

View File

@@ -417,6 +417,34 @@ public interface CollectionService
public List<Collection> findCollectionsWithSubmit(String q, Context context, Community community, public List<Collection> findCollectionsWithSubmit(String q, Context context, Community community,
int offset, int limit) throws SQLException, SearchServiceException; int offset, int limit) throws SQLException, SearchServiceException;
/**
* Retrieve the first collection in the community or its descendants that
* supports the provided entityType
*
* @param context the DSpace context
* @param community the root from where the search starts
* @param entityType the requested entity type
* @return the first collection in the community or its descendants
* that supports the provided entityType
*/
public Collection retrieveCollectionWithSubmitByCommunityAndEntityType(Context context, Community community,
String entityType);
/**
* Retrieve the closest collection to the item for which the current user has
* 'submit' privileges and that supports the provided entityType. Closest means
* the collection that can be reached with the minimum number of steps starting
* from the item (owningCollection, sibling collections, etc.)
*
* @param context the DSpace context
* @param item the item from where the search starts
* @param entityType the requested entity type
* @return the closest collection to the item
* that supports the provided entityType
*/
public Collection retrieveCollectionWithSubmitByEntityType(Context context, Item item, String entityType)
throws SQLException;
/** /**
* Counts the number of Collection for which the current user has 'submit' privileges. * Counts the number of Collection for which the current user has 'submit' privileges.
* NOTE: for better performance, this method retrieves its results from an index (cache) * NOTE: for better performance, this method retrieves its results from an index (cache)

View File

@@ -23,7 +23,6 @@ import org.dspace.content.Collection;
import org.dspace.content.Community; import org.dspace.content.Community;
import org.dspace.content.EntityType; import org.dspace.content.EntityType;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataValue; import org.dspace.content.MetadataValue;
import org.dspace.content.Thumbnail; import org.dspace.content.Thumbnail;
import org.dspace.content.WorkspaceItem; import org.dspace.content.WorkspaceItem;
@@ -473,7 +472,7 @@ public interface ItemService
public void removeGroupPolicies(Context context, Item item, Group group) throws SQLException, AuthorizeException; public void removeGroupPolicies(Context context, Item item, Group group) throws SQLException, AuthorizeException;
/** /**
* remove all policies on an item and its contents, and replace them with * Remove all policies on an item and its contents, and replace them with
* the DEFAULT_ITEM_READ and DEFAULT_BITSTREAM_READ policies belonging to * the DEFAULT_ITEM_READ and DEFAULT_BITSTREAM_READ policies belonging to
* the collection. * the collection.
* *
@@ -488,6 +487,26 @@ public interface ItemService
public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection) public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection)
throws java.sql.SQLException, AuthorizeException; throws java.sql.SQLException, AuthorizeException;
/**
* Remove all submission and workflow policies on an item and its contents, and add
* default collection policies which are not yet already in place.
* If overrideItemReadPolicies is true, then all read policies on the item are replaced (but only if the
* collection has a default read policy).
*
* @param context DSpace context object
* @param item item to reset policies on
* @param collection Collection
* @param overrideItemReadPolicies if true, all read policies on the item are replaced (but only if the
* collection has a default read policy)
* @throws SQLException if a database error occurs or if no default
* policies are found. It's a bit draconian, but default
* policies must be enforced.
* @throws AuthorizeException if authorization error
*/
public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection,
boolean overrideItemReadPolicies)
throws java.sql.SQLException, AuthorizeException;
/** /**
* Adjust the Bundle and Bitstream policies to reflect what have been defined * Adjust the Bundle and Bitstream policies to reflect what have been defined
* during the submission/workflow. The temporary SUBMISSION and WORKFLOW * during the submission/workflow. The temporary SUBMISSION and WORKFLOW
@@ -507,6 +526,28 @@ public interface ItemService
public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection) public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection)
throws SQLException, AuthorizeException; throws SQLException, AuthorizeException;
/**
* Adjust the Bundle and Bitstream policies to reflect what have been defined
* during the submission/workflow. The temporary SUBMISSION and WORKFLOW
* policies are removed and the policies defined at the item and collection
* level are copied and inherited as appropriate. Custom selected Item policies
* are copied to the bundle/bitstream only if no explicit custom policies were
* already applied to the bundle/bitstream. Collection's policies are inherited
* if there are no other policies defined or if the append mode is defined by
* the configuration via the core.authorization.installitem.inheritance-read.append-mode property
*
* @param context DSpace context object
* @param item Item to adjust policies on
* @param collection Collection
* @param replaceReadRPWithCollectionRP if true, all read policies on the item are replaced (but only if the
* collection has a default read policy)
* @throws SQLException If database error
* @throws AuthorizeException If authorization error
*/
public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection,
boolean replaceReadRPWithCollectionRP)
throws SQLException, AuthorizeException;
/** /**
* Adjust the Bitstream policies to reflect what have been defined * Adjust the Bitstream policies to reflect what have been defined
* during the submission/workflow. The temporary SUBMISSION and WORKFLOW * during the submission/workflow. The temporary SUBMISSION and WORKFLOW
@@ -527,6 +568,29 @@ public interface ItemService
public void adjustBitstreamPolicies(Context context, Item item, Collection collection, Bitstream bitstream) public void adjustBitstreamPolicies(Context context, Item item, Collection collection, Bitstream bitstream)
throws SQLException, AuthorizeException; throws SQLException, AuthorizeException;
/**
* Adjust the Bitstream policies to reflect what have been defined
* during the submission/workflow. The temporary SUBMISSION and WORKFLOW
* policies are removed and the policies defined at the item and collection
* level are copied and inherited as appropriate. Custom selected Item policies
* are copied to the bitstream only if no explicit custom policies were
* already applied to the bitstream. Collection's policies are inherited
* if there are no other policies defined or if the append mode is defined by
* the configuration via the core.authorization.installitem.inheritance-read.append-mode property
*
* @param context DSpace context object
* @param item Item to adjust policies on
* @param collection Collection
* @param bitstream Bitstream to adjust policies on
* @param replaceReadRPWithCollectionRP If true, all read policies on the bitstream are replaced (but only if the
* collection has a default read policy)
* @throws SQLException If database error
* @throws AuthorizeException If authorization error
*/
public void adjustBitstreamPolicies(Context context, Item item, Collection collection, Bitstream bitstream,
boolean replaceReadRPWithCollectionRP)
throws SQLException, AuthorizeException;
/** /**
* Adjust the Item's policies to reflect what have been defined during the * Adjust the Item's policies to reflect what have been defined during the
@@ -545,6 +609,26 @@ public interface ItemService
public void adjustItemPolicies(Context context, Item item, Collection collection) public void adjustItemPolicies(Context context, Item item, Collection collection)
throws SQLException, AuthorizeException; throws SQLException, AuthorizeException;
/**
* Adjust the Item's policies to reflect what have been defined during the
* submission/workflow. The temporary SUBMISSION and WORKFLOW policies are
* removed and the default policies defined at the collection level are
* inherited as appropriate. Collection's policies are inherited if there are no
* other policies defined or if the append mode is defined by the configuration
* via the core.authorization.installitem.inheritance-read.append-mode property
*
* @param context DSpace context object
* @param item Item to adjust policies on
* @param collection Collection
* @param replaceReadRPWithCollectionRP If true, all read policies on the item are replaced (but only if the
* collection has a default read policy)
* @throws SQLException If database error
* @throws AuthorizeException If authorization error
*/
public void adjustItemPolicies(Context context, Item item, Collection collection,
boolean replaceReadRPWithCollectionRP)
throws SQLException, AuthorizeException;
/** /**
* Moves the item from one collection to another one * Moves the item from one collection to another one
* *
@@ -664,11 +748,6 @@ public interface ItemService
String schema, String element, String qualifier, String value) String schema, String element, String qualifier, String value)
throws SQLException, AuthorizeException, IOException; throws SQLException, AuthorizeException, IOException;
public Iterator<Item> findByMetadataQuery(Context context, List<List<MetadataField>> listFieldList,
List<String> query_op, List<String> query_val, List<UUID> collectionUuids,
String regexClause, int offset, int limit)
throws SQLException, AuthorizeException, IOException;
/** /**
* Find all the items in the archive with a given authority key value * Find all the items in the archive with a given authority key value
* in the indicated metadata field. * in the indicated metadata field.

View File

@@ -83,13 +83,14 @@ public abstract class AbstractHibernateDSODAO<T extends DSpaceObject> extends Ab
if (CollectionUtils.isNotEmpty(metadataFields) || StringUtils.isNotBlank(additionalWhere)) { if (CollectionUtils.isNotEmpty(metadataFields) || StringUtils.isNotBlank(additionalWhere)) {
//Add the where query on metadata //Add the where query on metadata
query.append(" WHERE "); query.append(" WHERE ");
// Group the 'OR' clauses below in outer parentheses, e.g. "WHERE (clause1 OR clause2 OR clause3)".
// Grouping these 'OR' clauses allows for later code to append 'AND' clauses without unexpected behaviors
query.append("(");
for (int i = 0; i < metadataFields.size(); i++) { for (int i = 0; i < metadataFields.size(); i++) {
MetadataField metadataField = metadataFields.get(i); MetadataField metadataField = metadataFields.get(i);
if (StringUtils.isNotBlank(operator)) { if (StringUtils.isNotBlank(operator)) {
query.append(" (");
query.append("lower(STR(" + metadataField.toString()).append(".value)) ").append(operator) query.append("lower(STR(" + metadataField.toString()).append(".value)) ").append(operator)
.append(" lower(:queryParam)"); .append(" lower(:queryParam)");
query.append(")");
if (i < metadataFields.size() - 1) { if (i < metadataFields.size() - 1) {
query.append(" OR "); query.append(" OR ");
} }
@@ -102,6 +103,7 @@ public abstract class AbstractHibernateDSODAO<T extends DSpaceObject> extends Ab
} }
query.append(additionalWhere); query.append(additionalWhere);
} }
query.append(")");
} }
} }

View File

@@ -128,6 +128,11 @@ public class Context implements AutoCloseable {
private DBConnection dbConnection; private DBConnection dbConnection;
/**
* The default administrator group
*/
private Group adminGroup;
public enum Mode { public enum Mode {
READ_ONLY, READ_ONLY,
READ_WRITE, READ_WRITE,
@@ -810,6 +815,15 @@ public class Context implements AutoCloseable {
readOnlyCache.clear(); readOnlyCache.clear();
} }
// When going to READ_ONLY, flush database changes to ensure that the current data is retrieved
if (newMode == Mode.READ_ONLY && mode != Mode.READ_ONLY) {
try {
dbConnection.flushSession();
} catch (SQLException ex) {
log.warn("Unable to flush database changes after switching to READ_ONLY mode", ex);
}
}
//save the new mode //save the new mode
mode = newMode; mode = newMode;
} }
@@ -951,4 +965,15 @@ public class Context implements AutoCloseable {
public boolean isContextUserSwitched() { public boolean isContextUserSwitched() {
return currentUserPreviousState != null; return currentUserPreviousState != null;
} }
/**
* Returns the default "Administrator" group for DSpace administrators.
* The result is cached in the 'adminGroup' field, so it is only looked up once.
* This is done to improve performance, as this method is called quite often.
*/
public Group getAdminGroup() throws SQLException {
return (adminGroup == null) ? EPersonServiceFactory.getInstance()
.getGroupService()
.findByName(this, Group.ADMIN) : adminGroup;
}
} }

View File

@@ -148,4 +148,12 @@ public interface DBConnection<T> {
* @throws java.sql.SQLException passed through. * @throws java.sql.SQLException passed through.
*/ */
public <E extends ReloadableEntity> void uncacheEntity(E entity) throws SQLException; public <E extends ReloadableEntity> void uncacheEntity(E entity) throws SQLException;
/**
* Do a manual flush. This synchronizes the in-memory state of the Session
* with the database (write changes to the database)
*
* @throws SQLException passed through.
*/
public void flushSession() throws SQLException;
} }

View File

@@ -337,4 +337,17 @@ public class HibernateDBConnection implements DBConnection<Session> {
} }
} }
} }
/**
* Do a manual flush. This synchronizes the in-memory state of the Session
* with the database (write changes to the database)
*
* @throws SQLException passed through.
*/
@Override
public void flushSession() throws SQLException {
if (getSession().isDirty()) {
getSession().flush();
}
}
} }

View File

@@ -17,6 +17,7 @@ import org.dspace.app.util.DCInput;
import org.dspace.app.util.DCInputSet; import org.dspace.app.util.DCInputSet;
import org.dspace.app.util.DCInputsReader; import org.dspace.app.util.DCInputsReader;
import org.dspace.app.util.DCInputsReaderException; import org.dspace.app.util.DCInputsReaderException;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject; import org.dspace.content.DSpaceObject;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.MetadataValue; import org.dspace.content.MetadataValue;
@@ -69,7 +70,7 @@ public class RequiredMetadata extends AbstractCurationTask {
handle = "in workflow"; handle = "in workflow";
} }
sb.append("Item: ").append(handle); sb.append("Item: ").append(handle);
for (String req : getReqList(item.getOwningCollection().getHandle())) { for (String req : getReqList(item.getOwningCollection())) {
List<MetadataValue> vals = itemService.getMetadataByMetadataString(item, req); List<MetadataValue> vals = itemService.getMetadataByMetadataString(item, req);
if (vals.size() == 0) { if (vals.size() == 0) {
sb.append(" missing required field: ").append(req); sb.append(" missing required field: ").append(req);
@@ -91,14 +92,14 @@ public class RequiredMetadata extends AbstractCurationTask {
} }
} }
protected List<String> getReqList(String handle) throws DCInputsReaderException { protected List<String> getReqList(Collection collection) throws DCInputsReaderException {
List<String> reqList = reqMap.get(handle); List<String> reqList = reqMap.get(collection.getHandle());
if (reqList == null) { if (reqList == null) {
reqList = reqMap.get("default"); reqList = reqMap.get("default");
} }
if (reqList == null) { if (reqList == null) {
reqList = new ArrayList<String>(); reqList = new ArrayList<String>();
List<DCInputSet> inputSet = reader.getInputsByCollectionHandle(handle); List<DCInputSet> inputSet = reader.getInputsByCollection(collection);
for (DCInputSet inputs : inputSet) { for (DCInputSet inputs : inputSet) {
for (DCInput[] row : inputs.getFields()) { for (DCInput[] row : inputs.getFields()) {
for (DCInput input : row) { for (DCInput input : row) {

View File

@@ -152,17 +152,10 @@ public class Curation extends DSpaceRunnable<CurationScriptConfiguration> {
super.handler.logInfo("Curating id: " + entry.getObjectId()); super.handler.logInfo("Curating id: " + entry.getObjectId());
} }
curator.clear(); curator.clear();
// does entry relate to a DSO or workflow object?
if (entry.getObjectId().indexOf('/') > 0) {
for (String taskName : entry.getTaskNames()) { for (String taskName : entry.getTaskNames()) {
curator.addTask(taskName); curator.addTask(taskName);
} }
curator.curate(context, entry.getObjectId()); curator.curate(context, entry.getObjectId());
} else {
// TODO: Remove this exception once curation tasks are supported by configurable workflow
// e.g. see https://github.com/DSpace/DSpace/pull/3157
throw new IllegalArgumentException("curation for workflow items is no longer supported");
}
} }
queue.release(this.queue, ticket, true); queue.release(this.queue, ticket, true);
return ticket; return ticket;

View File

@@ -13,6 +13,8 @@ import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection; import org.dspace.content.Collection;
@@ -30,6 +32,7 @@ import org.dspace.workflow.CurationTaskConfig;
import org.dspace.workflow.FlowStep; import org.dspace.workflow.FlowStep;
import org.dspace.workflow.Task; import org.dspace.workflow.Task;
import org.dspace.workflow.TaskSet; import org.dspace.workflow.TaskSet;
import org.dspace.xmlworkflow.Role;
import org.dspace.xmlworkflow.RoleMembers; import org.dspace.xmlworkflow.RoleMembers;
import org.dspace.xmlworkflow.WorkflowConfigurationException; import org.dspace.xmlworkflow.WorkflowConfigurationException;
import org.dspace.xmlworkflow.factory.XmlWorkflowFactory; import org.dspace.xmlworkflow.factory.XmlWorkflowFactory;
@@ -47,14 +50,17 @@ import org.springframework.stereotype.Service;
* Manage interactions between curation and workflow. A curation task can be * Manage interactions between curation and workflow. A curation task can be
* attached to a workflow step, to be executed during the step. * attached to a workflow step, to be executed during the step.
* *
* <p>
* <strong>NOTE:</strong> when run in workflow, curation tasks <em>run with
* authorization disabled</em>.
*
* @see CurationTaskConfig * @see CurationTaskConfig
* @author mwood * @author mwood
*/ */
@Service @Service
public class XmlWorkflowCuratorServiceImpl public class XmlWorkflowCuratorServiceImpl
implements XmlWorkflowCuratorService { implements XmlWorkflowCuratorService {
private static final Logger LOG private static final Logger LOG = LogManager.getLogger();
= org.apache.logging.log4j.LogManager.getLogger();
@Autowired(required = true) @Autowired(required = true)
protected XmlWorkflowFactory workflowFactory; protected XmlWorkflowFactory workflowFactory;
@@ -97,7 +103,18 @@ public class XmlWorkflowCuratorServiceImpl
throws AuthorizeException, IOException, SQLException { throws AuthorizeException, IOException, SQLException {
Curator curator = new Curator(); Curator curator = new Curator();
curator.setReporter(reporter); curator.setReporter(reporter);
return curate(curator, c, wfi); c.turnOffAuthorisationSystem();
boolean wasAnonymous = false;
if (null == c.getCurrentUser()) { // We need someone to email
wasAnonymous = true;
c.setCurrentUser(ePersonService.getSystemEPerson(c));
}
boolean failedP = curate(curator, c, wfi);
if (wasAnonymous) {
c.setCurrentUser(null);
}
c.restoreAuthSystemState();
return failedP;
} }
@Override @Override
@@ -123,7 +140,13 @@ public class XmlWorkflowCuratorServiceImpl
item.setOwningCollection(wfi.getCollection()); item.setOwningCollection(wfi.getCollection());
for (Task task : step.tasks) { for (Task task : step.tasks) {
curator.addTask(task.name); curator.addTask(task.name);
curator.curate(item); // Check whether the task is configured to be queued rather than automatically run
if (StringUtils.isNotEmpty(step.queue)) {
// queue attribute has been set in the FlowStep configuration: add task to configured queue
curator.queue(c, item.getID().toString(), step.queue);
} else {
// Task is configured to be run automatically
curator.curate(c, item);
int status = curator.getStatus(task.name); int status = curator.getStatus(task.name);
String result = curator.getResult(task.name); String result = curator.getResult(task.name);
String action = "none"; String action = "none";
@@ -158,6 +181,7 @@ public class XmlWorkflowCuratorServiceImpl
default: default:
break; break;
} }
}
curator.clear(); curator.clear();
} }
@@ -223,8 +247,12 @@ public class XmlWorkflowCuratorServiceImpl
String status, String action, String message) String status, String action, String message)
throws AuthorizeException, IOException, SQLException { throws AuthorizeException, IOException, SQLException {
List<EPerson> epa = resolveContacts(c, task.getContacts(status), wfi); List<EPerson> epa = resolveContacts(c, task.getContacts(status), wfi);
if (epa.size() > 0) { if (!epa.isEmpty()) {
workflowService.notifyOfCuration(c, wfi, epa, task.name, action, message); workflowService.notifyOfCuration(c, wfi, epa, task.name, action, message);
} else {
LOG.warn("No contacts were found for workflow item {}: "
+ "task {} returned action {} with message {}",
wfi.getID(), task.name, action, message);
} }
} }
@@ -247,8 +275,7 @@ public class XmlWorkflowCuratorServiceImpl
// decode contacts // decode contacts
if ("$flowgroup".equals(contact)) { if ("$flowgroup".equals(contact)) {
// special literal for current flowgroup // special literal for current flowgroup
ClaimedTask claimedTask = claimedTaskService.findByWorkflowIdAndEPerson(c, wfi, c.getCurrentUser()); String stepID = getFlowStep(c, wfi).step;
String stepID = claimedTask.getStepID();
Step step; Step step;
try { try {
Workflow workflow = workflowFactory.getWorkflow(wfi.getCollection()); Workflow workflow = workflowFactory.getWorkflow(wfi.getCollection());
@@ -258,19 +285,26 @@ public class XmlWorkflowCuratorServiceImpl
String.valueOf(wfi.getID()), e); String.valueOf(wfi.getID()), e);
return epList; return epList;
} }
RoleMembers roleMembers = step.getRole().getMembers(c, wfi); Role role = step.getRole();
if (null != role) {
RoleMembers roleMembers = role.getMembers(c, wfi);
for (EPerson ep : roleMembers.getEPersons()) { for (EPerson ep : roleMembers.getEPersons()) {
epList.add(ep); epList.add(ep);
} }
for (Group group : roleMembers.getGroups()) { for (Group group : roleMembers.getGroups()) {
epList.addAll(group.getMembers()); epList.addAll(group.getMembers());
} }
} else {
epList.add(ePersonService.getSystemEPerson(c));
}
} else if ("$colladmin".equals(contact)) { } else if ("$colladmin".equals(contact)) {
// special literal for collection administrators
Group adGroup = wfi.getCollection().getAdministrators(); Group adGroup = wfi.getCollection().getAdministrators();
if (adGroup != null) { if (adGroup != null) {
epList.addAll(groupService.allMembers(c, adGroup)); epList.addAll(groupService.allMembers(c, adGroup));
} }
} else if ("$siteadmin".equals(contact)) { } else if ("$siteadmin".equals(contact)) {
// special literal for site administrator
EPerson siteEp = ePersonService.findByEmail(c, EPerson siteEp = ePersonService.findByEmail(c,
configurationService.getProperty("mail.admin")); configurationService.getProperty("mail.admin"));
if (siteEp != null) { if (siteEp != null) {

View File

@@ -42,9 +42,9 @@ public interface XmlWorkflowCuratorService {
* *
* @param c the context * @param c the context
* @param wfi the workflow item * @param wfi the workflow item
* @return true if curation was completed or not required, * @return true if curation was completed or not required;
* false if tasks were queued for later completion, * false if tasks were queued for later completion,
* or item was rejected * or item was rejected.
* @throws AuthorizeException if authorization error * @throws AuthorizeException if authorization error
* @throws IOException if IO error * @throws IOException if IO error
* @throws SQLException if database error * @throws SQLException if database error
@@ -58,7 +58,9 @@ public interface XmlWorkflowCuratorService {
* @param curator the curation context * @param curator the curation context
* @param c the user context * @param c the user context
* @param wfId the workflow item's ID * @param wfId the workflow item's ID
* @return true if curation failed. * @return true if curation was completed or not required;
* false if tasks were queued for later completion,
* or item was rejected.
* @throws AuthorizeException if authorization error * @throws AuthorizeException if authorization error
* @throws IOException if IO error * @throws IOException if IO error
* @throws SQLException if database error * @throws SQLException if database error
@@ -72,7 +74,9 @@ public interface XmlWorkflowCuratorService {
* @param curator the curation context * @param curator the curation context
* @param c the user context * @param c the user context
* @param wfi the workflow item * @param wfi the workflow item
* @return true if curation failed. * @return true if workflow curation was completed or not required;
* false if tasks were queued for later completion,
* or item was rejected.
* @throws AuthorizeException if authorization error * @throws AuthorizeException if authorization error
* @throws IOException if IO error * @throws IOException if IO error
* @throws SQLException if database error * @throws SQLException if database error

View File

@@ -7,14 +7,20 @@
*/ */
package org.dspace.discovery; package org.dspace.discovery;
import static org.dspace.discovery.IndexClientOptions.TYPE_OPTION;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
import java.util.List;
import java.util.Optional; import java.util.Optional;
import java.util.UUID; import java.util.UUID;
import java.util.stream.Collectors;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.Community; import org.dspace.content.Community;
import org.dspace.content.DSpaceObject; import org.dspace.content.DSpaceObject;
@@ -51,6 +57,17 @@ public class IndexClient extends DSpaceRunnable<IndexDiscoveryScriptConfiguratio
return; return;
} }
String type = null;
if (commandLine.hasOption(TYPE_OPTION)) {
List<String> indexableObjectTypes = IndexObjectFactoryFactory.getInstance().getIndexFactories().stream()
.map((indexFactory -> indexFactory.getType())).collect(Collectors.toList());
type = commandLine.getOptionValue(TYPE_OPTION);
if (!indexableObjectTypes.contains(type)) {
handler.handleException(String.format("%s is not a valid indexable object type, options: %s",
type, Arrays.toString(indexableObjectTypes.toArray())));
}
}
/** Acquire from dspace-services in future */ /** Acquire from dspace-services in future */
/** /**
* new DSpace.getServiceManager().getServiceByName("org.dspace.discovery.SolrIndexer"); * new DSpace.getServiceManager().getServiceByName("org.dspace.discovery.SolrIndexer");
@@ -113,6 +130,10 @@ public class IndexClient extends DSpaceRunnable<IndexDiscoveryScriptConfiguratio
} else if (indexClientOptions == IndexClientOptions.BUILD || } else if (indexClientOptions == IndexClientOptions.BUILD ||
indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) { indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) {
handler.logInfo("(Re)building index from scratch."); handler.logInfo("(Re)building index from scratch.");
if (StringUtils.isNotBlank(type)) {
handler.logWarning(String.format("Type option, %s, not applicable for entire index rebuild option, b" +
", type will be ignored", TYPE_OPTION));
}
indexer.deleteIndex(); indexer.deleteIndex();
indexer.createIndex(context); indexer.createIndex(context);
if (indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) { if (indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) {
@@ -133,14 +154,14 @@ public class IndexClient extends DSpaceRunnable<IndexDiscoveryScriptConfiguratio
} else if (indexClientOptions == IndexClientOptions.UPDATE || } else if (indexClientOptions == IndexClientOptions.UPDATE ||
indexClientOptions == IndexClientOptions.UPDATEANDSPELLCHECK) { indexClientOptions == IndexClientOptions.UPDATEANDSPELLCHECK) {
handler.logInfo("Updating Index"); handler.logInfo("Updating Index");
indexer.updateIndex(context, false); indexer.updateIndex(context, false, type);
if (indexClientOptions == IndexClientOptions.UPDATEANDSPELLCHECK) { if (indexClientOptions == IndexClientOptions.UPDATEANDSPELLCHECK) {
checkRebuildSpellCheck(commandLine, indexer); checkRebuildSpellCheck(commandLine, indexer);
} }
} else if (indexClientOptions == IndexClientOptions.FORCEUPDATE || } else if (indexClientOptions == IndexClientOptions.FORCEUPDATE ||
indexClientOptions == IndexClientOptions.FORCEUPDATEANDSPELLCHECK) { indexClientOptions == IndexClientOptions.FORCEUPDATEANDSPELLCHECK) {
handler.logInfo("Updating Index"); handler.logInfo("Updating Index");
indexer.updateIndex(context, true); indexer.updateIndex(context, true, type);
if (indexClientOptions == IndexClientOptions.FORCEUPDATEANDSPELLCHECK) { if (indexClientOptions == IndexClientOptions.FORCEUPDATEANDSPELLCHECK) {
checkRebuildSpellCheck(commandLine, indexer); checkRebuildSpellCheck(commandLine, indexer);
} }

View File

@@ -8,8 +8,13 @@
package org.dspace.discovery; package org.dspace.discovery;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory;
/** /**
* This Enum holds all the possible options and combinations for the Index discovery script * This Enum holds all the possible options and combinations for the Index discovery script
@@ -29,6 +34,8 @@ public enum IndexClientOptions {
FORCEUPDATEANDSPELLCHECK, FORCEUPDATEANDSPELLCHECK,
HELP; HELP;
public static final String TYPE_OPTION = "t";
/** /**
* This method resolves the CommandLine parameters to figure out which action the index-discovery script should * This method resolves the CommandLine parameters to figure out which action the index-discovery script should
* perform * perform
@@ -71,11 +78,15 @@ public enum IndexClientOptions {
protected static Options constructOptions() { protected static Options constructOptions() {
Options options = new Options(); Options options = new Options();
List<String> indexableObjectTypes = IndexObjectFactoryFactory.getInstance().getIndexFactories().stream()
.map((indexFactory -> indexFactory.getType())).collect(Collectors.toList());
options options
.addOption("r", "remove", true, "remove an Item, Collection or Community from index based on its handle"); .addOption("r", "remove", true, "remove an Item, Collection or Community from index based on its handle");
options.addOption("i", "index", true, options.addOption("i", "index", true,
"add or update an Item, Collection or Community based on its handle or uuid"); "add or update an Item, Collection or Community based on its handle or uuid");
options.addOption(TYPE_OPTION, "type", true, "reindex only specific type of " +
"(re)indexable objects; options: " + Arrays.toString(indexableObjectTypes.toArray()));
options.addOption("c", "clean", false, options.addOption("c", "clean", false,
"clean existing index removing any documents that no longer exist in the db"); "clean existing index removing any documents that no longer exist in the db");
options.addOption("d", "delete", false, options.addOption("d", "delete", false,

View File

@@ -154,7 +154,11 @@ public class IndexEventConsumer implements Consumer {
case Event.REMOVE: case Event.REMOVE:
case Event.ADD: case Event.ADD:
if (object == null) { // At this time, ADD and REMOVE actions are ignored on SITE object. They are only triggered for
// top-level communities. No action is necessary as Community itself is indexed (or deleted) separately.
if (event.getSubjectType() == Constants.SITE) {
log.debug(event.getEventTypeAsString() + " event triggered for Site object. Skipping it.");
} else if (object == null) {
log.warn(event.getEventTypeAsString() + " event, could not get object for " log.warn(event.getEventTypeAsString() + " event, could not get object for "
+ event.getObjectTypeAsString() + " id=" + event.getObjectTypeAsString() + " id="
+ event.getObjectID() + event.getObjectID()
@@ -201,6 +205,10 @@ public class IndexEventConsumer implements Consumer {
@Override @Override
public void end(Context ctx) throws Exception { public void end(Context ctx) throws Exception {
// Change the mode to readonly to improve performance
Context.Mode originalMode = ctx.getCurrentMode();
ctx.setMode(Context.Mode.READ_ONLY);
try { try {
for (String uid : uniqueIdsToDelete) { for (String uid : uniqueIdsToDelete) {
try { try {
@@ -230,6 +238,8 @@ public class IndexEventConsumer implements Consumer {
uniqueIdsToDelete.clear(); uniqueIdsToDelete.clear();
createdItemsToUpdate.clear(); createdItemsToUpdate.clear();
} }
ctx.setMode(originalMode);
} }
} }

View File

@@ -1031,9 +1031,8 @@ public class SolrServiceImpl implements SearchService, IndexingService {
// Add information about our search fields // Add information about our search fields
for (String field : searchFields) { for (String field : searchFields) {
List<String> valuesAsString = new ArrayList<>(); List<String> valuesAsString = new ArrayList<>();
for (Object o : doc.getFieldValues(field)) { Optional.ofNullable(doc.getFieldValues(field))
valuesAsString.add(String.valueOf(o)); .ifPresent(l -> l.forEach(o -> valuesAsString.add(String.valueOf(o))));
}
resultDoc.addSearchField(field, valuesAsString.toArray(new String[valuesAsString.size()])); resultDoc.addSearchField(field, valuesAsString.toArray(new String[valuesAsString.size()]));
} }
result.addSearchDocument(indexableObject, resultDoc); result.addSearchDocument(indexableObject, resultDoc);

View File

@@ -64,7 +64,14 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
//Do any additional indexing, depends on the plugins //Do any additional indexing, depends on the plugins
for (SolrServiceIndexPlugin solrServiceIndexPlugin : ListUtils.emptyIfNull(solrServiceIndexPlugins)) { for (SolrServiceIndexPlugin solrServiceIndexPlugin : ListUtils.emptyIfNull(solrServiceIndexPlugins)) {
try {
solrServiceIndexPlugin.additionalIndex(context, indexableObject, doc); solrServiceIndexPlugin.additionalIndex(context, indexableObject, doc);
} catch (Exception e) {
log.error("An error occurred while indexing additional fields. " +
"Could not fully index item with UUID: {}. Plugin: {}",
indexableObject.getUniqueIndexID(), solrServiceIndexPlugin.getClass().getSimpleName());
}
} }
return doc; return doc;
@@ -113,6 +120,18 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
// Use Apache Tika to parse the full text stream(s) // Use Apache Tika to parse the full text stream(s)
try (InputStream fullTextStreams = streams.getStream()) { try (InputStream fullTextStreams = streams.getStream()) {
tikaParser.parse(fullTextStreams, tikaHandler, tikaMetadata, tikaContext); tikaParser.parse(fullTextStreams, tikaHandler, tikaMetadata, tikaContext);
// Write Tika metadata to "tika_meta_*" fields.
// This metadata is not very useful right now,
// but we'll keep it just in case it becomes more useful.
for (String name : tikaMetadata.names()) {
for (String value : tikaMetadata.getValues(name)) {
doc.addField("tika_meta_" + name, value);
}
}
// Save (parsed) full text to "fulltext" field
doc.addField("fulltext", tikaHandler.toString());
} catch (SAXException saxe) { } catch (SAXException saxe) {
// Check if this SAXException is just a notice that this file was longer than the character limit. // Check if this SAXException is just a notice that this file was longer than the character limit.
// Unfortunately there is not a unique, public exception type to catch here. This error is thrown // Unfortunately there is not a unique, public exception type to catch here. This error is thrown
@@ -126,26 +145,19 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
log.error("Tika parsing error. Could not index full text.", saxe); log.error("Tika parsing error. Could not index full text.", saxe);
throw new IOException("Tika parsing error. Could not index full text.", saxe); throw new IOException("Tika parsing error. Could not index full text.", saxe);
} }
} catch (TikaException ex) { } catch (TikaException | IOException ex) {
log.error("Tika parsing error. Could not index full text.", ex); log.error("Tika parsing error. Could not index full text.", ex);
throw new IOException("Tika parsing error. Could not index full text.", ex); throw new IOException("Tika parsing error. Could not index full text.", ex);
} } finally {
// Write Tika metadata to "tika_meta_*" fields.
// This metadata is not very useful right now, but we'll keep it just in case it becomes more useful.
for (String name : tikaMetadata.names()) {
for (String value : tikaMetadata.getValues(name)) {
doc.addField("tika_meta_" + name, value);
}
}
// Save (parsed) full text to "fulltext" field
doc.addField("fulltext", tikaHandler.toString());
}
// Add document to index // Add document to index
solr.add(doc); solr.add(doc);
} }
return;
}
// Add document to index
solr.add(doc);
}
} }

View File

@@ -33,6 +33,7 @@ import org.dspace.content.DSpaceObjectServiceImpl;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.MetadataField; import org.dspace.content.MetadataField;
import org.dspace.content.MetadataValue; import org.dspace.content.MetadataValue;
import org.dspace.content.QAEventProcessed;
import org.dspace.content.WorkspaceItem; import org.dspace.content.WorkspaceItem;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
@@ -47,6 +48,8 @@ import org.dspace.eperson.service.GroupService;
import org.dspace.eperson.service.SubscribeService; import org.dspace.eperson.service.SubscribeService;
import org.dspace.event.Event; import org.dspace.event.Event;
import org.dspace.orcid.service.OrcidTokenService; import org.dspace.orcid.service.OrcidTokenService;
import org.dspace.qaevent.dao.QAEventsDAO;
import org.dspace.services.ConfigurationService;
import org.dspace.util.UUIDUtils; import org.dspace.util.UUIDUtils;
import org.dspace.versioning.Version; import org.dspace.versioning.Version;
import org.dspace.versioning.VersionHistory; import org.dspace.versioning.VersionHistory;
@@ -101,8 +104,12 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
protected VersionDAO versionDAO; protected VersionDAO versionDAO;
@Autowired(required = true) @Autowired(required = true)
protected ClaimedTaskService claimedTaskService; protected ClaimedTaskService claimedTaskService;
@Autowired(required = true)
protected ConfigurationService configurationService;
@Autowired @Autowired
protected OrcidTokenService orcidTokenService; protected OrcidTokenService orcidTokenService;
@Autowired
protected QAEventsDAO qaEventsDao;
protected EPersonServiceImpl() { protected EPersonServiceImpl() {
super(); super();
@@ -113,13 +120,42 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
return ePersonDAO.findByID(context, EPerson.class, id); return ePersonDAO.findByID(context, EPerson.class, id);
} }
/**
* Create a fake EPerson which can receive email. Its address will be the
* value of "mail.admin", or "postmaster" if all else fails.
* @param c
* @return
* @throws SQLException
*/
@Override
public EPerson getSystemEPerson(Context c)
throws SQLException {
String adminEmail = configurationService.getProperty("mail.admin");
if (null == adminEmail) {
adminEmail = "postmaster"; // Last-ditch attempt to send *somewhere*
}
EPerson systemEPerson = findByEmail(c, adminEmail);
if (null == systemEPerson) {
systemEPerson = new EPerson();
systemEPerson.setEmail(adminEmail);
}
return systemEPerson;
}
@Override @Override
public EPerson findByIdOrLegacyId(Context context, String id) throws SQLException { public EPerson findByIdOrLegacyId(Context context, String id) throws SQLException {
try {
if (StringUtils.isNumeric(id)) { if (StringUtils.isNumeric(id)) {
return findByLegacyId(context, Integer.parseInt(id)); return findByLegacyId(context, Integer.parseInt(id));
} else { } else {
return find(context, UUID.fromString(id)); return find(context, UUID.fromString(id));
} }
} catch (IllegalArgumentException e) {
// Not a valid legacy ID or valid UUID
return null;
}
} }
@Override @Override
@@ -157,32 +193,98 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
@Override @Override
public List<EPerson> search(Context context, String query, int offset, int limit) throws SQLException { public List<EPerson> search(Context context, String query, int offset, int limit) throws SQLException {
try { List<EPerson> ePersons = new ArrayList<>();
List<EPerson> ePerson = new ArrayList<>(); UUID uuid = UUIDUtils.fromString(query);
EPerson person = find(context, UUID.fromString(query)); if (uuid == null) {
if (person != null) { // Search by firstname & lastname (NOTE: email will also be included automatically)
ePerson.add(person);
}
return ePerson;
} catch (IllegalArgumentException e) {
MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null); MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null);
MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null); MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null);
if (StringUtils.isBlank(query)) { if (StringUtils.isBlank(query)) {
query = null; query = null;
} }
return ePersonDAO.search(context, query, Arrays.asList(firstNameField, lastNameField), ePersons = ePersonDAO.search(context, query, Arrays.asList(firstNameField, lastNameField),
Arrays.asList(firstNameField, lastNameField), offset, limit); Arrays.asList(firstNameField, lastNameField), offset, limit);
} else {
// Search by UUID
EPerson person = find(context, uuid);
if (person != null) {
ePersons.add(person);
} }
} }
return ePersons;
}
@Override @Override
public int searchResultCount(Context context, String query) throws SQLException { public int searchResultCount(Context context, String query) throws SQLException {
int result = 0;
UUID uuid = UUIDUtils.fromString(query);
if (uuid == null) {
// Count results found by firstname & lastname (email is also included automatically)
MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null); MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null);
MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null); MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null);
if (StringUtils.isBlank(query)) { if (StringUtils.isBlank(query)) {
query = null; query = null;
} }
return ePersonDAO.searchResultCount(context, query, Arrays.asList(firstNameField, lastNameField)); result = ePersonDAO.searchResultCount(context, query, Arrays.asList(firstNameField, lastNameField));
} else {
// Search by UUID
EPerson person = find(context, uuid);
if (person != null) {
result = 1;
}
}
return result;
}
@Override
public List<EPerson> searchNonMembers(Context context, String query, Group excludeGroup, int offset, int limit)
throws SQLException {
List<EPerson> ePersons = new ArrayList<>();
UUID uuid = UUIDUtils.fromString(query);
if (uuid == null) {
// Search by firstname & lastname (NOTE: email will also be included automatically)
MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null);
MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null);
if (StringUtils.isBlank(query)) {
query = null;
}
ePersons = ePersonDAO.searchNotMember(context, query, Arrays.asList(firstNameField, lastNameField),
excludeGroup, Arrays.asList(firstNameField, lastNameField),
offset, limit);
} else {
// Search by UUID
EPerson person = find(context, uuid);
// Verify EPerson is NOT a member of the given excludeGroup before adding
if (person != null && !groupService.isDirectMember(excludeGroup, person)) {
ePersons.add(person);
}
}
return ePersons;
}
@Override
public int searchNonMembersCount(Context context, String query, Group excludeGroup) throws SQLException {
int result = 0;
UUID uuid = UUIDUtils.fromString(query);
if (uuid == null) {
// Count results found by firstname & lastname (email is also included automatically)
MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null);
MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null);
if (StringUtils.isBlank(query)) {
query = null;
}
result = ePersonDAO.searchNotMemberCount(context, query, Arrays.asList(firstNameField, lastNameField),
excludeGroup);
} else {
// Search by UUID
EPerson person = find(context, uuid);
// Verify EPerson is NOT a member of the given excludeGroup before counting
if (person != null && !groupService.isDirectMember(excludeGroup, person)) {
result = 1;
}
}
return result;
} }
@Override @Override
@@ -278,10 +380,13 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
throw new AuthorizeException( throw new AuthorizeException(
"You must be an admin to delete an EPerson"); "You must be an admin to delete an EPerson");
} }
// Get all workflow-related groups that the current EPerson belongs to
Set<Group> workFlowGroups = getAllWorkFlowGroups(context, ePerson); Set<Group> workFlowGroups = getAllWorkFlowGroups(context, ePerson);
for (Group group: workFlowGroups) { for (Group group: workFlowGroups) {
List<EPerson> ePeople = groupService.allMembers(context, group); // Get total number of unique EPerson objs who are a member of this group (or subgroup)
if (ePeople.size() == 1 && ePeople.contains(ePerson)) { int totalMembers = groupService.countAllMembers(context, group);
// If only one EPerson is a member, then we cannot delete the last member of this group.
if (totalMembers == 1) {
throw new EmptyWorkflowGroupException(ePerson.getID(), group.getID()); throw new EmptyWorkflowGroupException(ePerson.getID(), group.getID());
} }
} }
@@ -391,6 +496,11 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
// Remove any subscriptions // Remove any subscriptions
subscribeService.deleteByEPerson(context, ePerson); subscribeService.deleteByEPerson(context, ePerson);
List<QAEventProcessed> qaEvents = qaEventsDao.findByEPerson(context, ePerson);
for (QAEventProcessed qaEvent : qaEvents) {
qaEventsDao.delete(context, qaEvent);
}
// Remove ourself // Remove ourself
ePersonDAO.delete(context, ePerson); ePersonDAO.delete(context, ePerson);
@@ -540,14 +650,29 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
@Override @Override
public List<EPerson> findByGroups(Context c, Set<Group> groups) throws SQLException { public List<EPerson> findByGroups(Context c, Set<Group> groups) throws SQLException {
return findByGroups(c, groups, -1, -1);
}
@Override
public List<EPerson> findByGroups(Context c, Set<Group> groups, int pageSize, int offset) throws SQLException {
//Make sure we at least have one group, if not don't even bother searching. //Make sure we at least have one group, if not don't even bother searching.
if (CollectionUtils.isNotEmpty(groups)) { if (CollectionUtils.isNotEmpty(groups)) {
return ePersonDAO.findByGroups(c, groups); return ePersonDAO.findByGroups(c, groups, pageSize, offset);
} else { } else {
return new ArrayList<>(); return new ArrayList<>();
} }
} }
@Override
public int countByGroups(Context c, Set<Group> groups) throws SQLException {
//Make sure we at least have one group, if not don't even bother counting.
if (CollectionUtils.isNotEmpty(groups)) {
return ePersonDAO.countByGroups(c, groups);
} else {
return 0;
}
}
@Override @Override
public List<EPerson> findEPeopleWithSubscription(Context context) throws SQLException { public List<EPerson> findEPeopleWithSubscription(Context context) throws SQLException {
return ePersonDAO.findAllSubscribers(context); return ePersonDAO.findAllSubscribers(context);

View File

@@ -98,7 +98,11 @@ public class Group extends DSpaceObject implements DSpaceObjectLegacySupport {
} }
/** /**
* Return EPerson members of a Group * Return EPerson members of a Group.
* <P>
* WARNING: This method may have bad performance for Groups with large numbers of EPerson members.
* Therefore, only use this when you need to access every EPerson member. Instead, consider using
* EPersonService.findByGroups() for a paginated list of EPersons.
* *
* @return list of EPersons * @return list of EPersons
*/ */
@@ -143,9 +147,13 @@ public class Group extends DSpaceObject implements DSpaceObjectLegacySupport {
} }
/** /**
* Return Group members of a Group. * Return Group members (i.e. direct subgroups) of a Group.
* <P>
* WARNING: This method may have bad performance for Groups with large numbers of Subgroups.
* Therefore, only use this when you need to access every Subgroup. Instead, consider using
* GroupService.findByParent() for a paginated list of Subgroups.
* *
* @return list of groups * @return list of subgroups
*/ */
public List<Group> getMemberGroups() { public List<Group> getMemberGroups() {
return groups; return groups;

View File

@@ -179,8 +179,13 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
for (CollectionRole collectionRole : collectionRoles) { for (CollectionRole collectionRole : collectionRoles) {
if (StringUtils.equals(collectionRole.getRoleId(), role.getId()) if (StringUtils.equals(collectionRole.getRoleId(), role.getId())
&& claimedTask.getWorkflowItem().getCollection() == collectionRole.getCollection()) { && claimedTask.getWorkflowItem().getCollection() == collectionRole.getCollection()) {
List<EPerson> ePeople = allMembers(context, group); // Count number of EPersons who are *direct* members of this group
if (ePeople.size() == 1 && ePeople.contains(ePerson)) { int totalDirectEPersons = ePersonService.countByGroups(context, Set.of(group));
// Count number of Groups which have this groupParent as a direct parent
int totalChildGroups = countByParent(context, group);
// If this group has only one direct EPerson and *zero* child groups, then we cannot delete the
// EPerson or we will leave this group empty.
if (totalDirectEPersons == 1 && totalChildGroups == 0) {
throw new IllegalStateException( throw new IllegalStateException(
"Refused to remove user " + ePerson "Refused to remove user " + ePerson
.getID() + " from workflow group because the group " + group .getID() + " from workflow group because the group " + group
@@ -191,8 +196,13 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
} }
} }
if (!poolTasks.isEmpty()) { if (!poolTasks.isEmpty()) {
List<EPerson> ePeople = allMembers(context, group); // Count number of EPersons who are *direct* members of this group
if (ePeople.size() == 1 && ePeople.contains(ePerson)) { int totalDirectEPersons = ePersonService.countByGroups(context, Set.of(group));
// Count number of Groups which have this groupParent as a direct parent
int totalChildGroups = countByParent(context, group);
// If this group has only one direct EPerson and *zero* child groups, then we cannot delete the
// EPerson or we will leave this group empty.
if (totalDirectEPersons == 1 && totalChildGroups == 0) {
throw new IllegalStateException( throw new IllegalStateException(
"Refused to remove user " + ePerson "Refused to remove user " + ePerson
.getID() + " from workflow group because the group " + group .getID() + " from workflow group because the group " + group
@@ -212,9 +222,13 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
if (!collectionRoles.isEmpty()) { if (!collectionRoles.isEmpty()) {
List<PoolTask> poolTasks = poolTaskService.findByGroup(context, groupParent); List<PoolTask> poolTasks = poolTaskService.findByGroup(context, groupParent);
if (!poolTasks.isEmpty()) { if (!poolTasks.isEmpty()) {
List<EPerson> parentPeople = allMembers(context, groupParent); // Count number of Groups which have this groupParent as a direct parent
List<EPerson> childPeople = allMembers(context, childGroup); int totalChildGroups = countByParent(context, groupParent);
if (childPeople.containsAll(parentPeople)) { // Count number of EPersons who are *direct* members of this group
int totalDirectEPersons = ePersonService.countByGroups(context, Set.of(groupParent));
// If this group has only one childGroup and *zero* direct EPersons, then we cannot delete the
// childGroup or we will leave this group empty.
if (totalChildGroups == 1 && totalDirectEPersons == 0) {
throw new IllegalStateException( throw new IllegalStateException(
"Refused to remove sub group " + childGroup "Refused to remove sub group " + childGroup
.getID() + " from workflow group because the group " + groupParent .getID() + " from workflow group because the group " + groupParent
@@ -368,7 +382,8 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
// Get all groups which are a member of this group // Get all groups which are a member of this group
List<Group2GroupCache> group2GroupCaches = group2GroupCacheDAO.findByParent(c, g); List<Group2GroupCache> group2GroupCaches = group2GroupCacheDAO.findByParent(c, g);
Set<Group> groups = new HashSet<>(); // Initialize HashSet based on List size to avoid Set resizing. See https://stackoverflow.com/a/21822273
Set<Group> groups = new HashSet<>((int) (group2GroupCaches.size() / 0.75 + 1));
for (Group2GroupCache group2GroupCache : group2GroupCaches) { for (Group2GroupCache group2GroupCache : group2GroupCaches) {
groups.add(group2GroupCache.getChild()); groups.add(group2GroupCache.getChild());
} }
@@ -381,6 +396,23 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
return new ArrayList<>(childGroupChildren); return new ArrayList<>(childGroupChildren);
} }
@Override
public int countAllMembers(Context context, Group group) throws SQLException {
// Get all groups which are a member of this group
List<Group2GroupCache> group2GroupCaches = group2GroupCacheDAO.findByParent(context, group);
// Initialize HashSet based on List size + current 'group' to avoid Set resizing.
// See https://stackoverflow.com/a/21822273
Set<Group> groups = new HashSet<>((int) ((group2GroupCaches.size() + 1) / 0.75 + 1));
for (Group2GroupCache group2GroupCache : group2GroupCaches) {
groups.add(group2GroupCache.getChild());
}
// Append current group as well
groups.add(group);
// Return total number of unique EPerson objects in any of these groups
return ePersonService.countByGroups(context, groups);
}
@Override @Override
public Group find(Context context, UUID id) throws SQLException { public Group find(Context context, UUID id) throws SQLException {
if (id == null) { if (id == null) {
@@ -428,17 +460,17 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
} }
@Override @Override
public List<Group> search(Context context, String groupIdentifier) throws SQLException { public List<Group> search(Context context, String query) throws SQLException {
return search(context, groupIdentifier, -1, -1); return search(context, query, -1, -1);
} }
@Override @Override
public List<Group> search(Context context, String groupIdentifier, int offset, int limit) throws SQLException { public List<Group> search(Context context, String query, int offset, int limit) throws SQLException {
List<Group> groups = new ArrayList<>(); List<Group> groups = new ArrayList<>();
UUID uuid = UUIDUtils.fromString(groupIdentifier); UUID uuid = UUIDUtils.fromString(query);
if (uuid == null) { if (uuid == null) {
//Search by group name //Search by group name
groups = groupDAO.findByNameLike(context, groupIdentifier, offset, limit); groups = groupDAO.findByNameLike(context, query, offset, limit);
} else { } else {
//Search by group id //Search by group id
Group group = find(context, uuid); Group group = find(context, uuid);
@@ -451,12 +483,12 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
} }
@Override @Override
public int searchResultCount(Context context, String groupIdentifier) throws SQLException { public int searchResultCount(Context context, String query) throws SQLException {
int result = 0; int result = 0;
UUID uuid = UUIDUtils.fromString(groupIdentifier); UUID uuid = UUIDUtils.fromString(query);
if (uuid == null) { if (uuid == null) {
//Search by group name //Search by group name
result = groupDAO.countByNameLike(context, groupIdentifier); result = groupDAO.countByNameLike(context, query);
} else { } else {
//Search by group id //Search by group id
Group group = find(context, uuid); Group group = find(context, uuid);
@@ -468,6 +500,44 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
return result; return result;
} }
@Override
public List<Group> searchNonMembers(Context context, String query, Group excludeParentGroup,
int offset, int limit) throws SQLException {
List<Group> groups = new ArrayList<>();
UUID uuid = UUIDUtils.fromString(query);
if (uuid == null) {
// Search by group name
groups = groupDAO.findByNameLikeAndNotMember(context, query, excludeParentGroup, offset, limit);
} else if (!uuid.equals(excludeParentGroup.getID())) {
// Search by group id
Group group = find(context, uuid);
// Verify it is NOT a member of the given excludeParentGroup before adding
if (group != null && !isMember(excludeParentGroup, group)) {
groups.add(group);
}
}
return groups;
}
@Override
public int searchNonMembersCount(Context context, String query, Group excludeParentGroup) throws SQLException {
int result = 0;
UUID uuid = UUIDUtils.fromString(query);
if (uuid == null) {
// Search by group name
result = groupDAO.countByNameLikeAndNotMember(context, query, excludeParentGroup);
} else if (!uuid.equals(excludeParentGroup.getID())) {
// Search by group id
Group group = find(context, uuid);
// Verify it is NOT a member of the given excludeParentGroup before adding
if (group != null && !isMember(excludeParentGroup, group)) {
result = 1;
}
}
return result;
}
@Override @Override
public void delete(Context context, Group group) throws SQLException { public void delete(Context context, Group group) throws SQLException {
if (group.isPermanent()) { if (group.isPermanent()) {
@@ -802,10 +872,15 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
@Override @Override
public Group findByIdOrLegacyId(Context context, String id) throws SQLException { public Group findByIdOrLegacyId(Context context, String id) throws SQLException {
if (org.apache.commons.lang3.StringUtils.isNumeric(id)) { try {
if (StringUtils.isNumeric(id)) {
return findByLegacyId(context, Integer.parseInt(id)); return findByLegacyId(context, Integer.parseInt(id));
} else { } else {
return find(context, UUIDUtils.fromString(id)); return find(context, UUID.fromString(id));
}
} catch (IllegalArgumentException e) {
// Not a valid legacy ID or valid UUID
return null;
} }
} }
@@ -829,4 +904,20 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
public String getName(Group dso) { public String getName(Group dso) {
return dso.getName(); return dso.getName();
} }
@Override
public List<Group> findByParent(Context context, Group parent, int pageSize, int offset) throws SQLException {
if (parent == null) {
return null;
}
return groupDAO.findByParent(context, parent, pageSize, offset);
}
@Override
public int countByParent(Context context, Group parent) throws SQLException {
if (parent == null) {
return 0;
}
return groupDAO.countByParent(context, parent);
}
} }

View File

@@ -33,12 +33,91 @@ public interface EPersonDAO extends DSpaceObjectDAO<EPerson>, DSpaceObjectLegacy
public EPerson findByNetid(Context context, String netid) throws SQLException; public EPerson findByNetid(Context context, String netid) throws SQLException;
/**
* Search all EPersons by the given MetadataField objects, sorting by the given sort fields.
* <P>
* NOTE: As long as a query is specified, the EPerson's email address is included in the search alongside any given
* metadata fields.
*
* @param context DSpace context
* @param query the text to search EPersons for
* @param queryFields the metadata fields to search within (email is also included automatically)
* @param sortFields the metadata field(s) to sort the results by
* @param offset the position of the first result to return
* @param limit how many results return
* @return List of matching EPerson objects
* @throws SQLException if an error occurs
*/
public List<EPerson> search(Context context, String query, List<MetadataField> queryFields, public List<EPerson> search(Context context, String query, List<MetadataField> queryFields,
List<MetadataField> sortFields, int offset, int limit) throws SQLException; List<MetadataField> sortFields, int offset, int limit) throws SQLException;
/**
* Count number of EPersons who match a search on the given metadata fields. This returns the count of total
* results for the same query using the 'search()', and therefore can be used to provide pagination.
*
* @param context DSpace context
* @param query the text to search EPersons for
* @param queryFields the metadata fields to search within (email is also included automatically)
* @return total number of EPersons who match the query
* @throws SQLException if an error occurs
*/
public int searchResultCount(Context context, String query, List<MetadataField> queryFields) throws SQLException; public int searchResultCount(Context context, String query, List<MetadataField> queryFields) throws SQLException;
public List<EPerson> findByGroups(Context context, Set<Group> groups) throws SQLException; /**
* Search all EPersons via their firstname, lastname, email (fuzzy match), limited to those EPersons which are NOT
* a member of the given group. This may be used to search across EPersons which are valid to add as members to the
* given group.
*
* @param context The DSpace context
* @param query the text to search EPersons for
* @param queryFields the metadata fields to search within (email is also included automatically)
* @param excludeGroup Group to exclude results from. Members of this group will never be returned.
* @param offset the position of the first result to return
* @param limit how many results return
* @return EPersons matching the query (which are not members of the given group)
* @throws SQLException if database error
*/
List<EPerson> searchNotMember(Context context, String query, List<MetadataField> queryFields, Group excludeGroup,
List<MetadataField> sortFields, int offset, int limit) throws SQLException;
/**
* Count number of EPersons that match a given search (fuzzy match) across firstname, lastname and email. This
* search is limited to those EPersons which are NOT a member of the given group. This may be used
* (with searchNotMember()) to perform a paginated search across EPersons which are valid to add to the given group.
*
* @param context The DSpace context
* @param query querystring to fuzzy match against.
* @param queryFields the metadata fields to search within (email is also included automatically)
* @param excludeGroup Group to exclude results from. Members of this group will never be returned.
* @return Groups matching the query (which are not members of the given parent)
* @throws SQLException if database error
*/
int searchNotMemberCount(Context context, String query, List<MetadataField> queryFields, Group excludeGroup)
throws SQLException;
/**
* Find all EPersons who are a member of one or more of the listed groups in a paginated fashion. This returns
* EPersons ordered by UUID.
*
* @param context current Context
* @param groups Set of group(s) to check membership in
* @param pageSize number of EPerson objects to load at one time. Set to <=0 to disable pagination
* @param offset number of page to load (starting with 1). Set to <=0 to disable pagination
* @return List of all EPersons who are a member of one or more groups.
* @throws SQLException
*/
List<EPerson> findByGroups(Context context, Set<Group> groups, int pageSize, int offset) throws SQLException;
/**
* Count total number of EPersons who are a member of one or more of the listed groups. This provides the total
* number of results to expect from corresponding findByGroups() for pagination purposes.
*
* @param context current Context
* @param groups Set of group(s) to check membership in
* @return total number of (unique) EPersons who are a member of one or more groups.
* @throws SQLException
*/
int countByGroups(Context context, Set<Group> groups) throws SQLException;
public List<EPerson> findWithPasswordWithoutDigestAlgorithm(Context context) throws SQLException; public List<EPerson> findWithPasswordWithoutDigestAlgorithm(Context context) throws SQLException;

View File

@@ -135,6 +135,38 @@ public interface GroupDAO extends DSpaceObjectDAO<Group>, DSpaceObjectLegacySupp
*/ */
int countByNameLike(Context context, String groupName) throws SQLException; int countByNameLike(Context context, String groupName) throws SQLException;
/**
* Search all groups via their name (fuzzy match), limited to those groups which are NOT a member of the given
* parent group. This may be used to search across groups which are valid to add to the given parent group.
* <P>
* NOTE: The parent group itself is also excluded from the search.
*
* @param context The DSpace context
* @param groupName Group name to fuzzy match against.
* @param excludeParent Parent Group to exclude results from. Groups under this parent will never be returned.
* @param offset Offset to use for pagination (-1 to disable)
* @param limit The maximum number of results to return (-1 to disable)
* @return Groups matching the query (which are not members of the given parent)
* @throws SQLException if database error
*/
List<Group> findByNameLikeAndNotMember(Context context, String groupName, Group excludeParent,
int offset, int limit) throws SQLException;
/**
* Count number of groups that match a given name (fuzzy match), limited to those groups which are NOT a member of
* the given parent group. This may be used (with findByNameLikeAndNotMember()) to search across groups which are
* valid to add to the given parent group.
* <P>
* NOTE: The parent group itself is also excluded from the count.
*
* @param context The DSpace context
* @param groupName Group name to fuzzy match against.
* @param excludeParent Parent Group to exclude results from. Groups under this parent will never be returned.
* @return Groups matching the query (which are not members of the given parent)
* @throws SQLException if database error
*/
int countByNameLikeAndNotMember(Context context, String groupName, Group excludeParent) throws SQLException;
/** /**
* Find a group by its name and the membership of the given EPerson * Find a group by its name and the membership of the given EPerson
* *
@@ -146,4 +178,28 @@ public interface GroupDAO extends DSpaceObjectDAO<Group>, DSpaceObjectLegacySupp
*/ */
Group findByIdAndMembership(Context context, UUID id, EPerson ePerson) throws SQLException; Group findByIdAndMembership(Context context, UUID id, EPerson ePerson) throws SQLException;
/**
* Find all groups which are members of a given parent group.
* This provides the same behavior as group.getMemberGroups(), but in a paginated fashion.
*
* @param context The DSpace context
* @param parent Parent Group to search within
* @param pageSize how many results return
* @param offset the position of the first result to return
* @return Groups matching the query
* @throws SQLException if database error
*/
List<Group> findByParent(Context context, Group parent, int pageSize, int offset) throws SQLException;
/**
* Returns the number of groups which are members of a given parent group.
* This provides the same behavior as group.getMemberGroups().size(), but with better performance for large groups.
* This method may be used with findByParent() to perform pagination.
*
* @param context The DSpace context
* @param parent Parent Group to search within
* @return Number of Groups matching the query
* @throws SQLException if database error
*/
int countByParent(Context context, Group parent) throws SQLException;
} }

View File

@@ -70,17 +70,9 @@ public class EPersonDAOImpl extends AbstractHibernateDSODAO<EPerson> implements
String queryString = "SELECT " + EPerson.class.getSimpleName() String queryString = "SELECT " + EPerson.class.getSimpleName()
.toLowerCase() + " FROM EPerson as " + EPerson.class .toLowerCase() + " FROM EPerson as " + EPerson.class
.getSimpleName().toLowerCase() + " "; .getSimpleName().toLowerCase() + " ";
if (query != null) {
query = "%" + query.toLowerCase() + "%";
}
Query hibernateQuery = getSearchQuery(context, queryString, query, queryFields, sortFields, null);
if (0 <= offset) { Query hibernateQuery = getSearchQuery(context, queryString, query, queryFields, null,
hibernateQuery.setFirstResult(offset); sortFields, null, limit, offset);
}
if (0 <= limit) {
hibernateQuery.setMaxResults(limit);
}
return list(hibernateQuery); return list(hibernateQuery);
} }
@@ -92,6 +84,28 @@ public class EPersonDAOImpl extends AbstractHibernateDSODAO<EPerson> implements
return count(hibernateQuery); return count(hibernateQuery);
} }
@Override
public List<EPerson> searchNotMember(Context context, String query, List<MetadataField> queryFields,
Group excludeGroup, List<MetadataField> sortFields,
int offset, int limit) throws SQLException {
String queryString = "SELECT " + EPerson.class.getSimpleName()
.toLowerCase() + " FROM EPerson as " + EPerson.class
.getSimpleName().toLowerCase() + " ";
Query hibernateQuery = getSearchQuery(context, queryString, query, queryFields, excludeGroup,
sortFields, null, limit, offset);
return list(hibernateQuery);
}
public int searchNotMemberCount(Context context, String query, List<MetadataField> queryFields,
Group excludeGroup) throws SQLException {
String queryString = "SELECT count(*) FROM EPerson as " + EPerson.class.getSimpleName().toLowerCase();
Query hibernateQuery = getSearchQuery(context, queryString, query, queryFields, excludeGroup,
Collections.EMPTY_LIST, null, -1, -1);
return count(hibernateQuery);
}
@Override @Override
public List<EPerson> findAll(Context context, MetadataField metadataSortField, String sortField, int pageSize, public List<EPerson> findAll(Context context, MetadataField metadataSortField, String sortField, int pageSize,
int offset) throws SQLException { int offset) throws SQLException {
@@ -105,19 +119,43 @@ public class EPersonDAOImpl extends AbstractHibernateDSODAO<EPerson> implements
sortFields = Collections.singletonList(metadataSortField); sortFields = Collections.singletonList(metadataSortField);
} }
Query query = getSearchQuery(context, queryString, null, ListUtils.EMPTY_LIST, sortFields, sortField, pageSize, Query query = getSearchQuery(context, queryString, null, ListUtils.EMPTY_LIST, null,
offset); sortFields, sortField, pageSize, offset);
return list(query); return list(query);
} }
@Override @Override
public List<EPerson> findByGroups(Context context, Set<Group> groups) throws SQLException { public List<EPerson> findByGroups(Context context, Set<Group> groups, int pageSize, int offset)
throws SQLException {
Query query = createQuery(context, Query query = createQuery(context,
"SELECT DISTINCT e FROM EPerson e " + "SELECT DISTINCT e FROM EPerson e " +
"JOIN e.groups g " + "JOIN e.groups g " +
"WHERE g.id IN (:idList) "); "WHERE g.id IN (:idList) ");
List<UUID> idList = new ArrayList<>(groups.size());
for (Group group : groups) {
idList.add(group.getID());
}
query.setParameter("idList", idList);
if (pageSize > 0) {
query.setMaxResults(pageSize);
}
if (offset > 0) {
query.setFirstResult(offset);
}
return list(query);
}
@Override
public int countByGroups(Context context, Set<Group> groups) throws SQLException {
Query query = createQuery(context,
"SELECT count(DISTINCT e) FROM EPerson e " +
"JOIN e.groups g " +
"WHERE g.id IN (:idList) ");
List<UUID> idList = new ArrayList<>(groups.size()); List<UUID> idList = new ArrayList<>(groups.size());
for (Group group : groups) { for (Group group : groups) {
idList.add(group.getID()); idList.add(group.getID());
@@ -125,7 +163,7 @@ public class EPersonDAOImpl extends AbstractHibernateDSODAO<EPerson> implements
query.setParameter("idList", idList); query.setParameter("idList", idList);
return list(query); return count(query);
} }
@Override @Override
@@ -154,43 +192,88 @@ public class EPersonDAOImpl extends AbstractHibernateDSODAO<EPerson> implements
protected Query getSearchQuery(Context context, String queryString, String queryParam, protected Query getSearchQuery(Context context, String queryString, String queryParam,
List<MetadataField> queryFields, List<MetadataField> sortFields, String sortField) List<MetadataField> queryFields, List<MetadataField> sortFields, String sortField)
throws SQLException { throws SQLException {
return getSearchQuery(context, queryString, queryParam, queryFields, sortFields, sortField, -1, -1); return getSearchQuery(context, queryString, queryParam, queryFields, null, sortFields, sortField, -1, -1);
} }
/**
* Build a search query across EPersons based on the given metadata fields and sorted based on the given metadata
* field(s) or database column.
* <P>
* NOTE: the EPerson's email address is included in the search alongside any given metadata fields.
*
* @param context DSpace Context
* @param queryString String which defines the beginning "SELECT" for the SQL query
* @param queryParam Actual text being searched for
* @param queryFields List of metadata fields to search within
* @param excludeGroup Optional Group which should be excluded from search. Any EPersons who are members
* of this group will not be included in the results.
* @param sortFields Optional List of metadata fields to sort by (should not be specified if sortField is used)
* @param sortField Optional database column to sort on (should not be specified if sortFields is used)
* @param pageSize how many results return
* @param offset the position of the first result to return
* @return built Query object
* @throws SQLException if error occurs
*/
protected Query getSearchQuery(Context context, String queryString, String queryParam, protected Query getSearchQuery(Context context, String queryString, String queryParam,
List<MetadataField> queryFields, List<MetadataField> sortFields, String sortField, List<MetadataField> queryFields, Group excludeGroup,
List<MetadataField> sortFields, String sortField,
int pageSize, int offset) throws SQLException { int pageSize, int offset) throws SQLException {
// Initialize SQL statement using the passed in "queryString"
StringBuilder queryBuilder = new StringBuilder(); StringBuilder queryBuilder = new StringBuilder();
queryBuilder.append(queryString); queryBuilder.append(queryString);
Set<MetadataField> metadataFieldsToJoin = new LinkedHashSet<>(); Set<MetadataField> metadataFieldsToJoin = new LinkedHashSet<>();
metadataFieldsToJoin.addAll(queryFields); metadataFieldsToJoin.addAll(queryFields);
metadataFieldsToJoin.addAll(sortFields); metadataFieldsToJoin.addAll(sortFields);
// Append necessary join information for MetadataFields we will search within
if (!CollectionUtils.isEmpty(metadataFieldsToJoin)) { if (!CollectionUtils.isEmpty(metadataFieldsToJoin)) {
addMetadataLeftJoin(queryBuilder, EPerson.class.getSimpleName().toLowerCase(), metadataFieldsToJoin); addMetadataLeftJoin(queryBuilder, EPerson.class.getSimpleName().toLowerCase(), metadataFieldsToJoin);
} }
if (queryParam != null) { // Always append a search on EPerson "email" based on query
if (StringUtils.isNotBlank(queryParam)) {
addMetadataValueWhereQuery(queryBuilder, queryFields, "like", addMetadataValueWhereQuery(queryBuilder, queryFields, "like",
EPerson.class.getSimpleName().toLowerCase() + ".email like :queryParam"); EPerson.class.getSimpleName().toLowerCase() + ".email like :queryParam");
} }
// If excludeGroup is specified, exclude members of that group from results
// This uses a subquery to find the excluded group & verify that it is not in the EPerson list of "groups"
if (excludeGroup != null) {
// If query params exist, then we already have a WHERE clause (see above) and just need to append an AND
if (StringUtils.isNotBlank(queryParam)) {
queryBuilder.append(" AND ");
} else {
// no WHERE clause yet, so this is the start of the WHERE
queryBuilder.append(" WHERE ");
}
queryBuilder.append("(FROM Group g where g.id = :group_id) NOT IN elements (")
.append(EPerson.class.getSimpleName().toLowerCase()).append(".groups)");
}
// Add sort/order by info to query, if specified
if (!CollectionUtils.isEmpty(sortFields) || StringUtils.isNotBlank(sortField)) { if (!CollectionUtils.isEmpty(sortFields) || StringUtils.isNotBlank(sortField)) {
addMetadataSortQuery(queryBuilder, sortFields, Collections.singletonList(sortField)); addMetadataSortQuery(queryBuilder, sortFields, Collections.singletonList(sortField));
} }
// Create the final SQL SELECT statement (based on included params above)
Query query = createQuery(context, queryBuilder.toString()); Query query = createQuery(context, queryBuilder.toString());
// Set pagesize & offset for pagination
if (pageSize > 0) { if (pageSize > 0) {
query.setMaxResults(pageSize); query.setMaxResults(pageSize);
} }
if (offset > 0) { if (offset > 0) {
query.setFirstResult(offset); query.setFirstResult(offset);
} }
// Set all parameters to the SQL SELECT statement (based on included params above)
if (StringUtils.isNotBlank(queryParam)) { if (StringUtils.isNotBlank(queryParam)) {
query.setParameter("queryParam", "%" + queryParam.toLowerCase() + "%"); query.setParameter("queryParam", "%" + queryParam.toLowerCase() + "%");
} }
for (MetadataField metadataField : metadataFieldsToJoin) { for (MetadataField metadataField : metadataFieldsToJoin) {
query.setParameter(metadataField.toString(), metadataField.getID()); query.setParameter(metadataField.toString(), metadataField.getID());
} }
if (excludeGroup != null) {
query.setParameter("group_id", excludeGroup.getID());
}
query.setHint("org.hibernate.cacheable", Boolean.TRUE);
return query; return query;
} }

View File

@@ -164,6 +164,41 @@ public class GroupDAOImpl extends AbstractHibernateDSODAO<Group> implements Grou
return count(query); return count(query);
} }
@Override
public List<Group> findByNameLikeAndNotMember(Context context, String groupName, Group excludeParent,
int offset, int limit) throws SQLException {
Query query = createQuery(context,
"FROM Group " +
"WHERE lower(name) LIKE lower(:group_name) " +
"AND id != :parent_id " +
"AND (from Group g where g.id = :parent_id) not in elements (parentGroups)");
query.setParameter("parent_id", excludeParent.getID());
query.setParameter("group_name", "%" + StringUtils.trimToEmpty(groupName) + "%");
if (0 <= offset) {
query.setFirstResult(offset);
}
if (0 <= limit) {
query.setMaxResults(limit);
}
query.setHint("org.hibernate.cacheable", Boolean.TRUE);
return list(query);
}
@Override
public int countByNameLikeAndNotMember(Context context, String groupName, Group excludeParent) throws SQLException {
Query query = createQuery(context,
"SELECT count(*) FROM Group " +
"WHERE lower(name) LIKE lower(:group_name) " +
"AND id != :parent_id " +
"AND (from Group g where g.id = :parent_id) not in elements (parentGroups)");
query.setParameter("parent_id", excludeParent.getID());
query.setParameter("group_name", "%" + StringUtils.trimToEmpty(groupName) + "%");
return count(query);
}
@Override @Override
public void delete(Context context, Group group) throws SQLException { public void delete(Context context, Group group) throws SQLException {
Query query = getHibernateSession(context) Query query = getHibernateSession(context)
@@ -196,4 +231,29 @@ public class GroupDAOImpl extends AbstractHibernateDSODAO<Group> implements Grou
return count(createQuery(context, "SELECT count(*) FROM Group")); return count(createQuery(context, "SELECT count(*) FROM Group"));
} }
@Override
public List<Group> findByParent(Context context, Group parent, int pageSize, int offset) throws SQLException {
Query query = createQuery(context,
"SELECT g FROM Group g JOIN g.parentGroups pg " +
"WHERE pg.id = :parent_id");
query.setParameter("parent_id", parent.getID());
if (pageSize > 0) {
query.setMaxResults(pageSize);
}
if (offset > 0) {
query.setFirstResult(offset);
}
query.setHint("org.hibernate.cacheable", Boolean.TRUE);
return list(query);
}
@Override
public int countByParent(Context context, Group parent) throws SQLException {
Query query = createQuery(context, "SELECT count(g) FROM Group g JOIN g.parentGroups pg " +
"WHERE pg.id = :parent_id");
query.setParameter("parent_id", parent.getID());
return count(query);
}
} }

View File

@@ -13,6 +13,7 @@ import java.sql.SQLException;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import javax.validation.constraints.NotNull;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item; import org.dspace.content.Item;
@@ -97,9 +98,9 @@ public interface EPersonService extends DSpaceObjectService<EPerson>, DSpaceObje
* *
* @param context The relevant DSpace Context. * @param context The relevant DSpace Context.
* @param query The search string * @param query The search string
* @param offset Inclusive offset * @param offset Inclusive offset (the position of the first result to return)
* @param limit Maximum number of matches returned * @param limit Maximum number of matches returned
* @return array of EPerson objects * @return List of matching EPerson objects
* @throws SQLException An exception that provides information on a database access error or other errors. * @throws SQLException An exception that provides information on a database access error or other errors.
*/ */
public List<EPerson> search(Context context, String query, int offset, int limit) public List<EPerson> search(Context context, String query, int offset, int limit)
@@ -117,6 +118,34 @@ public interface EPersonService extends DSpaceObjectService<EPerson>, DSpaceObje
public int searchResultCount(Context context, String query) public int searchResultCount(Context context, String query)
throws SQLException; throws SQLException;
/**
* Find the EPersons that match the search query which are NOT currently members of the given Group. The search
* query is run against firstname, lastname or email.
*
* @param context DSpace context
* @param query The search string
* @param excludeGroup Group to exclude results from. Members of this group will never be returned.
* @param offset Inclusive offset (the position of the first result to return)
* @param limit Maximum number of matches returned
* @return List of matching EPerson objects
* @throws SQLException if error
*/
List<EPerson> searchNonMembers(Context context, String query, Group excludeGroup,
int offset, int limit) throws SQLException;
/**
* Returns the total number of EPersons that match the search query which are NOT currently members of the given
* Group. The search query is run against firstname, lastname or email. Can be used with searchNonMembers() to
* support pagination
*
* @param context DSpace context
* @param query The search string
* @param excludeGroup Group to exclude results from. Members of this group will never be returned.
* @return List of matching EPerson objects
* @throws SQLException if error
*/
int searchNonMembersCount(Context context, String query, Group excludeGroup) throws SQLException;
/** /**
* Find all the {@code EPerson}s in a specific order by field. * Find all the {@code EPerson}s in a specific order by field.
* The sortable fields are: * The sortable fields are:
@@ -157,6 +186,19 @@ public interface EPersonService extends DSpaceObjectService<EPerson>, DSpaceObje
public List<EPerson> findAll(Context context, int sortField, int pageSize, int offset) public List<EPerson> findAll(Context context, int sortField, int pageSize, int offset)
throws SQLException; throws SQLException;
/**
* The "System EPerson" is a fake account that exists only to receive email.
* It has an email address that should be presumed usable. It does not
* exist in the database and is not complete.
*
* @param context current DSpace session.
* @return an EPerson that can presumably receive email.
* @throws SQLException
*/
@NotNull
public EPerson getSystemEPerson(Context context)
throws SQLException;
/** /**
* Create a new eperson * Create a new eperson
* *
@@ -238,14 +280,42 @@ public interface EPersonService extends DSpaceObjectService<EPerson>, DSpaceObje
public List<String> getDeleteConstraints(Context context, EPerson ePerson) throws SQLException; public List<String> getDeleteConstraints(Context context, EPerson ePerson) throws SQLException;
/** /**
* Retrieve all accounts which belong to at least one of the specified groups. * Retrieve all EPerson accounts which belong to at least one of the specified groups.
* <P>
* WARNING: This method may have bad performance issues for Groups with a very large number of members,
* as it will load all member EPerson objects into memory.
* <P>
* For better performance, use the paginated version of this method.
* *
* @param c The relevant DSpace Context. * @param c The relevant DSpace Context.
* @param groups set of eperson groups * @param groups set of eperson groups
* @return a list of epeople * @return a list of epeople
* @throws SQLException An exception that provides information on a database access error or other errors. * @throws SQLException An exception that provides information on a database access error or other errors.
*/ */
public List<EPerson> findByGroups(Context c, Set<Group> groups) throws SQLException; List<EPerson> findByGroups(Context c, Set<Group> groups) throws SQLException;
/**
* Retrieve all EPerson accounts which belong to at least one of the specified groups, in a paginated fashion.
*
* @param c The relevant DSpace Context.
* @param groups Set of group(s) to check membership in
* @param pageSize number of EPerson objects to load at one time. Set to <=0 to disable pagination
* @param offset number of page to load (starting with 1). Set to <=0 to disable pagination
* @return a list of epeople
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
List<EPerson> findByGroups(Context c, Set<Group> groups, int pageSize, int offset) throws SQLException;
/**
* Count all EPerson accounts which belong to at least one of the specified groups. This provides the total
* number of results to expect from corresponding findByGroups() for pagination purposes.
*
* @param c The relevant DSpace Context.
* @param groups Set of group(s) to check membership in
* @return total number of (unique) EPersons who are a member of one or more groups.
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
int countByGroups(Context c, Set<Group> groups) throws SQLException;
/** /**
* Retrieve all accounts which are subscribed to receive information about new items. * Retrieve all accounts which are subscribed to receive information about new items.

View File

@@ -189,9 +189,11 @@ public interface GroupService extends DSpaceObjectService<Group>, DSpaceObjectLe
Set<Group> allMemberGroupsSet(Context context, EPerson ePerson) throws SQLException; Set<Group> allMemberGroupsSet(Context context, EPerson ePerson) throws SQLException;
/** /**
* Get all of the epeople who are a member of the * Get all of the EPerson objects who are a member of the specified group, or a member of a subgroup of the
* specified group, or a member of a sub-group of the
* specified group, etc. * specified group, etc.
* <P>
* WARNING: This method may have bad performance for Groups with a very large number of members, as it will load
* all member EPerson objects into memory. Only use if you need access to *every* EPerson object at once.
* *
* @param context The relevant DSpace Context. * @param context The relevant DSpace Context.
* @param group Group object * @param group Group object
@@ -200,6 +202,18 @@ public interface GroupService extends DSpaceObjectService<Group>, DSpaceObjectLe
*/ */
public List<EPerson> allMembers(Context context, Group group) throws SQLException; public List<EPerson> allMembers(Context context, Group group) throws SQLException;
/**
* Count all of the EPerson objects who are a member of the specified group, or a member of a subgroup of the
* specified group, etc.
* In other words, this will return the size of "allMembers()" without having to load all EPerson objects into
* memory.
* @param context current DSpace context
* @param group Group object
* @return count of EPerson object members
* @throws SQLException if error
*/
int countAllMembers(Context context, Group group) throws SQLException;
/** /**
* Find the group by its name - assumes name is unique * Find the group by its name - assumes name is unique
* *
@@ -247,37 +261,67 @@ public interface GroupService extends DSpaceObjectService<Group>, DSpaceObjectLe
public List<Group> findAll(Context context, int sortField) throws SQLException; public List<Group> findAll(Context context, int sortField) throws SQLException;
/** /**
* Find the groups that match the search query across eperson_group_id or name * Find the Groups that match the query across both Group name and Group ID. This is an unpaginated search,
* which means it will load all matching groups into memory at once. This may provide POOR PERFORMANCE when a large
* number of groups are matched.
* *
* @param context DSpace context * @param context DSpace context
* @param groupIdentifier The group name or group ID * @param query The search string used to search across group name or group ID
* @return array of Group objects * @return List of matching Group objects
* @throws SQLException if error * @throws SQLException if error
*/ */
public List<Group> search(Context context, String groupIdentifier) throws SQLException; List<Group> search(Context context, String query) throws SQLException;
/** /**
* Find the groups that match the search query across eperson_group_id or name * Find the Groups that match the query across both Group name and Group ID. This method supports pagination,
* which provides better performance than the above non-paginated search() method.
* *
* @param context DSpace context * @param context DSpace context
* @param groupIdentifier The group name or group ID * @param query The search string used to search across group name or group ID
* @param offset Inclusive offset * @param offset Inclusive offset (the position of the first result to return)
* @param limit Maximum number of matches returned * @param limit Maximum number of matches returned
* @return array of Group objects * @return List of matching Group objects
* @throws SQLException if error * @throws SQLException if error
*/ */
public List<Group> search(Context context, String groupIdentifier, int offset, int limit) throws SQLException; List<Group> search(Context context, String query, int offset, int limit) throws SQLException;
/** /**
* Returns the total number of groups returned by a specific query, without the overhead * Returns the total number of Groups returned by a specific query. Search is performed based on Group name
* of creating the Group objects to store the results. * and Group ID. May be used with search() above to support pagination of matching Groups.
* *
* @param context DSpace context * @param context DSpace context
* @param query The search string * @param query The search string used to search across group name or group ID
* @return the number of groups matching the query * @return the number of groups matching the query
* @throws SQLException if error * @throws SQLException if error
*/ */
public int searchResultCount(Context context, String query) throws SQLException; int searchResultCount(Context context, String query) throws SQLException;
/**
* Find the groups that match the search query which are NOT currently members (subgroups)
* of the given parentGroup
*
* @param context DSpace context
* @param query The search string used to search across group name or group ID
* @param excludeParentGroup Parent group to exclude results from
* @param offset Inclusive offset (the position of the first result to return)
* @param limit Maximum number of matches returned
* @return List of matching Group objects
* @throws SQLException if error
*/
List<Group> searchNonMembers(Context context, String query, Group excludeParentGroup,
int offset, int limit) throws SQLException;
/**
* Returns the total number of groups that match the search query which are NOT currently members (subgroups)
* of the given parentGroup. Can be used with searchNonMembers() to support pagination.
*
* @param context DSpace context
* @param query The search string used to search across group name or group ID
* @param excludeParentGroup Parent group to exclude results from
* @return the number of Groups matching the query
* @throws SQLException if error
*/
int searchNonMembersCount(Context context, String query, Group excludeParentGroup) throws SQLException;
/** /**
* Return true if group has no direct or indirect members * Return true if group has no direct or indirect members
@@ -327,4 +371,29 @@ public interface GroupService extends DSpaceObjectService<Group>, DSpaceObjectLe
*/ */
List<Group> findByMetadataField(Context context, String searchValue, MetadataField metadataField) List<Group> findByMetadataField(Context context, String searchValue, MetadataField metadataField)
throws SQLException; throws SQLException;
/**
 * Find all groups which are a member of the given Parent group.
 *
 * @param context  The relevant DSpace Context.
 * @param parent   The parent Group to search on
 * @param pageSize how many results to return (page size)
 * @param offset   the position of the first result to return
 * @return List of all groups which are members of the parent group
 * @throws SQLException database exception if error
 */
List<Group> findByParent(Context context, Group parent, int pageSize, int offset)
    throws SQLException;

/**
 * Return the number of groups which are a member of the given Parent group.
 * Can be used with findByParent() for pagination of all groups within a given
 * Parent group.
 *
 * @param context The relevant DSpace Context.
 * @param parent  The parent Group to search on
 * @return number of groups which are members of the parent group
 * @throws SQLException database exception if error
 */
int countByParent(Context context, Group parent)
    throws SQLException;
} }

View File

@@ -40,20 +40,20 @@ import org.json.JSONObject;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
/** /**
* based on OrcidRestConnector it's a rest connector for OpenAIRE API providing * based on OrcidRestConnector it's a rest connector for Openaire API providing
* ways to perform searches and token grabbing * ways to perform searches and token grabbing
* *
* @author paulo-graca * @author paulo-graca
* *
*/ */
public class OpenAIRERestConnector { public class OpenaireRestConnector {
/** /**
* log4j logger * log4j logger
*/ */
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(OpenAIRERestConnector.class); private static Logger log = org.apache.logging.log4j.LogManager.getLogger(OpenaireRestConnector.class);
/** /**
* OpenAIRE API Url * Openaire API Url
* and can be configured with: openaire.api.url * and can be configured with: openaire.api.url
*/ */
private String url = "https://api.openaire.eu"; private String url = "https://api.openaire.eu";
@@ -65,30 +65,30 @@ public class OpenAIRERestConnector {
boolean tokenEnabled = false; boolean tokenEnabled = false;
/** /**
* OpenAIRE Authorization and Authentication Token Service URL * Openaire Authorization and Authentication Token Service URL
* and can be configured with: openaire.token.url * and can be configured with: openaire.token.url
*/ */
private String tokenServiceUrl; private String tokenServiceUrl;
/** /**
* OpenAIRE clientId * Openaire clientId
* and can be configured with: openaire.token.clientId * and can be configured with: openaire.token.clientId
*/ */
private String clientId; private String clientId;
/** /**
* OpenAIRERest access token * OpenaireRest access token
*/ */
private OpenAIRERestToken accessToken; private OpenaireRestToken accessToken;
/** /**
* OpenAIRE clientSecret * Openaire clientSecret
* and can be configured with: openaire.token.clientSecret * and can be configured with: openaire.token.clientSecret
*/ */
private String clientSecret; private String clientSecret;
public OpenAIRERestConnector(String url) { public OpenaireRestConnector(String url) {
this.url = url; this.url = url;
} }
@@ -99,7 +99,7 @@ public class OpenAIRERestConnector {
* *
* @throws IOException * @throws IOException
*/ */
public OpenAIRERestToken grabNewAccessToken() throws IOException { public OpenaireRestToken grabNewAccessToken() throws IOException {
if (StringUtils.isBlank(tokenServiceUrl) || StringUtils.isBlank(clientId) if (StringUtils.isBlank(tokenServiceUrl) || StringUtils.isBlank(clientId)
|| StringUtils.isBlank(clientSecret)) { || StringUtils.isBlank(clientSecret)) {
@@ -145,13 +145,13 @@ public class OpenAIRERestConnector {
throw new IOException("Unable to grab the access token using provided service url, client id and secret"); throw new IOException("Unable to grab the access token using provided service url, client id and secret");
} }
return new OpenAIRERestToken(responseObject.get("access_token").toString(), return new OpenaireRestToken(responseObject.get("access_token").toString(),
Long.valueOf(responseObject.get("expires_in").toString())); Long.valueOf(responseObject.get("expires_in").toString()));
} }
/** /**
* Perform a GET request to the OpenAIRE API * Perform a GET request to the Openaire API
* *
* @param file * @param file
* @param accessToken * @param accessToken
@@ -218,12 +218,12 @@ public class OpenAIRERestConnector {
} }
/** /**
* Perform an OpenAIRE Project Search By Keywords * Perform an Openaire Project Search By Keywords
* *
* @param page * @param page
* @param size * @param size
* @param keywords * @param keywords
* @return OpenAIRE Response * @return Openaire Response
*/ */
public Response searchProjectByKeywords(int page, int size, String... keywords) { public Response searchProjectByKeywords(int page, int size, String... keywords) {
String path = "search/projects?keywords=" + String.join("+", keywords); String path = "search/projects?keywords=" + String.join("+", keywords);
@@ -231,13 +231,13 @@ public class OpenAIRERestConnector {
} }
/** /**
* Perform an OpenAIRE Project Search By ID and by Funder * Perform an Openaire Project Search By ID and by Funder
* *
* @param projectID * @param projectID
* @param projectFunder * @param projectFunder
* @param page * @param page
* @param size * @param size
* @return OpenAIRE Response * @return Openaire Response
*/ */
public Response searchProjectByIDAndFunder(String projectID, String projectFunder, int page, int size) { public Response searchProjectByIDAndFunder(String projectID, String projectFunder, int page, int size) {
String path = "search/projects?grantID=" + projectID + "&funder=" + projectFunder; String path = "search/projects?grantID=" + projectID + "&funder=" + projectFunder;
@@ -245,12 +245,12 @@ public class OpenAIRERestConnector {
} }
/** /**
* Perform an OpenAIRE Search request * Perform an Openaire Search request
* *
* @param path * @param path
* @param page * @param page
* @param size * @param size
* @return OpenAIRE Response * @return Openaire Response
*/ */
public Response search(String path, int page, int size) { public Response search(String path, int page, int size) {
String[] queryStringPagination = { "page=" + page, "size=" + size }; String[] queryStringPagination = { "page=" + page, "size=" + size };

View File

@@ -8,13 +8,13 @@
package org.dspace.external; package org.dspace.external;
/** /**
* OpenAIRE rest API token to be used when grabbing an accessToken.<br/> * Openaire rest API token to be used when grabbing an accessToken.<br/>
* Based on https://develop.openaire.eu/basic.html * Based on https://develop.openaire.eu/basic.html
* *
* @author paulo-graca * @author paulo-graca
* *
*/ */
public class OpenAIRERestToken { public class OpenaireRestToken {
/** /**
* Stored access token * Stored access token
@@ -32,7 +32,7 @@ public class OpenAIRERestToken {
* @param accessToken * @param accessToken
* @param expiresIn * @param expiresIn
*/ */
public OpenAIRERestToken(String accessToken, Long expiresIn) { public OpenaireRestToken(String accessToken, Long expiresIn) {
this.accessToken = accessToken; this.accessToken = accessToken;
this.setExpirationDate(expiresIn); this.setExpirationDate(expiresIn);
} }

View File

@@ -31,7 +31,7 @@ import eu.openaire.oaf.model.base.Project;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.content.dto.MetadataValueDTO; import org.dspace.content.dto.MetadataValueDTO;
import org.dspace.external.OpenAIRERestConnector; import org.dspace.external.OpenaireRestConnector;
import org.dspace.external.model.ExternalDataObject; import org.dspace.external.model.ExternalDataObject;
import org.dspace.external.provider.AbstractExternalDataProvider; import org.dspace.external.provider.AbstractExternalDataProvider;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
@@ -39,13 +39,13 @@ import org.springframework.beans.factory.annotation.Autowired;
/** /**
* This class is the implementation of the ExternalDataProvider interface that * This class is the implementation of the ExternalDataProvider interface that
* will deal with the OpenAIRE External Data lookup * will deal with the Openaire External Data lookup
* *
* @author paulo-graca * @author paulo-graca
*/ */
public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider { public class OpenaireFundingDataProvider extends AbstractExternalDataProvider {
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(OpenAIREFundingDataProvider.class); private static Logger log = org.apache.logging.log4j.LogManager.getLogger(OpenaireFundingDataProvider.class);
/** /**
* GrantAgreement prefix * GrantAgreement prefix
@@ -75,7 +75,7 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
/** /**
* Connector to handle token and requests * Connector to handle token and requests
*/ */
protected OpenAIRERestConnector connector; protected OpenaireRestConnector connector;
protected Map<String, MetadataFieldConfig> metadataFields; protected Map<String, MetadataFieldConfig> metadataFields;
@@ -93,7 +93,7 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
// characters that must be escaped for the <:entry-id> // characters that must be escaped for the <:entry-id>
String decodedId = new String(Base64.getDecoder().decode(id)); String decodedId = new String(Base64.getDecoder().decode(id));
if (!isValidProjectURI(decodedId)) { if (!isValidProjectURI(decodedId)) {
log.error("Invalid ID for OpenAIREFunding - " + id); log.error("Invalid ID for OpenaireFunding - " + id);
return Optional.empty(); return Optional.empty();
} }
Response response = searchByProjectURI(decodedId); Response response = searchByProjectURI(decodedId);
@@ -101,7 +101,7 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
try { try {
if (response.getHeader() != null && Integer.parseInt(response.getHeader().getTotal()) > 0) { if (response.getHeader() != null && Integer.parseInt(response.getHeader().getTotal()) > 0) {
Project project = response.getResults().getResult().get(0).getMetadata().getEntity().getProject(); Project project = response.getResults().getResult().get(0).getMetadata().getEntity().getProject();
ExternalDataObject externalDataObject = new OpenAIREFundingDataProvider ExternalDataObject externalDataObject = new OpenaireFundingDataProvider
.ExternalDataObjectBuilder(project) .ExternalDataObjectBuilder(project)
.setId(generateProjectURI(project)) .setId(generateProjectURI(project))
.setSource(sourceIdentifier) .setSource(sourceIdentifier)
@@ -123,7 +123,7 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
limit = LIMIT_DEFAULT; limit = LIMIT_DEFAULT;
} }
// OpenAIRE uses pages and first page starts with 1 // Openaire uses pages and first page starts with 1
int page = (start / limit) + 1; int page = (start / limit) + 1;
// escaping query // escaping query
@@ -148,7 +148,7 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
if (projects.size() > 0) { if (projects.size() > 0) {
return projects.stream() return projects.stream()
.map(project -> new OpenAIREFundingDataProvider .map(project -> new OpenaireFundingDataProvider
.ExternalDataObjectBuilder(project) .ExternalDataObjectBuilder(project)
.setId(generateProjectURI(project)) .setId(generateProjectURI(project))
.setSource(sourceIdentifier) .setSource(sourceIdentifier)
@@ -176,24 +176,24 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
* Generic setter for the sourceIdentifier * Generic setter for the sourceIdentifier
* *
* @param sourceIdentifier The sourceIdentifier to be set on this * @param sourceIdentifier The sourceIdentifier to be set on this
* OpenAIREFunderDataProvider * OpenaireFunderDataProvider
*/ */
@Autowired(required = true) @Autowired(required = true)
public void setSourceIdentifier(String sourceIdentifier) { public void setSourceIdentifier(String sourceIdentifier) {
this.sourceIdentifier = sourceIdentifier; this.sourceIdentifier = sourceIdentifier;
} }
public OpenAIRERestConnector getConnector() { public OpenaireRestConnector getConnector() {
return connector; return connector;
} }
/** /**
* Generic setter for OpenAIRERestConnector * Generic setter for OpenaireRestConnector
* *
* @param connector * @param connector
*/ */
@Autowired(required = true) @Autowired(required = true)
public void setConnector(OpenAIRERestConnector connector) { public void setConnector(OpenaireRestConnector connector) {
this.connector = connector; this.connector = connector;
} }
@@ -219,7 +219,7 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
} }
/** /**
* This method returns an URI based on OpenAIRE 3.0 guidelines * This method returns an URI based on Openaire 3.0 guidelines
* https://guidelines.openaire.eu/en/latest/literature/field_projectid.html that * https://guidelines.openaire.eu/en/latest/literature/field_projectid.html that
* can be used as an ID if is there any missing part, that part it will be * can be used as an ID if is there any missing part, that part it will be
* replaced by the character '+' * replaced by the character '+'
@@ -281,7 +281,7 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
} }
/** /**
* OpenAIRE Funding External Data Builder Class * Openaire Funding External Data Builder Class
* *
* @author pgraca * @author pgraca
*/ */

View File

@@ -13,6 +13,8 @@ import java.util.Optional;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.app.suggestion.SuggestionProvider;
import org.dspace.app.suggestion.SuggestionService;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.Item; import org.dspace.content.Item;
@@ -44,6 +46,9 @@ public class ExternalDataServiceImpl implements ExternalDataService {
@Autowired @Autowired
private WorkspaceItemService workspaceItemService; private WorkspaceItemService workspaceItemService;
@Autowired
private SuggestionService suggestionService;
@Override @Override
public Optional<ExternalDataObject> getExternalDataObject(String source, String id) { public Optional<ExternalDataObject> getExternalDataObject(String source, String id) {
ExternalDataProvider provider = getExternalDataProvider(source); ExternalDataProvider provider = getExternalDataProvider(source);
@@ -105,6 +110,16 @@ public class ExternalDataServiceImpl implements ExternalDataService {
log.info(LogHelper.getHeader(context, "create_item_from_externalDataObject", "Created item" + log.info(LogHelper.getHeader(context, "create_item_from_externalDataObject", "Created item" +
"with id: " + item.getID() + " from source: " + externalDataObject.getSource() + " with identifier: " + "with id: " + item.getID() + " from source: " + externalDataObject.getSource() + " with identifier: " +
externalDataObject.getId())); externalDataObject.getId()));
try {
List<SuggestionProvider> providers = suggestionService.getSuggestionProviders();
if (providers != null) {
for (SuggestionProvider p : providers) {
p.flagRelatedSuggestionsAsProcessed(context, externalDataObject);
}
}
} catch (Exception e) {
log.error("Got problems with the solr suggestion storage service: " + e.getMessage(), e);
}
return workspaceItem; return workspaceItem;
} }

View File

@@ -162,8 +162,10 @@ public class CrossRefImportMetadataSourceServiceImpl extends AbstractImportMetad
Iterator<JsonNode> nodes = jsonNode.at("/message/items").iterator(); Iterator<JsonNode> nodes = jsonNode.at("/message/items").iterator();
while (nodes.hasNext()) { while (nodes.hasNext()) {
JsonNode node = nodes.next(); JsonNode node = nodes.next();
if (!node.isMissingNode()) {
results.add(transformSourceRecords(node.toString())); results.add(transformSourceRecords(node.toString()));
} }
}
return results; return results;
} }
@@ -196,7 +198,9 @@ public class CrossRefImportMetadataSourceServiceImpl extends AbstractImportMetad
String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
JsonNode jsonNode = convertStringJsonToJsonNode(responseString); JsonNode jsonNode = convertStringJsonToJsonNode(responseString);
JsonNode messageNode = jsonNode.at("/message"); JsonNode messageNode = jsonNode.at("/message");
if (!messageNode.isMissingNode()) {
results.add(transformSourceRecords(messageNode.toString())); results.add(transformSourceRecords(messageNode.toString()));
}
return results; return results;
} }
} }
@@ -250,8 +254,10 @@ public class CrossRefImportMetadataSourceServiceImpl extends AbstractImportMetad
Iterator<JsonNode> nodes = jsonNode.at("/message/items").iterator(); Iterator<JsonNode> nodes = jsonNode.at("/message/items").iterator();
while (nodes.hasNext()) { while (nodes.hasNext()) {
JsonNode node = nodes.next(); JsonNode node = nodes.next();
if (!node.isMissingNode()) {
results.add(transformSourceRecords(node.toString())); results.add(transformSourceRecords(node.toString()));
} }
}
return results; return results;
} }

View File

@@ -11,7 +11,9 @@ import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Optional;
import org.dspace.content.MetadataFieldName;
import org.dspace.importer.external.metadatamapping.MetadatumDTO; import org.dspace.importer.external.metadatamapping.MetadatumDTO;
/** /**
@@ -94,6 +96,31 @@ public class ImportRecord {
return values; return values;
} }
/**
 * Returns an {@code Optional<String>} representing the first value
 * of the metadata {@code field} (in {@code schema.element.qualifier} form)
 * found inside the {@code valueList}.
 *
 * @param field String of the MetadataField to search
 * @return {@code Optional<String>} non-empty if found.
 */
public Optional<String> getSingleValue(String field) {
    MetadataFieldName metadataFieldName = new MetadataFieldName(field);
    return getSingleValue(metadataFieldName.schema, metadataFieldName.element, metadataFieldName.qualifier);
}

/**
 * Retrieves a single (first-found) value for the given schema, element, and qualifier.
 *
 * @param schema the schema of the metadata field
 * @param element the element of the metadata field
 * @param qualifier the qualifier of the metadata field
 * @return an optional containing the first matching value, if present
 */
public Optional<String> getSingleValue(String schema, String element, String qualifier) {
    return getValue(schema, element, qualifier).stream()
        .map(MetadatumDTO::getValue)
        .findFirst();
}
/** /**
* Add a value to the valueList * Add a value to the valueList
* *

View File

@@ -0,0 +1,130 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
/**
* A ROR JsonPath Metadata processor that should be configured inside the {@code ror-integration.xml} file.
* This allows the extraction of a given contributor with a specific mappings from the ROR JSON response.
*
* @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com)
*/
public class RorParentOrgUnitMetadataContributor extends SimpleJsonPathMetadataContributor {

    /**
     * Name of the JSON field that holds the {@code type} of each node,
     * checked against {@link #parentType}.
     */
    private String typeField;

    /**
     * Type value identifying the main parent node; compared case-insensitively
     * with the content of {@code typeField}.
     */
    private String parentType;

    /**
     * Name of the JSON field whose content is mapped into a {@code MetadatumDTO}.
     */
    private String labelField;

    /**
     * Creates a {@code MetadatumDTO} for every node of the ROR response whose
     * type matches {@code parentType}. Nodes without the label field, or with
     * a non-matching (or absent) type, are skipped.
     *
     * @param fullJson ROR response
     * @return a collection of read ROR metadata.
     */
    @Override
    public Collection<MetadatumDTO> contributeMetadata(String fullJson) {
        Collection<MetadatumDTO> metadata = new ArrayList<>();
        JsonNode nodes = convertStringJsonToJsonNode(fullJson).at(getQuery());
        if (!nodes.isArray()) {
            return metadata;
        }
        for (JsonNode node : nodes) {
            if (!node.has(labelField)) {
                continue;
            }
            String nodeType = node.has(typeField) ? node.get(typeField).asText() : null;
            if (!parentType.equalsIgnoreCase(nodeType)) {
                continue;
            }
            MetadatumDTO metadatum = new MetadatumDTO();
            metadatum.setValue(node.get(labelField).asText());
            metadatum.setSchema(getField().getSchema());
            metadatum.setElement(getField().getElement());
            metadatum.setQualifier(getField().getQualifier());
            metadata.add(metadatum);
        }
        return metadata;
    }

    /**
     * Parses a JSON string into a {@code JsonNode} tree, wrapping any parse
     * failure into an unchecked exception.
     */
    private JsonNode convertStringJsonToJsonNode(String json) {
        try {
            return new ObjectMapper().readTree(json);
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }

    public String getTypeField() {
        return typeField;
    }

    public void setTypeField(String typeField) {
        this.typeField = typeField;
    }

    public String getLabelField() {
        return labelField;
    }

    public void setLabelField(String labelField) {
        this.labelField = labelField;
    }

    public String getParentType() {
        return parentType;
    }

    public void setParentType(String parentType) {
        this.parentType = parentType;
    }
}

View File

@@ -87,5 +87,4 @@ public class SimpleXpathDateFormatMetadataContributor extends SimpleXpathMetadat
dcValue.setSchema(field.getSchema()); dcValue.setSchema(field.getSchema());
return dcValue; return dcValue;
} }
} }

View File

@@ -0,0 +1,29 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.openaire.metadatamapping;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
* An implementation of {@link AbstractMetadataFieldMapping} responsible for
* defining the mapping of the OpenAIRE metadatum fields on the DSpace metadatum
* fields
*
* @author Mykhaylo Boychuk (4science.it)
*/
public class OpenAIREPublicationFieldMapping extends AbstractMetadataFieldMapping {

    /**
     * Injects the Spring-configured "openairePublicationsMetadataFieldMap" bean,
     * which maps OpenAIRE publication metadata onto DSpace metadata fields.
     *
     * @param metadataFieldMap the OpenAIRE-to-DSpace metadata field map
     *                         (raw type presumably kept to match the parent
     *                         class signature — TODO confirm)
     */
    @Override
    @Resource(name = "openairePublicationsMetadataFieldMap")
    public void setMetadataFieldMap(Map metadataFieldMap) {
        super.setMetadataFieldMap(metadataFieldMap);
    }
}

View File

@@ -0,0 +1,353 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.openaire.service;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.Callable;
import javax.el.MethodNotFoundException;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Invocation;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.Response;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.components.QuerySource;
import org.dspace.services.ConfigurationService;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implements a data source for querying OpenAIRE
*
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*/
public class OpenAireImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
implements QuerySource {
// DSpace configuration service, used by init() to resolve "openaire.base.url"
@Autowired(required = true)
protected ConfigurationService configurationService;

// Base URL of the OpenAIRE REST endpoint (falls back to config when unset)
private String baseAddress;

// JAX-RS target built from baseAddress during init()
private WebTarget webTarget;

// Name of the index to query (e.g. "title" or "author"); defaults to "title"
private String queryParam;
/**
 * The string that identifies this import implementation. Preferable a URI
 *
 * @return the identifying uri
 */
@Override
public String getImportSource() {
    return "openaire";
}

/**
 * Get a single record from OpenAIRE by its identifier.
 *
 * @param id the identifier of the record to retrieve
 * @return the matching record
 * @throws MetadataSourceException if the underlying methods throw any exception.
 */
@Override
public ImportRecord getRecord(String id) throws MetadataSourceException {
    return retry(new SearchByIdCallable(id));
}

/**
 * Get a single record from OpenAIRE using a query object
 * (its "id" parameter holds the record identifier).
 *
 * @param query a query object holding the "id" parameter
 * @return the matching record
 * @throws MetadataSourceException if the underlying methods throw any exception.
 */
@Override
public ImportRecord getRecord(Query query) throws MetadataSourceException {
    return retry(new SearchByIdCallable(query));
}

/**
 * Find the number of records matching a query;
 *
 * @param query a query string to base the search on.
 * @return the sum of the matching records over this import source
 * @throws MetadataSourceException if the underlying methods throw any exception.
 */
@Override
public int getRecordsCount(String query) throws MetadataSourceException {
    return retry(new CountByQueryCallable(query));
}

/**
 * Find the number of records matching a query;
 *
 * @param query a query object to base the search on.
 * @return the sum of the matching records over this import source
 * @throws MetadataSourceException if the underlying methods throw any exception.
 */
@Override
public int getRecordsCount(Query query) throws MetadataSourceException {
    return retry(new CountByQueryCallable(query));
}

/**
 * Find records matching a string query. Supports pagination.
 *
 * @param query a query string to base the search on.
 * @param start offset to start at
 * @param count number of records to retrieve.
 * @return a set of records. Fully transformed.
 * @throws MetadataSourceException if the underlying methods throw any exception.
 */
@Override
public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException {
    return retry(new SearchByQueryCallable(query, start, count));
}

/**
 * Find records based on an object query (its "query", "start" and "count"
 * parameters drive the paginated search).
 *
 * @param query a query object to base the search on.
 * @return a set of records. Fully transformed.
 * @throws MetadataSourceException if the underlying methods throw any exception.
 */
@Override
public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
    return retry(new SearchByQueryCallable(query));
}

/**
 * Not supported by this source.
 *
 * @throws MethodNotFoundException always — not implemented for OpenAIRE
 */
@Override
public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
    throw new MethodNotFoundException("This method is not implemented for OpenAIRE");
}

/**
 * Not supported by this source.
 *
 * @throws MethodNotFoundException always — not implemented for OpenAIRE
 */
@Override
public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
    throw new MethodNotFoundException("This method is not implemented for OpenAIRE");
}
/**
 * Set the baseAddress to this object
 *
 * @param baseAddress The String object that represents the baseAddress of this object
 */
public void setBaseAddress(String baseAddress) {
    this.baseAddress = baseAddress;
}

/**
 * Return the baseAddress set to this object
 *
 * @return The String object that represents the baseAddress of this object
 */
public String getBaseAddress() {
    return baseAddress;
}

/**
 * Set the name of the query param; this corresponds to the index used (title, author)
 *
 * @param queryParam on which index to make the query
 */
public void setQueryParam(String queryParam) {
    this.queryParam = queryParam;
}

/**
 * Get the name of the query param for the rest call
 *
 * @return the name of the query param, i.e. the index (title, author) to use
 */
public String getQueryParam() {
    return queryParam;
}
/**
 * Initialize the class: builds the JAX-RS client, resolving the base address
 * from the "openaire.base.url" configuration property when not explicitly
 * set, and defaulting the query param to "title".
 *
 * @throws Exception on generic exception
 */
@Override
public void init() throws Exception {
    Client client = ClientBuilder.newClient();
    if (baseAddress == null) {
        baseAddress = configurationService.getProperty("openaire.base.url");
    }
    if (queryParam == null) {
        queryParam = "title";
    }
    webTarget = client.target(baseAddress);
}
/**
 * Callable that fetches a single record from OpenAIRE by its
 * "openairePublicationID" and transforms it into an ImportRecord.
 */
public class SearchByIdCallable implements Callable<ImportRecord> {
    // Identifier of the record to fetch
    String id = null;

    public SearchByIdCallable(String id) {
        this.id = id;
    }

    public SearchByIdCallable(Query query) {
        this.id = query.getParameterAsClass("id", String.class);
    }

    /**
     * Performs the HTTP GET and returns the first transformed record,
     * or null when the response is not 200 OK or contains no results.
     */
    @Override
    public ImportRecord call() throws Exception {
        List<ImportRecord> results = new ArrayList<ImportRecord>();
        WebTarget localTarget = webTarget.queryParam("openairePublicationID", id);
        Invocation.Builder invocationBuilder = localTarget.request();
        Response response = invocationBuilder.get();
        if (response.getStatus() == 200) {
            String responseString = response.readEntity(String.class);
            List<Element> omElements = splitToRecords(responseString);
            if (omElements != null) {
                for (Element record : omElements) {
                    results.add(filterMultipleTitles(transformSourceRecords(record)));
                }
            }
            // BUGFIX: the previous guard (results != null) could never be false
            // since results is always allocated above, and results.get(0) threw
            // IndexOutOfBoundsException on an empty 200 response. Check
            // emptiness instead and return null when nothing was found.
            return results.isEmpty() ? null : results.get(0);
        } else {
            return null;
        }
    }
}
/**
 * Callable that queries OpenAIRE on the configured query param index and
 * parses the /header/total element of the XML response to count matches.
 */
public class CountByQueryCallable implements Callable<Integer> {
    // The query string sent as the value of the configured queryParam
    String q;

    public CountByQueryCallable(String query) {
        q = query;
    }

    public CountByQueryCallable(Query query) {
        q = query.getParameterAsClass("query", String.class);
    }

    /**
     * Performs the HTTP GET and extracts the total count from the response.
     *
     * @return the total number of matching records, or 0 when the request
     *         fails or the /header/total element is absent
     */
    @Override
    public Integer call() throws Exception {
        WebTarget localTarget = webTarget.queryParam(queryParam, q);
        Invocation.Builder invocationBuilder = localTarget.request();
        Response response = invocationBuilder.get();
        if (response.getStatus() == 200) {
            String responseString = response.readEntity(String.class);
            SAXBuilder saxBuilder = new SAXBuilder();
            Document document = saxBuilder.build(new StringReader(responseString));
            Element root = document.getRootElement();
            XPathExpression<Element> xpath = XPathFactory.instance().compile("/header/total",
                    Filters.element(), null);
            Element totalItem = xpath.evaluateFirst(root);
            // BUGFIX: previously returned null when the total element was
            // missing, causing a NullPointerException when callers unbox the
            // Integer; return 0 for consistency with the non-200 branch.
            return totalItem != null ? Integer.parseInt(totalItem.getText()) : 0;
        } else {
            return 0;
        }
    }
}
/**
 * Callable that performs a paginated OpenAIRE search on the configured
 * query param index and transforms each XML result into an ImportRecord.
 */
public class SearchByQueryCallable implements Callable<List<ImportRecord>> {
    // The query string sent as the value of the configured queryParam
    String q;
    // Zero-based page index derived from start/count
    int page;
    // Page size (number of records per page)
    int count;

    // NOTE(review): assumes count > 0 — a zero count would throw
    // ArithmeticException on the division below; confirm callers guarantee it
    public SearchByQueryCallable(String query, int start, int count) {
        this.q = query;
        this.page = start / count;
        this.count = count;
    }

    public SearchByQueryCallable(Query query) {
        this.q = query.getParameterAsClass("query", String.class);
        this.page = query.getParameterAsClass("start", Integer.class) /
            query.getParameterAsClass("count", Integer.class);
        this.count = query.getParameterAsClass("count", Integer.class);
    }

    /**
     * Performs the HTTP GET and returns the transformed records; returns an
     * empty list when the response status is not 200 OK.
     */
    @Override
    public List<ImportRecord> call() throws Exception {
        WebTarget localTarget = webTarget.queryParam(queryParam, q);
        // OpenAIRE pages are 1-based, while this class stores a 0-based index
        localTarget = localTarget.queryParam("page", page + 1);
        localTarget = localTarget.queryParam("size", count);
        List<ImportRecord> results = new ArrayList<ImportRecord>();
        Invocation.Builder invocationBuilder = localTarget.request();
        Response response = invocationBuilder.get();
        if (response.getStatus() == 200) {
            String responseString = response.readEntity(String.class);
            List<Element> omElements = splitToRecords(responseString);
            if (omElements != null) {
                for (Element record : omElements) {
                    results.add(filterMultipleTitles(transformSourceRecords(record)));
                }
            }
        }
        return results;
    }
}
/**
 * Removes duplicate unqualified dc.title entries from the given record,
 * keeping only the first occurrence. All other metadata values pass through
 * untouched.
 *
 * @param transformSourceRecords the record to filter
 * @return an ImportRecord containing at most one unqualified dc.title
 */
private ImportRecord filterMultipleTitles(ImportRecord transformSourceRecords) {
    List<MetadatumDTO> allMetadata = (List<MetadatumDTO>) transformSourceRecords.getValueList();
    ArrayList<MetadatumDTO> filtered = new ArrayList<>();
    boolean titleSeen = false;
    for (MetadatumDTO metadatum : allMetadata) {
        boolean isUnqualifiedTitle = "dc".equals(metadatum.getSchema())
                && "title".equals(metadatum.getElement())
                && metadatum.getQualifier() == null;
        if (isUnqualifiedTitle) {
            if (titleSeen) {
                // Skip every title after the first one.
                continue;
            }
            titleSeen = true;
        }
        filtered.add(metadatum);
    }
    return new ImportRecord(filtered);
}
/**
 * Parses the given XML response and extracts the individual result records
 * found under {@code //results/result}.
 *
 * @param recordsSrc the raw XML returned by the remote service
 * @return the list of record elements, or null when the XML cannot be parsed
 */
private List<Element> splitToRecords(String recordsSrc) {
    try {
        Document document = new SAXBuilder().build(new StringReader(recordsSrc));
        List namespaces = Arrays.asList(
            Namespace.getNamespace("dri", "http://www.driver-repository.eu/namespace/dri"),
            Namespace.getNamespace("oaf", "http://namespace.openaire.eu/oaf"),
            Namespace.getNamespace("xsi", "http://www.w3.org/2001/XMLSchema-instance"));
        XPathExpression<Element> recordPath = XPathFactory.instance()
            .compile("//results/result", Filters.element(), null, namespaces);
        return recordPath.evaluate(document.getRootElement());
    } catch (JDOMException | IOException e) {
        // Callers treat a null return as "no records"; parse failures are
        // deliberately swallowed here.
        return null;
    }
}
}

View File

@@ -0,0 +1,38 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.ror.service;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
 * An implementation of {@link AbstractMetadataFieldMapping} responsible for
 * defining the mapping of the ROR metadatum fields onto the DSpace metadatum
 * fields. The actual mapping is injected from the Spring bean
 * {@code rorMetadataFieldMap}.
 *
 * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com)
 */
public class RorFieldMapping extends AbstractMetadataFieldMapping {

    /**
     * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it
     * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
     * what metadatafield is generated.
     *
     * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to
     *                         the item.
     */
    @Override
    @Resource(name = "rorMetadataFieldMap")
    // Raw Map type is required here to match the parent class signature.
    public void setMetadataFieldMap(Map metadataFieldMap) {
        super.setMetadataFieldMap(metadataFieldMap);
    }
}

View File

@@ -0,0 +1,278 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.ror.service;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import javax.el.MethodNotFoundException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.client.utils.URIBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.components.QuerySource;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Implements a {@code AbstractImportMetadataSourceService} for querying ROR services.
 *
 * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com)
 */
public class RorImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<String>
    implements QuerySource {

    private final static Logger log = LogManager.getLogger();

    protected static final String ROR_IDENTIFIER_PREFIX = "https://ror.org/";

    /** Base URL of the ROR REST API (configured via {@link #setUrl(String)}). */
    private String url;

    /** HTTP request timeout in milliseconds. */
    private int timeout = 1000;

    @Autowired
    private LiveImportClient liveImportClient;

    @Override
    public String getImportSource() {
        return "ror";
    }

    @Override
    public ImportRecord getRecord(String id) throws MetadataSourceException {
        List<ImportRecord> records = retry(new SearchByIdCallable(id));
        return CollectionUtils.isEmpty(records) ? null : records.get(0);
    }

    @Override
    public int getRecordsCount(String query) throws MetadataSourceException {
        return retry(new CountByQueryCallable(query));
    }

    @Override
    public int getRecordsCount(Query query) throws MetadataSourceException {
        return retry(new CountByQueryCallable(query));
    }

    @Override
    public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException {
        // NOTE(review): start/count are currently ignored - the underlying call is
        // not paginated. TODO confirm whether ROR pagination should be wired in.
        return retry(new SearchByQueryCallable(query));
    }

    @Override
    public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
        return retry(new SearchByQueryCallable(query));
    }

    @Override
    public ImportRecord getRecord(Query query) throws MetadataSourceException {
        List<ImportRecord> records = retry(new SearchByIdCallable(query));
        return CollectionUtils.isEmpty(records) ? null : records.get(0);
    }

    @Override
    public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
        throw new MethodNotFoundException("This method is not implemented for ROR");
    }

    @Override
    public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
        throw new MethodNotFoundException("This method is not implemented for ROR");
    }

    @Override
    public void init() throws Exception {
    }

    /**
     * This class is a Callable implementation to get ROR entries based on a query
     * object. This Callable uses as query value the string queryString passed to
     * the constructor. If the object is constructed through a Query instance, the
     * Query's map entry with key "query" will be used.
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
     */
    private class SearchByQueryCallable implements Callable<List<ImportRecord>> {

        private Query query;

        private SearchByQueryCallable(String queryString) {
            query = new Query();
            query.addParameter("query", queryString);
        }

        private SearchByQueryCallable(Query query) {
            this.query = query;
        }

        @Override
        public List<ImportRecord> call() throws Exception {
            return search(query.getParameterAsClass("query", String.class));
        }
    }

    /**
     * This class is a Callable implementation to get a single ROR entry by its ROR
     * identifier. The identifier to use can be passed through the constructor as a
     * String or as Query's map entry, with the key "id".
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
     */
    private class SearchByIdCallable implements Callable<List<ImportRecord>> {

        private Query query;

        private SearchByIdCallable(Query query) {
            this.query = query;
        }

        private SearchByIdCallable(String id) {
            this.query = new Query();
            query.addParameter("id", id);
        }

        @Override
        public List<ImportRecord> call() throws Exception {
            return searchById(query.getParameterAsClass("id", String.class));
        }
    }

    /**
     * This class is a Callable implementation to count the number of entries for a
     * ROR query. This Callable uses as query value to ROR the string queryString
     * passed to constructor. If the object will be construct through {@code Query}
     * instance, the value of the Query's map with the key "query" will be used.
     *
     * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com)
     */
    private class CountByQueryCallable implements Callable<Integer> {

        private Query query;

        private CountByQueryCallable(String queryString) {
            query = new Query();
            query.addParameter("query", queryString);
        }

        private CountByQueryCallable(Query query) {
            this.query = query;
        }

        @Override
        public Integer call() throws Exception {
            return count(query.getParameterAsClass("query", String.class));
        }
    }

    /**
     * Counts the number of results for the given query.
     *
     * @param query the query string to count results for
     * @return the number of results for the given query, or 0 on any failure
     */
    public Integer count(String query) {
        try {
            Map<String, Map<String, String>> params = new HashMap<String, Map<String, String>>();
            URIBuilder uriBuilder = new URIBuilder(this.url);
            uriBuilder.addParameter("query", query);
            String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params);
            if (StringUtils.isEmpty(resp)) {
                return 0;
            }
            JsonNode jsonNode = convertStringJsonToJsonNode(resp);
            if (jsonNode == null) {
                // Malformed JSON response: treat as no results.
                return 0;
            }
            return jsonNode.at("/number_of_results").asInt();
        } catch (URISyntaxException e) {
            log.error("Invalid ROR service url: {}", this.url, e);
        }
        return 0;
    }

    /**
     * Retrieves a single record by its ROR identifier. A full ROR URL
     * (https://ror.org/...) is accepted as well as the bare identifier.
     *
     * @param id the ROR identifier (with or without the ROR URL prefix)
     * @return a list containing at most one matching record
     */
    private List<ImportRecord> searchById(String id) {
        List<ImportRecord> importResults = new ArrayList<>();
        id = StringUtils.removeStart(id, ROR_IDENTIFIER_PREFIX);
        try {
            Map<String, Map<String, String>> params = new HashMap<String, Map<String, String>>();
            URIBuilder uriBuilder = new URIBuilder(this.url + "/" + id);
            String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params);
            if (StringUtils.isEmpty(resp)) {
                return importResults;
            }
            JsonNode jsonNode = convertStringJsonToJsonNode(resp);
            if (jsonNode == null) {
                // Malformed JSON response: return no results.
                return importResults;
            }
            importResults.add(transformSourceRecords(jsonNode.toString()));
        } catch (URISyntaxException e) {
            log.error("Invalid ROR service url: {}", this.url, e);
        }
        return importResults;
    }

    /**
     * Performs a query against the ROR service and maps every returned item to an
     * {@link ImportRecord}.
     *
     * @param query the query string
     * @return the list of matching records (possibly empty, never null)
     */
    private List<ImportRecord> search(String query) {
        List<ImportRecord> importResults = new ArrayList<>();
        try {
            Map<String, Map<String, String>> params = new HashMap<String, Map<String, String>>();
            URIBuilder uriBuilder = new URIBuilder(this.url);
            uriBuilder.addParameter("query", query);
            String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params);
            if (StringUtils.isEmpty(resp)) {
                return importResults;
            }
            JsonNode jsonNode = convertStringJsonToJsonNode(resp);
            if (jsonNode == null) {
                // Malformed JSON response: return no results.
                return importResults;
            }
            JsonNode docs = jsonNode.at("/items");
            if (docs.isArray()) {
                Iterator<JsonNode> nodes = docs.elements();
                while (nodes.hasNext()) {
                    JsonNode node = nodes.next();
                    importResults.add(transformSourceRecords(node.toString()));
                }
            } else {
                importResults.add(transformSourceRecords(docs.toString()));
            }
        } catch (URISyntaxException e) {
            log.error("Invalid ROR service url: {}", this.url, e);
        }
        return importResults;
    }

    /**
     * Parses the given JSON string into a JsonNode tree.
     *
     * @param json the JSON text to parse
     * @return the parsed tree, or null when the text is not valid JSON
     */
    private JsonNode convertStringJsonToJsonNode(String json) {
        try {
            return new ObjectMapper().readTree(json);
        } catch (JsonProcessingException e) {
            log.error("Unable to process json response.", e);
        }
        return null;
    }

    public void setUrl(String url) {
        this.url = url;
    }
}

View File

@@ -0,0 +1,50 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.qaevent;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.event.Consumer;
import org.dspace.event.Event;
import org.dspace.qaevent.service.QAEventService;
import org.dspace.utils.DSpace;
/**
 * Event consumer that removes QA events from Solr when their target item is
 * deleted.
 *
 * @author Andrea Bollini (andrea.bollini at 4science.it)
 *
 */
public class QAEventsDeleteCascadeConsumer implements Consumer {

    private QAEventService qaEventService;

    @Override
    public void initialize() throws Exception {
        qaEventService = new DSpace().getSingletonService(QAEventService.class);
    }

    @Override
    public void finish(Context context) throws Exception {
        // nothing to do
    }

    @Override
    public void consume(Context context, Event event) throws Exception {
        boolean isItemDeletion = event.getEventType() == Event.DELETE
                && event.getSubjectType() == Constants.ITEM
                && event.getSubjectID() != null;
        if (isItemDeletion) {
            qaEventService.deleteEventsByTargetId(event.getSubjectID());
        }
    }

    public void end(Context context) throws Exception {
        // nothing to do
    }
}

View File

@@ -0,0 +1,46 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.qaevent;
import java.util.Date;
/**
 * This model class represent the source/provider of the QA events (as Openaire).
 *
 * @author Luca Giamminonni (luca.giamminonni at 4Science)
 *
 */
public class QASource {

    // Name of the event source/provider.
    private String name;

    // Total number of events available from this source.
    private long totalEvents;

    // Timestamp of the most recent event; may be null when no event exists.
    private Date lastEvent;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public long getTotalEvents() {
        return totalEvents;
    }

    public void setTotalEvents(long totalEvents) {
        this.totalEvents = totalEvents;
    }

    /**
     * @return a defensive copy of the last event date, or null if not set
     */
    public Date getLastEvent() {
        // Date is mutable: return a copy so callers cannot alter internal state.
        return lastEvent != null ? new Date(lastEvent.getTime()) : null;
    }

    public void setLastEvent(Date lastEvent) {
        // Store a defensive copy of the mutable Date argument.
        this.lastEvent = lastEvent != null ? new Date(lastEvent.getTime()) : null;
    }
}

View File

@@ -0,0 +1,47 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.qaevent;
import java.util.Date;
/**
 * This model class represent the quality assurance broker topic concept. A
 * topic represents a type of event and is therefore used to group events.
 *
 * @author Andrea Bollini (andrea.bollini at 4science.it)
 *
 */
public class QATopic {

    // Unique key identifying the topic.
    private String key;

    // Total number of events grouped under this topic.
    private long totalEvents;

    // Timestamp of the most recent event; may be null when no event exists.
    private Date lastEvent;

    public String getKey() {
        return key;
    }

    public void setKey(String key) {
        this.key = key;
    }

    public long getTotalEvents() {
        return totalEvents;
    }

    public void setTotalEvents(long totalEvents) {
        this.totalEvents = totalEvents;
    }

    /**
     * @return a defensive copy of the last event date, or null if not set
     */
    public Date getLastEvent() {
        // Date is mutable: return a copy so callers cannot alter internal state.
        return lastEvent != null ? new Date(lastEvent.getTime()) : null;
    }

    public void setLastEvent(Date lastEvent) {
        // Store a defensive copy of the mutable Date argument.
        this.lastEvent = lastEvent != null ? new Date(lastEvent.getTime()) : null;
    }
}

View File

@@ -0,0 +1,31 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.qaevent;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.qaevent.service.dto.QAMessageDTO;
/**
 * Interface for classes that perform a correction on the given item, driven by
 * the content of a QA event message.
 *
 * @author Andrea Bollini (andrea.bollini at 4science.it)
 *
 */
public interface QualityAssuranceAction {

    /**
     * Perform a correction on the given item.
     *
     * @param context     the DSpace context
     * @param item        the item to correct
     * @param relatedItem the related item, if any (may be null)
     * @param message     the message with the correction details
     */
    public void applyCorrection(Context context, Item item, Item relatedItem, QAMessageDTO message);
}

View File

@@ -0,0 +1,180 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.qaevent.action;
import java.sql.SQLException;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.EntityType;
import org.dspace.content.Item;
import org.dspace.content.RelationshipType;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.EntityTypeService;
import org.dspace.content.service.InstallItemService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.RelationshipService;
import org.dspace.content.service.RelationshipTypeService;
import org.dspace.content.service.WorkspaceItemService;
import org.dspace.core.Context;
import org.dspace.qaevent.QualityAssuranceAction;
import org.dspace.qaevent.service.dto.OpenaireMessageDTO;
import org.dspace.qaevent.service.dto.QAMessageDTO;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Implementation of {@link QualityAssuranceAction} that handle the relationship between the
 * item to correct and a related item. When no related item exists yet, a new
 * one is created from the metadata carried by the QA message and then linked.
 *
 * @author Andrea Bollini (andrea.bollini at 4science.it)
 *
 */
public class QAEntityOpenaireMetadataAction implements QualityAssuranceAction {

    // Rightward type of the relationship to create between the two items.
    private String relation;

    // Entity type of the related item to create/link.
    private String entityType;

    // Mapping from QA message keys to target metadata fields (schema.element[.qualifier]).
    private Map<String, String> entityMetadata;

    @Autowired
    private InstallItemService installItemService;
    @Autowired
    private ItemService itemService;
    @Autowired
    private EntityTypeService entityTypeService;
    @Autowired
    private RelationshipService relationshipService;
    @Autowired
    private RelationshipTypeService relationshipTypeService;
    @Autowired
    private WorkspaceItemService workspaceItemService;
    @Autowired
    private CollectionService collectionService;

    public void setItemService(ItemService itemService) {
        this.itemService = itemService;
    }

    public String getRelation() {
        return relation;
    }

    public void setRelation(String relation) {
        this.relation = relation;
    }

    /**
     * Splits a dotted metadata field name into its schema, element and
     * (optional) qualifier parts.
     *
     * @param metadata the field name in schema.element[.qualifier] form
     * @return a 3-element array {schema, element, qualifier-or-null}
     */
    public String[] splitMetadata(String metadata) {
        String[] result = new String[3];
        String[] split = metadata.split("\\.");
        result[0] = split[0];
        result[1] = split[1];
        if (split.length == 3) {
            result[2] = split[2];
        }
        return result;
    }

    public String getEntityType() {
        return entityType;
    }

    public void setEntityType(String entityType) {
        this.entityType = entityType;
    }

    public Map<String, String> getEntityMetadata() {
        return entityMetadata;
    }

    public void setEntityMetadata(Map<String, String> entityMetadata) {
        this.entityMetadata = entityMetadata;
    }

    /**
     * Links the item to the related item. If no related item is provided, a new
     * one is created in the collection configured for the entity type, populated
     * from the message, installed and then linked.
     */
    @Override
    public void applyCorrection(Context context, Item item, Item relatedItem, QAMessageDTO message) {
        try {
            if (relatedItem != null) {
                link(context, item, relatedItem);
            } else {
                Collection collection = collectionService.retrieveCollectionWithSubmitByEntityType(context,
                    item, entityType);
                if (collection == null) {
                    // Report the entity type that failed to resolve (collection is
                    // always null on this branch, so it carries no information).
                    throw new IllegalStateException("No collection found by entity type: " + entityType);
                }
                WorkspaceItem workspaceItem = workspaceItemService.create(context, collection, true);
                relatedItem = workspaceItem.getItem();
                for (String key : entityMetadata.keySet()) {
                    String value = getValue(message, key);
                    if (StringUtils.isNotBlank(value)) {
                        String[] targetMetadata = splitMetadata(entityMetadata.get(key));
                        itemService.addMetadata(context, relatedItem, targetMetadata[0], targetMetadata[1],
                            targetMetadata[2], null, value);
                    }
                }
                installItemService.installItem(context, workspaceItem);
                itemService.update(context, relatedItem);
                link(context, item, relatedItem);
            }
        } catch (SQLException | AuthorizeException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Create a new relationship between the two given item, based on the configured
     * relation.
     */
    private void link(Context context, Item item, Item relatedItem) throws SQLException, AuthorizeException {
        EntityType project = entityTypeService.findByEntityType(context, entityType);
        RelationshipType relType = relationshipTypeService.findByEntityType(context, project).stream()
            .filter(r -> StringUtils.equals(r.getRightwardType(), relation)).findFirst()
            .orElseThrow(() -> new IllegalStateException("No relationshipType named " + relation
                + " was found for the entity type " + entityType
                + ". A proper configuration is required to use the QAEntitiyMetadataAction."
                + " If you don't manage funding in your repository please skip this topic in"
                + " the qaevents.cfg"));
        // Create the relationship
        relationshipService.create(context, item, relatedItem, relType, -1, -1);
    }

    /**
     * Extracts the value for the given message key from an OPENAIRE message.
     *
     * @param message the QA message (only OpenaireMessageDTO is supported)
     * @param key     the configured key (acronym, code, funder, ...)
     * @return the corresponding value, or null for unknown keys or other
     *         message types
     */
    private String getValue(QAMessageDTO message, String key) {
        if (!(message instanceof OpenaireMessageDTO)) {
            return null;
        }
        OpenaireMessageDTO openaireMessage = (OpenaireMessageDTO) message;
        if (StringUtils.equals(key, "acronym")) {
            return openaireMessage.getAcronym();
        } else if (StringUtils.equals(key, "code")) {
            return openaireMessage.getCode();
        } else if (StringUtils.equals(key, "funder")) {
            return openaireMessage.getFunder();
        } else if (StringUtils.equals(key, "fundingProgram")) {
            return openaireMessage.getFundingProgram();
        } else if (StringUtils.equals(key, "jurisdiction")) {
            return openaireMessage.getJurisdiction();
        } else if (StringUtils.equals(key, "openaireId")) {
            return openaireMessage.getOpenaireId();
        } else if (StringUtils.equals(key, "title")) {
            return openaireMessage.getTitle();
        }
        return null;
    }
}

View File

@@ -0,0 +1,86 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.qaevent.action;
import java.sql.SQLException;
import java.util.Map;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.qaevent.QualityAssuranceAction;
import org.dspace.qaevent.service.dto.OpenaireMessageDTO;
import org.dspace.qaevent.service.dto.QAMessageDTO;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Implementation of {@link QualityAssuranceAction} that add a specific metadata on the given
 * item based on the OPENAIRE message type.
 *
 * @author Andrea Bollini (andrea.bollini at 4science.it)
 *
 */
public class QAOpenaireMetadataMapAction implements QualityAssuranceAction {

    /** Fallback key used when the message type has no explicit mapping. */
    public static final String DEFAULT = "default";

    // Mapping from OPENAIRE message type to target metadata field.
    private Map<String, String> types;

    @Autowired
    private ItemService itemService;

    public void setItemService(ItemService itemService) {
        this.itemService = itemService;
    }

    public Map<String, String> getTypes() {
        return types;
    }

    public void setTypes(Map<String, String> types) {
        this.types = types;
    }

    /**
     * Apply the correction on one metadata field of the given item based on the
     * openaire message type.
     *
     * @throws IllegalArgumentException if the message is not an OpenaireMessageDTO
     * @throws IllegalStateException    if no metadata field is configured for the
     *                                  message type and no default mapping exists
     */
    @Override
    public void applyCorrection(Context context, Item item, Item relatedItem, QAMessageDTO message) {
        if (!(message instanceof OpenaireMessageDTO)) {
            throw new IllegalArgumentException("Unsupported message type: " + message.getClass());
        }
        OpenaireMessageDTO openaireMessage = (OpenaireMessageDTO) message;
        try {
            String targetMetadata = types.get(openaireMessage.getType());
            if (targetMetadata == null) {
                targetMetadata = types.get(DEFAULT);
            }
            if (targetMetadata == null) {
                // Fail with a clear message instead of an NPE inside splitMetadata.
                throw new IllegalStateException("No metadata field configured for openaire message type '"
                    + openaireMessage.getType() + "' and no '" + DEFAULT + "' mapping is defined");
            }
            String[] metadata = splitMetadata(targetMetadata);
            itemService.addMetadata(context, item, metadata[0], metadata[1], metadata[2], null,
                openaireMessage.getValue());
            itemService.update(context, item);
        } catch (SQLException | AuthorizeException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Splits a dotted metadata field name into its schema, element and
     * (optional) qualifier parts.
     *
     * @param metadata the field name in schema.element[.qualifier] form
     * @return a 3-element array {schema, element, qualifier-or-null}
     */
    public String[] splitMetadata(String metadata) {
        String[] result = new String[3];
        String[] split = metadata.split("\\.");
        result[0] = split[0];
        result[1] = split[1];
        if (split.length == 3) {
            result[2] = split[2];
        }
        return result;
    }
}

Some files were not shown because too many files have changed in this diff Show More