mirror of
https://github.com/DSpace/DSpace.git
synced 2025-10-07 10:04:21 +00:00
Merge branch 'main' into atmire-hierarchical-vocabulary-preloadLevel-config-fix-main
This commit is contained in:
@@ -1,26 +0,0 @@
|
||||
# This workflow runs whenever a new pull request is created
|
||||
# TEMPORARILY DISABLED. Unfortunately this doesn't work for PRs created from forked repositories (which is how we tend to create PRs).
|
||||
# There is no known workaround yet. See https://github.community/t/how-to-use-github-token-for-prs-from-forks/16818
|
||||
name: Pull Request opened
|
||||
|
||||
# Only run for newly opened PRs against the "main" branch
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened]
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
automation:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
# Assign the PR to whomever created it. This is useful for visualizing assignments on project boards
|
||||
# See https://github.com/marketplace/actions/pull-request-assigner
|
||||
- name: Assign PR to creator
|
||||
uses: thomaseizinger/assign-pr-creator-action@v1.0.0
|
||||
# Note, this authentication token is created automatically
|
||||
# See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
# Ignore errors. It is possible the PR was created by someone who cannot be assigned
|
||||
continue-on-error: true
|
26
.github/workflows/build.yml
vendored
26
.github/workflows/build.yml
vendored
@@ -21,11 +21,11 @@ jobs:
|
||||
# Also specify version of Java to use (this can allow us to optionally run tests on multiple JDKs in future)
|
||||
matrix:
|
||||
include:
|
||||
# NOTE: Unit Tests include deprecated REST API v6 (as it has unit tests)
|
||||
# NOTE: Unit Tests include a retry for occasionally failing tests
|
||||
# - surefire.rerunFailingTestsCount => try again for flakey tests, and keep track of/report on number of retries
|
||||
- type: "Unit Tests"
|
||||
java: 11
|
||||
mvnflags: "-DskipUnitTests=false -Pdspace-rest -Dsurefire.rerunFailingTestsCount=2"
|
||||
mvnflags: "-DskipUnitTests=false -Dsurefire.rerunFailingTestsCount=2"
|
||||
resultsdir: "**/target/surefire-reports/**"
|
||||
# NOTE: ITs skip all code validation checks, as they are already done by Unit Test job.
|
||||
# - enforcer.skip => Skip maven-enforcer-plugin rules
|
||||
@@ -45,7 +45,7 @@ jobs:
|
||||
steps:
|
||||
# https://github.com/actions/checkout
|
||||
- name: Checkout codebase
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# https://github.com/actions/setup-java
|
||||
- name: Install JDK ${{ matrix.java }}
|
||||
@@ -53,16 +53,7 @@ jobs:
|
||||
with:
|
||||
java-version: ${{ matrix.java }}
|
||||
distribution: 'temurin'
|
||||
|
||||
# https://github.com/actions/cache
|
||||
- name: Cache Maven dependencies
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
# Cache entire ~/.m2/repository
|
||||
path: ~/.m2/repository
|
||||
# Cache key is hash of all pom.xml files. Therefore any changes to POMs will invalidate cache
|
||||
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
|
||||
restore-keys: ${{ runner.os }}-maven-
|
||||
cache: 'maven'
|
||||
|
||||
# Run parallel Maven builds based on the above 'strategy.matrix'
|
||||
- name: Run Maven ${{ matrix.type }}
|
||||
@@ -96,7 +87,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Download artifacts from previous 'tests' job
|
||||
- name: Download coverage artifacts
|
||||
@@ -108,10 +99,13 @@ jobs:
|
||||
# Retry action: https://github.com/marketplace/actions/retry-action
|
||||
# Codecov action: https://github.com/codecov/codecov-action
|
||||
- name: Upload coverage to Codecov.io
|
||||
uses: Wandalen/wretry.action@v1.0.36
|
||||
uses: Wandalen/wretry.action@v1.3.0
|
||||
with:
|
||||
action: codecov/codecov-action@v3
|
||||
# Try upload 5 times max
|
||||
# Ensure codecov-action throws an error when it fails to upload
|
||||
with: |
|
||||
fail_ci_if_error: true
|
||||
# Try re-running action 5 times max
|
||||
attempt_limit: 5
|
||||
# Run again in 30 seconds
|
||||
attempt_delay: 30000
|
||||
|
12
.github/workflows/codescan.yml
vendored
12
.github/workflows/codescan.yml
vendored
@@ -5,12 +5,16 @@
|
||||
# because CodeQL requires a fresh build with all tests *disabled*.
|
||||
name: "Code Scanning"
|
||||
|
||||
# Run this code scan for all pushes / PRs to main branch. Also run once a week.
|
||||
# Run this code scan for all pushes / PRs to main or maintenance branches. Also run once a week.
|
||||
on:
|
||||
push:
|
||||
branches: [ main ]
|
||||
branches:
|
||||
- main
|
||||
- 'dspace-**'
|
||||
pull_request:
|
||||
branches: [ main ]
|
||||
branches:
|
||||
- main
|
||||
- 'dspace-**'
|
||||
# Don't run if PR is only updating static documentation
|
||||
paths-ignore:
|
||||
- '**/*.md'
|
||||
@@ -31,7 +35,7 @@ jobs:
|
||||
steps:
|
||||
# https://github.com/actions/checkout
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# https://github.com/actions/setup-java
|
||||
- name: Install JDK
|
||||
|
305
.github/workflows/docker.yml
vendored
305
.github/workflows/docker.yml
vendored
@@ -3,6 +3,7 @@ name: Docker images
|
||||
|
||||
# Run this Build for all pushes to 'main' or maintenance branches, or tagged releases.
|
||||
# Also run for PRs to ensure PR doesn't break Docker build process
|
||||
# NOTE: uses "reusable-docker-build.yml" to actually build each of the Docker images.
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
@@ -16,240 +17,134 @@ permissions:
|
||||
contents: read # to fetch code (actions/checkout)
|
||||
|
||||
jobs:
|
||||
docker:
|
||||
####################################################
|
||||
# Build/Push the 'dspace/dspace-dependencies' image.
|
||||
# This image is used by all other DSpace build jobs.
|
||||
####################################################
|
||||
dspace-dependencies:
|
||||
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
|
||||
if: github.repository == 'dspace/dspace'
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
# Define tags to use for Docker images based on Git tags/branches (for docker/metadata-action)
|
||||
# For a new commit on default branch (main), use the literal tag 'dspace-7_x' on Docker image.
|
||||
# For a new commit on other branches, use the branch name as the tag for Docker image.
|
||||
# For a new tag, copy that tag name as the tag for Docker image.
|
||||
IMAGE_TAGS: |
|
||||
type=raw,value=dspace-7_x,enable=${{ endsWith(github.ref, github.event.repository.default_branch) }}
|
||||
type=ref,event=branch,enable=${{ !endsWith(github.ref, github.event.repository.default_branch) }}
|
||||
type=ref,event=tag
|
||||
# Define default tag "flavor" for docker/metadata-action per
|
||||
# https://github.com/docker/metadata-action#flavor-input
|
||||
# We turn off 'latest' tag by default.
|
||||
TAGS_FLAVOR: |
|
||||
latest=false
|
||||
# Architectures / Platforms for which we will build Docker images
|
||||
# If this is a PR, we ONLY build for AMD64. For PRs we only do a sanity check test to ensure Docker builds work.
|
||||
# If this is NOT a PR (e.g. a tag or merge commit), also build for ARM64. NOTE: The ARM64 build takes MUCH
|
||||
# longer (around 45mins or so) which is why we only run it when pushing a new Docker image.
|
||||
PLATFORMS: linux/amd64${{ github.event_name != 'pull_request' && ', linux/arm64' || '' }}
|
||||
|
||||
steps:
|
||||
# https://github.com/actions/checkout
|
||||
- name: Checkout codebase
|
||||
uses: actions/checkout@v3
|
||||
|
||||
# https://github.com/docker/setup-buildx-action
|
||||
- name: Setup Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
||||
# https://github.com/docker/setup-qemu-action
|
||||
- name: Set up QEMU emulation to build for multiple architectures
|
||||
uses: docker/setup-qemu-action@v2
|
||||
|
||||
# https://github.com/docker/login-action
|
||||
- name: Login to DockerHub
|
||||
# Only login if not a PR, as PRs only trigger a Docker build and not a push
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@v2
|
||||
uses: ./.github/workflows/reusable-docker-build.yml
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
|
||||
|
||||
####################################################
|
||||
# Build/Push the 'dspace/dspace-dependencies' image
|
||||
####################################################
|
||||
# https://github.com/docker/metadata-action
|
||||
# Get Metadata for docker_build_deps step below
|
||||
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-dependencies' image
|
||||
id: meta_build_deps
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
images: dspace/dspace-dependencies
|
||||
tags: ${{ env.IMAGE_TAGS }}
|
||||
flavor: ${{ env.TAGS_FLAVOR }}
|
||||
|
||||
# https://github.com/docker/build-push-action
|
||||
- name: Build and push 'dspace-dependencies' image
|
||||
id: docker_build_deps
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: .
|
||||
file: ./Dockerfile.dependencies
|
||||
platforms: ${{ env.PLATFORMS }}
|
||||
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
|
||||
# but we ONLY do an image push to DockerHub if it's NOT a PR
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
# Use tags / labels provided by 'docker/metadata-action' above
|
||||
tags: ${{ steps.meta_build_deps.outputs.tags }}
|
||||
labels: ${{ steps.meta_build_deps.outputs.labels }}
|
||||
build_id: dspace-dependencies
|
||||
image_name: dspace/dspace-dependencies
|
||||
dockerfile_path: ./Dockerfile.dependencies
|
||||
secrets:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}
|
||||
|
||||
#######################################
|
||||
# Build/Push the 'dspace/dspace' image
|
||||
#######################################
|
||||
# Get Metadata for docker_build step below
|
||||
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace' image
|
||||
id: meta_build
|
||||
uses: docker/metadata-action@v4
|
||||
dspace:
|
||||
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
|
||||
if: github.repository == 'dspace/dspace'
|
||||
# Must run after 'dspace-dependencies' job above
|
||||
needs: dspace-dependencies
|
||||
uses: ./.github/workflows/reusable-docker-build.yml
|
||||
with:
|
||||
images: dspace/dspace
|
||||
tags: ${{ env.IMAGE_TAGS }}
|
||||
flavor: ${{ env.TAGS_FLAVOR }}
|
||||
build_id: dspace
|
||||
image_name: dspace/dspace
|
||||
dockerfile_path: ./Dockerfile
|
||||
secrets:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}
|
||||
# Enable redeploy of sandbox & demo if the branch for this image matches the deployment branch of
|
||||
# these sites as specified in reusable-docker-build.xml
|
||||
REDEPLOY_SANDBOX_URL: ${{ secrets.REDEPLOY_SANDBOX_URL }}
|
||||
REDEPLOY_DEMO_URL: ${{ secrets.REDEPLOY_DEMO_URL }}
|
||||
|
||||
- name: Build and push 'dspace' image
|
||||
id: docker_build
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: .
|
||||
file: ./Dockerfile
|
||||
platforms: ${{ env.PLATFORMS }}
|
||||
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
|
||||
# but we ONLY do an image push to DockerHub if it's NOT a PR
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
# Use tags / labels provided by 'docker/metadata-action' above
|
||||
tags: ${{ steps.meta_build.outputs.tags }}
|
||||
labels: ${{ steps.meta_build.outputs.labels }}
|
||||
|
||||
#####################################################
|
||||
#############################################################
|
||||
# Build/Push the 'dspace/dspace' image ('-test' tag)
|
||||
#####################################################
|
||||
# Get Metadata for docker_build_test step below
|
||||
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-test' image
|
||||
id: meta_build_test
|
||||
uses: docker/metadata-action@v4
|
||||
#############################################################
|
||||
dspace-test:
|
||||
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
|
||||
if: github.repository == 'dspace/dspace'
|
||||
# Must run after 'dspace-dependencies' job above
|
||||
needs: dspace-dependencies
|
||||
uses: ./.github/workflows/reusable-docker-build.yml
|
||||
with:
|
||||
images: dspace/dspace
|
||||
tags: ${{ env.IMAGE_TAGS }}
|
||||
build_id: dspace-test
|
||||
image_name: dspace/dspace
|
||||
dockerfile_path: ./Dockerfile.test
|
||||
# As this is a test/development image, its tags are all suffixed with "-test". Otherwise, it uses the same
|
||||
# tagging logic as the primary 'dspace/dspace' image above.
|
||||
flavor: ${{ env.TAGS_FLAVOR }}
|
||||
suffix=-test
|
||||
|
||||
- name: Build and push 'dspace-test' image
|
||||
id: docker_build_test
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: .
|
||||
file: ./Dockerfile.test
|
||||
platforms: ${{ env.PLATFORMS }}
|
||||
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
|
||||
# but we ONLY do an image push to DockerHub if it's NOT a PR
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
# Use tags / labels provided by 'docker/metadata-action' above
|
||||
tags: ${{ steps.meta_build_test.outputs.tags }}
|
||||
labels: ${{ steps.meta_build_test.outputs.labels }}
|
||||
tags_flavor: suffix=-test
|
||||
secrets:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}
|
||||
|
||||
###########################################
|
||||
# Build/Push the 'dspace/dspace-cli' image
|
||||
###########################################
|
||||
# Get Metadata for docker_build_test step below
|
||||
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-cli' image
|
||||
id: meta_build_cli
|
||||
uses: docker/metadata-action@v4
|
||||
dspace-cli:
|
||||
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
|
||||
if: github.repository == 'dspace/dspace'
|
||||
# Must run after 'dspace-dependencies' job above
|
||||
needs: dspace-dependencies
|
||||
uses: ./.github/workflows/reusable-docker-build.yml
|
||||
with:
|
||||
images: dspace/dspace-cli
|
||||
tags: ${{ env.IMAGE_TAGS }}
|
||||
flavor: ${{ env.TAGS_FLAVOR }}
|
||||
|
||||
- name: Build and push 'dspace-cli' image
|
||||
id: docker_build_cli
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: .
|
||||
file: ./Dockerfile.cli
|
||||
platforms: ${{ env.PLATFORMS }}
|
||||
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
|
||||
# but we ONLY do an image push to DockerHub if it's NOT a PR
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
# Use tags / labels provided by 'docker/metadata-action' above
|
||||
tags: ${{ steps.meta_build_cli.outputs.tags }}
|
||||
labels: ${{ steps.meta_build_cli.outputs.labels }}
|
||||
build_id: dspace-cli
|
||||
image_name: dspace/dspace-cli
|
||||
dockerfile_path: ./Dockerfile.cli
|
||||
secrets:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}
|
||||
|
||||
###########################################
|
||||
# Build/Push the 'dspace/dspace-solr' image
|
||||
###########################################
|
||||
# Get Metadata for docker_build_solr step below
|
||||
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-solr' image
|
||||
id: meta_build_solr
|
||||
uses: docker/metadata-action@v4
|
||||
dspace-solr:
|
||||
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
|
||||
if: github.repository == 'dspace/dspace'
|
||||
uses: ./.github/workflows/reusable-docker-build.yml
|
||||
with:
|
||||
images: dspace/dspace-solr
|
||||
tags: ${{ env.IMAGE_TAGS }}
|
||||
flavor: ${{ env.TAGS_FLAVOR }}
|
||||
|
||||
- name: Build and push 'dspace-solr' image
|
||||
id: docker_build_solr
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: .
|
||||
file: ./dspace/src/main/docker/dspace-solr/Dockerfile
|
||||
platforms: ${{ env.PLATFORMS }}
|
||||
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
|
||||
# but we ONLY do an image push to DockerHub if it's NOT a PR
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
# Use tags / labels provided by 'docker/metadata-action' above
|
||||
tags: ${{ steps.meta_build_solr.outputs.tags }}
|
||||
labels: ${{ steps.meta_build_solr.outputs.labels }}
|
||||
build_id: dspace-solr
|
||||
image_name: dspace/dspace-solr
|
||||
dockerfile_path: ./dspace/src/main/docker/dspace-solr/Dockerfile
|
||||
# Must pass solrconfigs to the Dockerfile so that it can find the required Solr config files
|
||||
dockerfile_additional_contexts: 'solrconfigs=./dspace/solr/'
|
||||
secrets:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}
|
||||
# Enable redeploy of sandbox & demo SOLR instance whenever dspace-solr image changes for deployed branch.
|
||||
# These URLs MUST use different secrets than 'dspace/dspace' image build above as they are deployed separately.
|
||||
REDEPLOY_SANDBOX_URL: ${{ secrets.REDEPLOY_SANDBOX_SOLR_URL }}
|
||||
REDEPLOY_DEMO_URL: ${{ secrets.REDEPLOY_DEMO_SOLR_URL }}
|
||||
|
||||
###########################################################
|
||||
# Build/Push the 'dspace/dspace-postgres-pgcrypto' image
|
||||
###########################################################
|
||||
# Get Metadata for docker_build_postgres step below
|
||||
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-postgres-pgcrypto' image
|
||||
id: meta_build_postgres
|
||||
uses: docker/metadata-action@v4
|
||||
dspace-postgres-pgcrypto:
|
||||
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
|
||||
if: github.repository == 'dspace/dspace'
|
||||
uses: ./.github/workflows/reusable-docker-build.yml
|
||||
with:
|
||||
images: dspace/dspace-postgres-pgcrypto
|
||||
tags: ${{ env.IMAGE_TAGS }}
|
||||
flavor: ${{ env.TAGS_FLAVOR }}
|
||||
build_id: dspace-postgres-pgcrypto
|
||||
image_name: dspace/dspace-postgres-pgcrypto
|
||||
# Must build out of subdirectory to have access to install script for pgcrypto.
|
||||
# NOTE: this context will build the image based on the Dockerfile in the specified directory
|
||||
dockerfile_context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/
|
||||
secrets:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}
|
||||
|
||||
- name: Build and push 'dspace-postgres-pgcrypto' image
|
||||
id: docker_build_postgres
|
||||
uses: docker/build-push-action@v3
|
||||
########################################################################
|
||||
# Build/Push the 'dspace/dspace-postgres-pgcrypto' image (-loadsql tag)
|
||||
########################################################################
|
||||
dspace-postgres-pgcrypto-loadsql:
|
||||
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
|
||||
if: github.repository == 'dspace/dspace'
|
||||
uses: ./.github/workflows/reusable-docker-build.yml
|
||||
with:
|
||||
# Must build out of subdirectory to have access to install script for pgcrypto
|
||||
context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/
|
||||
dockerfile: Dockerfile
|
||||
platforms: ${{ env.PLATFORMS }}
|
||||
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
|
||||
# but we ONLY do an image push to DockerHub if it's NOT a PR
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
# Use tags / labels provided by 'docker/metadata-action' above
|
||||
tags: ${{ steps.meta_build_postgres.outputs.tags }}
|
||||
labels: ${{ steps.meta_build_postgres.outputs.labels }}
|
||||
|
||||
###########################################################
|
||||
# Build/Push the 'dspace/dspace-postgres-pgcrypto' image ('-loadsql' tag)
|
||||
###########################################################
|
||||
# Get Metadata for docker_build_postgres_loadsql step below
|
||||
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-postgres-pgcrypto-loadsql' image
|
||||
id: meta_build_postgres_loadsql
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
images: dspace/dspace-postgres-pgcrypto
|
||||
tags: ${{ env.IMAGE_TAGS }}
|
||||
build_id: dspace-postgres-pgcrypto-loadsql
|
||||
image_name: dspace/dspace-postgres-pgcrypto
|
||||
# Must build out of subdirectory to have access to install script for pgcrypto.
|
||||
# NOTE: this context will build the image based on the Dockerfile in the specified directory
|
||||
dockerfile_context: ./dspace/src/main/docker/dspace-postgres-pgcrypto-curl/
|
||||
# Suffix all tags with "-loadsql". Otherwise, it uses the same
|
||||
# tagging logic as the primary 'dspace/dspace-postgres-pgcrypto' image above.
|
||||
flavor: ${{ env.TAGS_FLAVOR }}
|
||||
suffix=-loadsql
|
||||
|
||||
- name: Build and push 'dspace-postgres-pgcrypto-loadsql' image
|
||||
id: docker_build_postgres_loadsql
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
# Must build out of subdirectory to have access to install script for pgcrypto
|
||||
context: ./dspace/src/main/docker/dspace-postgres-pgcrypto-curl/
|
||||
dockerfile: Dockerfile
|
||||
platforms: ${{ env.PLATFORMS }}
|
||||
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
|
||||
# but we ONLY do an image push to DockerHub if it's NOT a PR
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
# Use tags / labels provided by 'docker/metadata-action' above
|
||||
tags: ${{ steps.meta_build_postgres_loadsql.outputs.tags }}
|
||||
labels: ${{ steps.meta_build_postgres_loadsql.outputs.labels }}
|
||||
tags_flavor: suffix=-loadsql
|
||||
secrets:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }}
|
9
.github/workflows/label_merge_conflicts.yml
vendored
9
.github/workflows/label_merge_conflicts.yml
vendored
@@ -1,11 +1,12 @@
|
||||
# This workflow checks open PRs for merge conflicts and labels them when conflicts are found
|
||||
name: Check for merge conflicts
|
||||
|
||||
# Run whenever the "main" branch is updated
|
||||
# NOTE: This means merge conflicts are only checked for when a PR is merged to main.
|
||||
# Run this for all pushes (i.e. merges) to 'main' or maintenance branches
|
||||
on:
|
||||
push:
|
||||
branches: [ main ]
|
||||
branches:
|
||||
- main
|
||||
- 'dspace-**'
|
||||
# So that the `conflict_label_name` is removed if conflicts are resolved,
|
||||
# we allow this to run for `pull_request_target` so that github secrets are available.
|
||||
pull_request_target:
|
||||
@@ -24,6 +25,8 @@ jobs:
|
||||
# See: https://github.com/prince-chrismc/label-merge-conflicts-action
|
||||
- name: Auto-label PRs with merge conflicts
|
||||
uses: prince-chrismc/label-merge-conflicts-action@v3
|
||||
# Ignore any failures -- may occur (randomly?) for older, outdated PRs.
|
||||
continue-on-error: true
|
||||
# Add "merge conflict" label if a merge conflict is detected. Remove it when resolved.
|
||||
# Note, the authentication token is created automatically
|
||||
# See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token
|
||||
|
46
.github/workflows/port_merged_pull_request.yml
vendored
Normal file
46
.github/workflows/port_merged_pull_request.yml
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
# This workflow will attempt to port a merged pull request to
|
||||
# the branch specified in a "port to" label (if exists)
|
||||
name: Port merged Pull Request
|
||||
|
||||
# Only run for merged PRs against the "main" or maintenance branches
|
||||
# We allow this to run for `pull_request_target` so that github secrets are available
|
||||
# (This is required when the PR comes from a forked repo)
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [ closed ]
|
||||
branches:
|
||||
- main
|
||||
- 'dspace-**'
|
||||
|
||||
permissions:
|
||||
contents: write # so action can add comments
|
||||
pull-requests: write # so action can create pull requests
|
||||
|
||||
jobs:
|
||||
port_pr:
|
||||
runs-on: ubuntu-latest
|
||||
# Don't run on closed *unmerged* pull requests
|
||||
if: github.event.pull_request.merged
|
||||
steps:
|
||||
# Checkout code
|
||||
- uses: actions/checkout@v4
|
||||
# Port PR to other branch (ONLY if labeled with "port to")
|
||||
# See https://github.com/korthout/backport-action
|
||||
- name: Create backport pull requests
|
||||
uses: korthout/backport-action@v2
|
||||
with:
|
||||
# Trigger based on a "port to [branch]" label on PR
|
||||
# (This label must specify the branch name to port to)
|
||||
label_pattern: '^port to ([^ ]+)$'
|
||||
# Title to add to the (newly created) port PR
|
||||
pull_title: '[Port ${target_branch}] ${pull_title}'
|
||||
# Description to add to the (newly created) port PR
|
||||
pull_description: 'Port of #${pull_number} by @${pull_author} to `${target_branch}`.'
|
||||
# Copy all labels from original PR to (newly created) port PR
|
||||
# NOTE: The labels matching 'label_pattern' are automatically excluded
|
||||
copy_labels_pattern: '.*'
|
||||
# Skip any merge commits in the ported PR. This means only non-merge commits are cherry-picked to the new PR
|
||||
merge_commits: 'skip'
|
||||
# Use a personal access token (PAT) to create PR as 'dspace-bot' user.
|
||||
# A PAT is required in order for the new PR to trigger its own actions (for CI checks)
|
||||
github_token: ${{ secrets.PR_PORT_TOKEN }}
|
24
.github/workflows/pull_request_opened.yml
vendored
Normal file
24
.github/workflows/pull_request_opened.yml
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
# This workflow runs whenever a new pull request is created
|
||||
name: Pull Request opened
|
||||
|
||||
# Only run for newly opened PRs against the "main" or maintenance branches
|
||||
# We allow this to run for `pull_request_target` so that github secrets are available
|
||||
# (This is required to assign a PR back to the creator when the PR comes from a forked repo)
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [ opened ]
|
||||
branches:
|
||||
- main
|
||||
- 'dspace-**'
|
||||
|
||||
permissions:
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
automation:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
# Assign the PR to whomever created it. This is useful for visualizing assignments on project boards
|
||||
# See https://github.com/toshimaru/auto-author-assign
|
||||
- name: Assign PR to creator
|
||||
uses: toshimaru/auto-author-assign@v2.0.1
|
225
.github/workflows/reusable-docker-build.yml
vendored
Normal file
225
.github/workflows/reusable-docker-build.yml
vendored
Normal file
@@ -0,0 +1,225 @@
|
||||
#
|
||||
# DSpace's reusable Docker build/push workflow.
|
||||
#
|
||||
# This is used by docker.yml for all Docker image builds
|
||||
name: Reusable DSpace Docker Build
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
# Possible Inputs to this reusable job
|
||||
inputs:
|
||||
# Build name/id for this Docker build. Used for digest storage to avoid digest overlap between builds.
|
||||
build_id:
|
||||
required: true
|
||||
type: string
|
||||
# Requires the image name to build (e.g dspace/dspace-test)
|
||||
image_name:
|
||||
required: true
|
||||
type: string
|
||||
# Optionally the path to the Dockerfile to use for the build. (Default is [dockerfile_context]/Dockerfile)
|
||||
dockerfile_path:
|
||||
required: false
|
||||
type: string
|
||||
# Optionally the context directory to build the Dockerfile within. Defaults to "." (current directory)
|
||||
dockerfile_context:
|
||||
required: false
|
||||
type: string
|
||||
default: '.'
|
||||
# Optionally a list of "additional_contexts" to pass to Dockerfile. Defaults to empty
|
||||
dockerfile_additional_contexts:
|
||||
required: false
|
||||
type: string
|
||||
default: ''
|
||||
# If Docker image should have additional tag flavor details (e.g. a suffix), it may be passed in.
|
||||
tags_flavor:
|
||||
required: false
|
||||
type: string
|
||||
secrets:
|
||||
# Requires that Docker login info be passed in as secrets.
|
||||
DOCKER_USERNAME:
|
||||
required: true
|
||||
DOCKER_ACCESS_TOKEN:
|
||||
required: true
|
||||
# These URL secrets are optional. When specified & branch checks match, the redeployment code below will trigger.
|
||||
# Therefore builds which need to trigger redeployment MUST specify these URLs. All others should leave them empty.
|
||||
REDEPLOY_SANDBOX_URL:
|
||||
required: false
|
||||
REDEPLOY_DEMO_URL:
|
||||
required: false
|
||||
|
||||
# Define shared default settings as environment variables
|
||||
env:
|
||||
IMAGE_NAME: ${{ inputs.image_name }}
|
||||
# Define tags to use for Docker images based on Git tags/branches (for docker/metadata-action)
|
||||
# For a new commit on default branch (main), use the literal tag 'latest' on Docker image.
|
||||
# For a new commit on other branches, use the branch name as the tag for Docker image.
|
||||
# For a new tag, copy that tag name as the tag for Docker image.
|
||||
IMAGE_TAGS: |
|
||||
type=raw,value=latest,enable=${{ github.ref_name == github.event.repository.default_branch }}
|
||||
type=ref,event=branch,enable=${{ github.ref_name != github.event.repository.default_branch }}
|
||||
type=ref,event=tag
|
||||
# Define default tag "flavor" for docker/metadata-action per
|
||||
# https://github.com/docker/metadata-action#flavor-input
|
||||
# We manage the 'latest' tag ourselves to the 'main' branch (see settings above)
|
||||
TAGS_FLAVOR: |
|
||||
latest=false
|
||||
${{ inputs.tags_flavor }}
|
||||
# When these URL variables are specified & required branch matches, then the sandbox or demo site will be redeployed.
|
||||
# See "Redeploy" steps below for more details.
|
||||
REDEPLOY_SANDBOX_URL: ${{ secrets.REDEPLOY_SANDBOX_URL }}
|
||||
REDEPLOY_DEMO_URL: ${{ secrets.REDEPLOY_DEMO_URL }}
|
||||
# Current DSpace maintenance branch (and architecture) which is deployed to demo.dspace.org / sandbox.dspace.org
|
||||
# (NOTE: No deployment branch specified for sandbox.dspace.org as it uses the default_branch)
|
||||
DEPLOY_DEMO_BRANCH: 'dspace-7_x'
|
||||
DEPLOY_ARCH: 'linux/amd64'
|
||||
|
||||
jobs:
|
||||
docker-build:
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
# Architectures / Platforms for which we will build Docker images
|
||||
arch: [ 'linux/amd64', 'linux/arm64' ]
|
||||
os: [ ubuntu-latest ]
|
||||
isPr:
|
||||
- ${{ github.event_name == 'pull_request' }}
|
||||
# If this is a PR, we ONLY build for AMD64. For PRs we only do a sanity check test to ensure Docker builds work.
|
||||
# The below exclude therefore ensures we do NOT build ARM64 for PRs.
|
||||
exclude:
|
||||
- isPr: true
|
||||
os: ubuntu-latest
|
||||
arch: linux/arm64
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
steps:
|
||||
# https://github.com/actions/checkout
|
||||
- name: Checkout codebase
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# https://github.com/docker/setup-buildx-action
|
||||
- name: Setup Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
# https://github.com/docker/setup-qemu-action
|
||||
- name: Set up QEMU emulation to build for multiple architectures
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
# https://github.com/docker/login-action
|
||||
- name: Login to DockerHub
|
||||
# Only login if not a PR, as PRs only trigger a Docker build and not a push
|
||||
if: ${{ ! matrix.isPr }}
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
|
||||
|
||||
# https://github.com/docker/metadata-action
|
||||
# Get Metadata for docker_build_deps step below
|
||||
- name: Sync metadata (tags, labels) from GitHub to Docker for image
|
||||
id: meta_build
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.IMAGE_NAME }}
|
||||
tags: ${{ env.IMAGE_TAGS }}
|
||||
flavor: ${{ env.TAGS_FLAVOR }}
|
||||
|
||||
# https://github.com/docker/build-push-action
|
||||
- name: Build and push image
|
||||
id: docker_build
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
build-contexts: |
|
||||
${{ inputs.dockerfile_additional_contexts }}
|
||||
context: ${{ inputs.dockerfile_context }}
|
||||
file: ${{ inputs.dockerfile_path }}
|
||||
platforms: ${{ matrix.arch }}
|
||||
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
|
||||
# but we ONLY do an image push to DockerHub if it's NOT a PR
|
||||
push: ${{ ! matrix.isPr }}
|
||||
# Use tags / labels provided by 'docker/metadata-action' above
|
||||
tags: ${{ steps.meta_build.outputs.tags }}
|
||||
labels: ${{ steps.meta_build.outputs.labels }}
|
||||
|
||||
# Export the digest of Docker build locally (for non PRs only)
|
||||
- name: Export Docker build digest
|
||||
if: ${{ ! matrix.isPr }}
|
||||
run: |
|
||||
mkdir -p /tmp/digests
|
||||
digest="${{ steps.docker_build.outputs.digest }}"
|
||||
touch "/tmp/digests/${digest#sha256:}"
|
||||
|
||||
# Upload digest to an artifact, so that it can be used in manifest below
|
||||
- name: Upload Docker build digest to artifact
|
||||
if: ${{ ! matrix.isPr }}
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: digests-${{ inputs.build_id }}
|
||||
path: /tmp/digests/*
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
# If this build is NOT a PR and passed in a REDEPLOY_SANDBOX_URL secret,
|
||||
# Then redeploy https://sandbox.dspace.org if this build is for our deployment architecture and 'main' branch.
|
||||
- name: Redeploy sandbox.dspace.org (based on main branch)
|
||||
if: |
|
||||
!matrix.isPR &&
|
||||
env.REDEPLOY_SANDBOX_URL != '' &&
|
||||
matrix.arch == env.DEPLOY_ARCH &&
|
||||
github.ref_name == github.event.repository.default_branch
|
||||
run: |
|
||||
curl -X POST $REDEPLOY_SANDBOX_URL
|
||||
|
||||
# If this build is NOT a PR and passed in a REDEPLOY_DEMO_URL secret,
|
||||
# Then redeploy https://demo.dspace.org if this build is for our deployment architecture and demo branch.
|
||||
- name: Redeploy demo.dspace.org (based on maintenace branch)
|
||||
if: |
|
||||
!matrix.isPR &&
|
||||
env.REDEPLOY_DEMO_URL != '' &&
|
||||
matrix.arch == env.DEPLOY_ARCH &&
|
||||
github.ref_name == env.DEPLOY_DEMO_BRANCH
|
||||
run: |
|
||||
curl -X POST $REDEPLOY_DEMO_URL
|
||||
|
||||
# Merge Docker digests (from various architectures) into a manifest.
|
||||
# This runs after all Docker builds complete above, and it tells hub.docker.com
|
||||
# that these builds should be all included in the manifest for this tag.
|
||||
# (e.g. AMD64 and ARM64 should be listed as options under the same tagged Docker image)
|
||||
docker-build_manifest:
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- docker-build
|
||||
steps:
|
||||
- name: Download Docker build digests
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: digests-${{ inputs.build_id }}
|
||||
path: /tmp/digests
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Add Docker metadata for image
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.IMAGE_NAME }}
|
||||
tags: ${{ env.IMAGE_TAGS }}
|
||||
flavor: ${{ env.TAGS_FLAVOR }}
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
|
||||
|
||||
- name: Create manifest list from digests and push
|
||||
working-directory: /tmp/digests
|
||||
run: |
|
||||
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||
$(printf '${{ env.IMAGE_NAME }}@sha256:%s ' *)
|
||||
|
||||
- name: Inspect image
|
||||
run: |
|
||||
docker buildx imagetools inspect ${{ env.IMAGE_NAME }}:${{ steps.meta.outputs.version }}
|
14
Dockerfile
14
Dockerfile
@@ -1,14 +1,15 @@
|
||||
# This image will be published as dspace/dspace
|
||||
# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
|
||||
#
|
||||
# - note: default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x
|
||||
# - note: default tag for branch: dspace/dspace: dspace/dspace:latest
|
||||
|
||||
# This Dockerfile uses JDK11 by default, but has also been tested with JDK17.
|
||||
# To build with JDK17, use "--build-arg JDK_VERSION=17"
|
||||
ARG JDK_VERSION=11
|
||||
ARG DSPACE_VERSION=latest
|
||||
|
||||
# Step 1 - Run Maven Build
|
||||
FROM dspace/dspace-dependencies:dspace-7_x as build
|
||||
FROM dspace/dspace-dependencies:${DSPACE_VERSION} as build
|
||||
ARG TARGET_DIR=dspace-installer
|
||||
WORKDIR /app
|
||||
# The dspace-installer directory will be written to /install
|
||||
@@ -18,9 +19,12 @@ RUN mkdir /install \
|
||||
USER dspace
|
||||
# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents)
|
||||
ADD --chown=dspace . /app/
|
||||
# Build DSpace (note: this build doesn't include the optional, deprecated "dspace-rest" webapp)
|
||||
# Build DSpace
|
||||
# Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small
|
||||
RUN mvn --no-transfer-progress package && \
|
||||
# Maven flags here ensure that we skip building test environment and skip all code verification checks.
|
||||
# These flags speed up this compilation as much as reasonably possible.
|
||||
ENV MAVEN_FLAGS="-P-test-environment -Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxml.skip=true"
|
||||
RUN mvn --no-transfer-progress package ${MAVEN_FLAGS} && \
|
||||
mv /app/dspace/target/${TARGET_DIR}/* /install && \
|
||||
mvn clean
|
||||
|
||||
@@ -50,7 +54,7 @@ RUN ant init_installation update_configs update_code update_webapps
|
||||
FROM tomcat:9-jdk${JDK_VERSION}
|
||||
# NOTE: DSPACE_INSTALL must align with the "dspace.dir" default configuration.
|
||||
ENV DSPACE_INSTALL=/dspace
|
||||
# Copy the /dspace directory from 'ant_build' containger to /dspace in this container
|
||||
# Copy the /dspace directory from 'ant_build' container to /dspace in this container
|
||||
COPY --from=ant_build /dspace $DSPACE_INSTALL
|
||||
# Expose Tomcat port and AJP port
|
||||
EXPOSE 8080 8009
|
||||
|
@@ -1,14 +1,15 @@
|
||||
# This image will be published as dspace/dspace-cli
|
||||
# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
|
||||
#
|
||||
# - note: default tag for branch: dspace/dspace-cli: dspace/dspace-cli:dspace-7_x
|
||||
# - note: default tag for branch: dspace/dspace-cli: dspace/dspace-cli:latest
|
||||
|
||||
# This Dockerfile uses JDK11 by default, but has also been tested with JDK17.
|
||||
# To build with JDK17, use "--build-arg JDK_VERSION=17"
|
||||
ARG JDK_VERSION=11
|
||||
ARG DSPACE_VERSION=latest
|
||||
|
||||
# Step 1 - Run Maven Build
|
||||
FROM dspace/dspace-dependencies:dspace-7_x as build
|
||||
FROM dspace/dspace-dependencies:${DSPACE_VERSION} as build
|
||||
ARG TARGET_DIR=dspace-installer
|
||||
WORKDIR /app
|
||||
# The dspace-installer directory will be written to /install
|
||||
|
@@ -15,11 +15,6 @@ RUN useradd dspace \
|
||||
&& mkdir -p /home/dspace \
|
||||
&& chown -Rv dspace: /home/dspace
|
||||
RUN chown -Rv dspace: /app
|
||||
# Need git to support buildnumber-maven-plugin, which lets us know what version of DSpace is being run.
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y --no-install-recommends git \
|
||||
&& apt-get purge -y --auto-remove \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Switch to dspace user & run below commands as that user
|
||||
USER dspace
|
||||
@@ -28,7 +23,10 @@ USER dspace
|
||||
ADD --chown=dspace . /app/
|
||||
|
||||
# Trigger the installation of all maven dependencies (hide download progress messages)
|
||||
RUN mvn --no-transfer-progress package
|
||||
# Maven flags here ensure that we skip final assembly, skip building test environment and skip all code verification checks.
|
||||
# These flags speed up this installation as much as reasonably possible.
|
||||
ENV MAVEN_FLAGS="-P-assembly -P-test-environment -Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxml.skip=true"
|
||||
RUN mvn --no-transfer-progress install ${MAVEN_FLAGS}
|
||||
|
||||
# Clear the contents of the /app directory (including all maven builds), so no artifacts remain.
|
||||
# This ensures when dspace:dspace is built, it will use the Maven local cache (~/.m2) for dependencies
|
||||
|
@@ -1,16 +1,17 @@
|
||||
# This image will be published as dspace/dspace
|
||||
# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
|
||||
#
|
||||
# - note: default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x-test
|
||||
# - note: default tag for branch: dspace/dspace: dspace/dspace:latest-test
|
||||
#
|
||||
# This image is meant for TESTING/DEVELOPMENT ONLY as it deploys the old v6 REST API under HTTP (not HTTPS)
|
||||
|
||||
# This Dockerfile uses JDK11 by default, but has also been tested with JDK17.
|
||||
# To build with JDK17, use "--build-arg JDK_VERSION=17"
|
||||
ARG JDK_VERSION=11
|
||||
ARG DSPACE_VERSION=latest
|
||||
|
||||
# Step 1 - Run Maven Build
|
||||
FROM dspace/dspace-dependencies:dspace-7_x as build
|
||||
FROM dspace/dspace-dependencies:${DSPACE_VERSION} as build
|
||||
ARG TARGET_DIR=dspace-installer
|
||||
WORKDIR /app
|
||||
# The dspace-installer directory will be written to /install
|
||||
@@ -20,9 +21,9 @@ RUN mkdir /install \
|
||||
USER dspace
|
||||
# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents)
|
||||
ADD --chown=dspace . /app/
|
||||
# Build DSpace (INCLUDING the optional, deprecated "dspace-rest" webapp)
|
||||
# Build DSpace
|
||||
# Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small
|
||||
RUN mvn --no-transfer-progress package -Pdspace-rest && \
|
||||
RUN mvn --no-transfer-progress package && \
|
||||
mv /app/dspace/target/${TARGET_DIR}/* /install && \
|
||||
mvn clean
|
||||
|
||||
@@ -66,17 +67,10 @@ ENV CATALINA_OPTS=-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=*:800
|
||||
|
||||
# Link the DSpace 'server' webapp into Tomcat's webapps directory.
|
||||
# This ensures that when we start Tomcat, it runs from /server path (e.g. http://localhost:8080/server/)
|
||||
# Also link the v6.x (deprecated) REST API off the "/rest" path
|
||||
RUN ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/server && \
|
||||
ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest
|
||||
RUN ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/server
|
||||
# If you wish to run "server" webapp off the ROOT path, then comment out the above RUN, and uncomment the below RUN.
|
||||
# You also MUST update the 'dspace.server.url' configuration to match.
|
||||
# Please note that server webapp should only run on one path at a time.
|
||||
#RUN mv /usr/local/tomcat/webapps/ROOT /usr/local/tomcat/webapps/ROOT.bk && \
|
||||
# ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/ROOT && \
|
||||
# ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest
|
||||
# ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/ROOT
|
||||
|
||||
# Overwrite the v6.x (deprecated) REST API's web.xml, so that we can run it on HTTP (defaults to requiring HTTPS)
|
||||
# WARNING: THIS IS OBVIOUSLY INSECURE. NEVER DO THIS IN PRODUCTION.
|
||||
COPY dspace/src/main/docker/test/rest_web.xml $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml
|
||||
RUN sed -i -e "s|\${dspace.dir}|$DSPACE_INSTALL|" $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml
|
||||
|
@@ -1,8 +1,13 @@
|
||||
version: "3.7"
|
||||
|
||||
networks:
|
||||
# Default to using network named 'dspacenet' from docker-compose.yml.
|
||||
# Its full name will be prepended with the project name (e.g. "-p d7" means it will be named "d7_dspacenet")
|
||||
default:
|
||||
name: ${COMPOSE_PROJECT_NAME}_dspacenet
|
||||
external: true
|
||||
services:
|
||||
dspace-cli:
|
||||
image: "${DOCKER_OWNER:-dspace}/dspace-cli:${DSPACE_VER:-dspace-7_x}"
|
||||
image: "${DOCKER_OWNER:-dspace}/dspace-cli:${DSPACE_VER:-latest}"
|
||||
container_name: dspace-cli
|
||||
build:
|
||||
context: .
|
||||
@@ -26,13 +31,8 @@ services:
|
||||
- ./dspace/config:/dspace/config
|
||||
entrypoint: /dspace/bin/dspace
|
||||
command: help
|
||||
networks:
|
||||
- dspacenet
|
||||
tty: true
|
||||
stdin_open: true
|
||||
|
||||
volumes:
|
||||
assetstore:
|
||||
|
||||
networks:
|
||||
dspacenet:
|
||||
|
@@ -28,14 +28,15 @@ services:
|
||||
# proxies.trusted.ipranges: This setting is required for a REST API running in Docker to trust requests
|
||||
# from the host machine. This IP range MUST correspond to the 'dspacenet' subnet defined above.
|
||||
proxies__P__trusted__P__ipranges: '172.23.0'
|
||||
image: "${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-dspace-7_x-test}"
|
||||
LOGGING_CONFIG: /dspace/config/log4j2-container.xml
|
||||
image: "${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-latest-test}"
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile.test
|
||||
depends_on:
|
||||
- dspacedb
|
||||
networks:
|
||||
dspacenet:
|
||||
- dspacenet
|
||||
ports:
|
||||
- published: 8080
|
||||
target: 8080
|
||||
@@ -66,7 +67,7 @@ services:
|
||||
dspacedb:
|
||||
container_name: dspacedb
|
||||
# Uses a custom Postgres image with pgcrypto installed
|
||||
image: "${DOCKER_OWNER:-dspace}/dspace-postgres-pgcrypto:${DSPACE_VER:-dspace-7_x}"
|
||||
image: "${DOCKER_OWNER:-dspace}/dspace-postgres-pgcrypto:${DSPACE_VER:-latest}"
|
||||
build:
|
||||
# Must build out of subdirectory to have access to install script for pgcrypto
|
||||
context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/
|
||||
@@ -86,10 +87,12 @@ services:
|
||||
# DSpace Solr container
|
||||
dspacesolr:
|
||||
container_name: dspacesolr
|
||||
image: "${DOCKER_OWNER:-dspace}/dspace-solr:${DSPACE_VER:-dspace-7_x}"
|
||||
image: "${DOCKER_OWNER:-dspace}/dspace-solr:${DSPACE_VER:-latest}"
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./dspace/src/main/docker/dspace-solr/Dockerfile
|
||||
context: ./dspace/src/main/docker/dspace-solr/
|
||||
# Provide path to Solr configs necessary to build Docker image
|
||||
additional_contexts:
|
||||
solrconfigs: ./dspace/solr/
|
||||
args:
|
||||
SOLR_VERSION: "${SOLR_VER:-8.11}"
|
||||
networks:
|
||||
@@ -120,6 +123,10 @@ services:
|
||||
cp -r /opt/solr/server/solr/configsets/search/* search
|
||||
precreate-core statistics /opt/solr/server/solr/configsets/statistics
|
||||
cp -r /opt/solr/server/solr/configsets/statistics/* statistics
|
||||
precreate-core qaevent /opt/solr/server/solr/configsets/qaevent
|
||||
cp -r /opt/solr/server/solr/configsets/qaevent/* qaevent
|
||||
precreate-core suggestion /opt/solr/server/solr/configsets/suggestion
|
||||
cp -r /opt/solr/server/solr/configsets/suggestion/* suggestion
|
||||
exec solr -f
|
||||
volumes:
|
||||
assetstore:
|
||||
|
@@ -12,7 +12,7 @@
|
||||
<parent>
|
||||
<groupId>org.dspace</groupId>
|
||||
<artifactId>dspace-parent</artifactId>
|
||||
<version>7.6</version>
|
||||
<version>8.0-SNAPSHOT</version>
|
||||
<relativePath>..</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -102,7 +102,7 @@
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>build-helper-maven-plugin</artifactId>
|
||||
<version>3.0.0</version>
|
||||
<version>3.4.0</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<phase>validate</phase>
|
||||
@@ -116,7 +116,10 @@
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>buildnumber-maven-plugin</artifactId>
|
||||
<version>1.4</version>
|
||||
<version>3.2.0</version>
|
||||
<configuration>
|
||||
<revisionOnScmFailure>UNKNOWN_REVISION</revisionOnScmFailure>
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
<phase>validate</phase>
|
||||
@@ -492,12 +495,6 @@
|
||||
<dependency>
|
||||
<groupId>jaxen</groupId>
|
||||
<artifactId>jaxen</artifactId>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<artifactId>xom</artifactId>
|
||||
<groupId>xom</groupId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.jdom</groupId>
|
||||
@@ -531,7 +528,7 @@
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.hamcrest</groupId>
|
||||
<artifactId>hamcrest-all</artifactId>
|
||||
<artifactId>hamcrest</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
@@ -623,7 +620,7 @@
|
||||
<dependency>
|
||||
<groupId>com.maxmind.geoip2</groupId>
|
||||
<artifactId>geoip2</artifactId>
|
||||
<version>2.11.0</version>
|
||||
<version>2.17.0</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.ant</groupId>
|
||||
@@ -632,7 +629,7 @@
|
||||
<dependency>
|
||||
<groupId>dnsjava</groupId>
|
||||
<artifactId>dnsjava</artifactId>
|
||||
<version>2.1.7</version>
|
||||
<version>2.1.9</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
@@ -668,7 +665,7 @@
|
||||
<dependency>
|
||||
<groupId>org.flywaydb</groupId>
|
||||
<artifactId>flyway-core</artifactId>
|
||||
<version>8.4.4</version>
|
||||
<version>8.5.13</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Google Analytics -->
|
||||
@@ -703,10 +700,6 @@
|
||||
<artifactId>annotations</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>joda-time</groupId>
|
||||
<artifactId>joda-time</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>javax.inject</groupId>
|
||||
<artifactId>javax.inject</artifactId>
|
||||
@@ -776,7 +769,7 @@
|
||||
<dependency>
|
||||
<groupId>org.json</groupId>
|
||||
<artifactId>json</artifactId>
|
||||
<version>20230227</version>
|
||||
<version>20231013</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Useful for testing command-line tools -->
|
||||
@@ -791,7 +784,7 @@
|
||||
<dependency>
|
||||
<groupId>com.opencsv</groupId>
|
||||
<artifactId>opencsv</artifactId>
|
||||
<version>5.6</version>
|
||||
<version>5.9</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Email templating -->
|
||||
@@ -809,7 +802,7 @@
|
||||
<dependency>
|
||||
<groupId>org.apache.bcel</groupId>
|
||||
<artifactId>bcel</artifactId>
|
||||
<version>6.6.0</version>
|
||||
<version>6.7.0</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
@@ -818,6 +811,19 @@
|
||||
<groupId>eu.openaire</groupId>
|
||||
<artifactId>funders-model</artifactId>
|
||||
<version>2.0.0</version>
|
||||
<exclusions>
|
||||
<!-- Newer version pulled in via Jersey below -->
|
||||
<exclusion>
|
||||
<groupId>org.javassist</groupId>
|
||||
<artifactId>javassist</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>eu.openaire</groupId>
|
||||
<artifactId>broker-client</artifactId>
|
||||
<version>1.1.2</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
@@ -856,32 +862,37 @@
|
||||
<dependencyManagement>
|
||||
<dependencies>
|
||||
<!-- for mockserver -->
|
||||
<!-- Solve dependency convergence issues related to
|
||||
<!-- Solve dependency convergence issues related to Solr and
|
||||
'mockserver-junit-rule' by selecting the versions we want to use. -->
|
||||
<dependency>
|
||||
<groupId>io.netty</groupId>
|
||||
<artifactId>netty-buffer</artifactId>
|
||||
<version>4.1.68.Final</version>
|
||||
<version>4.1.106.Final</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.netty</groupId>
|
||||
<artifactId>netty-transport</artifactId>
|
||||
<version>4.1.68.Final</version>
|
||||
<version>4.1.106.Final</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.netty</groupId>
|
||||
<artifactId>netty-transport-native-unix-common</artifactId>
|
||||
<version>4.1.106.Final</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.netty</groupId>
|
||||
<artifactId>netty-common</artifactId>
|
||||
<version>4.1.68.Final</version>
|
||||
<version>4.1.106.Final</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.netty</groupId>
|
||||
<artifactId>netty-handler</artifactId>
|
||||
<version>4.1.68.Final</version>
|
||||
<version>4.1.106.Final</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.netty</groupId>
|
||||
<artifactId>netty-codec</artifactId>
|
||||
<version>4.1.68.Final</version>
|
||||
<version>4.1.106.Final</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.velocity</groupId>
|
||||
@@ -891,7 +902,7 @@
|
||||
<dependency>
|
||||
<groupId>org.xmlunit</groupId>
|
||||
<artifactId>xmlunit-core</artifactId>
|
||||
<version>2.8.0</version>
|
||||
<version>2.9.1</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
@@ -917,7 +928,7 @@
|
||||
<dependency>
|
||||
<groupId>org.scala-lang</groupId>
|
||||
<artifactId>scala-library</artifactId>
|
||||
<version>2.13.9</version>
|
||||
<version>2.13.11</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
@@ -22,9 +22,21 @@ public interface AccessStatusHelper {
|
||||
*
|
||||
* @param context the DSpace context
|
||||
* @param item the item
|
||||
* @param threshold the embargo threshold date
|
||||
* @return an access status value
|
||||
* @throws SQLException An exception that provides information on a database access error or other errors.
|
||||
*/
|
||||
public String getAccessStatusFromItem(Context context, Item item, Date threshold)
|
||||
throws SQLException;
|
||||
|
||||
/**
|
||||
* Retrieve embargo information for the item
|
||||
*
|
||||
* @param context the DSpace context
|
||||
* @param item the item to check for embargo information
|
||||
* @param threshold the embargo threshold date
|
||||
* @return an embargo date
|
||||
* @throws SQLException An exception that provides information on a database access error or other errors.
|
||||
*/
|
||||
public String getEmbargoFromItem(Context context, Item item, Date threshold) throws SQLException;
|
||||
}
|
||||
|
@@ -8,6 +8,8 @@
|
||||
package org.dspace.access.status;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.time.LocalDate;
|
||||
import java.time.ZoneId;
|
||||
import java.util.Date;
|
||||
|
||||
import org.dspace.access.status.service.AccessStatusService;
|
||||
@@ -15,7 +17,6 @@ import org.dspace.content.Item;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.service.PluginService;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.joda.time.LocalDate;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
@@ -55,7 +56,10 @@ public class AccessStatusServiceImpl implements AccessStatusService {
|
||||
int month = configurationService.getIntProperty("access.status.embargo.forever.month");
|
||||
int day = configurationService.getIntProperty("access.status.embargo.forever.day");
|
||||
|
||||
forever_date = new LocalDate(year, month, day).toDate();
|
||||
forever_date = Date.from(LocalDate.of(year, month, day)
|
||||
.atStartOfDay()
|
||||
.atZone(ZoneId.systemDefault())
|
||||
.toInstant());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -63,4 +67,9 @@ public class AccessStatusServiceImpl implements AccessStatusService {
|
||||
public String getAccessStatus(Context context, Item item) throws SQLException {
|
||||
return helper.getAccessStatusFromItem(context, item, forever_date);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getEmbargoFromItem(Context context, Item item) throws SQLException {
|
||||
return helper.getEmbargoFromItem(context, item, forever_date);
|
||||
}
|
||||
}
|
||||
|
@@ -26,6 +26,7 @@ import org.dspace.content.service.ItemService;
|
||||
import org.dspace.core.Constants;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.eperson.Group;
|
||||
import org.joda.time.LocalDate;
|
||||
|
||||
/**
|
||||
* Default plugin implementation of the access status helper.
|
||||
@@ -33,6 +34,11 @@ import org.dspace.eperson.Group;
|
||||
* calculate the access status of an item based on the policies of
|
||||
* the primary or the first bitstream in the original bundle.
|
||||
* Users can override this method for enhanced functionality.
|
||||
*
|
||||
* The getEmbargoInformationFromItem method provides a simple logic to
|
||||
* * retrieve embargo information of bitstreams from an item based on the policies of
|
||||
* * the primary or the first bitstream in the original bundle.
|
||||
* * Users can override this method for enhanced functionality.
|
||||
*/
|
||||
public class DefaultAccessStatusHelper implements AccessStatusHelper {
|
||||
public static final String EMBARGO = "embargo";
|
||||
@@ -54,12 +60,12 @@ public class DefaultAccessStatusHelper implements AccessStatusHelper {
|
||||
|
||||
/**
|
||||
* Look at the item's policies to determine an access status value.
|
||||
* It is also considering a date threshold for embargos and restrictions.
|
||||
* It is also considering a date threshold for embargoes and restrictions.
|
||||
*
|
||||
* If the item is null, simply returns the "unknown" value.
|
||||
*
|
||||
* @param context the DSpace context
|
||||
* @param item the item to embargo
|
||||
* @param item the item to check for embargoes
|
||||
* @param threshold the embargo threshold date
|
||||
* @return an access status value
|
||||
*/
|
||||
@@ -86,7 +92,7 @@ public class DefaultAccessStatusHelper implements AccessStatusHelper {
|
||||
.findFirst()
|
||||
.orElse(null);
|
||||
}
|
||||
return caculateAccessStatusForDso(context, bitstream, threshold);
|
||||
return calculateAccessStatusForDso(context, bitstream, threshold);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -104,7 +110,7 @@ public class DefaultAccessStatusHelper implements AccessStatusHelper {
|
||||
* @param threshold the embargo threshold date
|
||||
* @return an access status value
|
||||
*/
|
||||
private String caculateAccessStatusForDso(Context context, DSpaceObject dso, Date threshold)
|
||||
private String calculateAccessStatusForDso(Context context, DSpaceObject dso, Date threshold)
|
||||
throws SQLException {
|
||||
if (dso == null) {
|
||||
return METADATA_ONLY;
|
||||
@@ -156,4 +162,87 @@ public class DefaultAccessStatusHelper implements AccessStatusHelper {
|
||||
}
|
||||
return RESTRICTED;
|
||||
}
|
||||
|
||||
/**
|
||||
* Look at the policies of the primary (or first) bitstream of the item to retrieve its embargo.
|
||||
*
|
||||
* If the item is null, simply returns an empty map with no embargo information.
|
||||
*
|
||||
* @param context the DSpace context
|
||||
* @param item the item to embargo
|
||||
* @return an access status value
|
||||
*/
|
||||
@Override
|
||||
public String getEmbargoFromItem(Context context, Item item, Date threshold)
|
||||
throws SQLException {
|
||||
Date embargoDate;
|
||||
|
||||
// If Item status is not "embargo" then return a null embargo date.
|
||||
String accessStatus = getAccessStatusFromItem(context, item, threshold);
|
||||
|
||||
if (item == null || !accessStatus.equals(EMBARGO)) {
|
||||
return null;
|
||||
}
|
||||
// Consider only the original bundles.
|
||||
List<Bundle> bundles = item.getBundles(Constants.DEFAULT_BUNDLE_NAME);
|
||||
// Check for primary bitstreams first.
|
||||
Bitstream bitstream = bundles.stream()
|
||||
.map(bundle -> bundle.getPrimaryBitstream())
|
||||
.filter(Objects::nonNull)
|
||||
.findFirst()
|
||||
.orElse(null);
|
||||
if (bitstream == null) {
|
||||
// If there is no primary bitstream,
|
||||
// take the first bitstream in the bundles.
|
||||
bitstream = bundles.stream()
|
||||
.map(bundle -> bundle.getBitstreams())
|
||||
.flatMap(List::stream)
|
||||
.findFirst()
|
||||
.orElse(null);
|
||||
}
|
||||
|
||||
if (bitstream == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
embargoDate = this.retrieveShortestEmbargo(context, bitstream);
|
||||
|
||||
return embargoDate != null ? embargoDate.toString() : null;
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
private Date retrieveShortestEmbargo(Context context, Bitstream bitstream) throws SQLException {
|
||||
Date embargoDate = null;
|
||||
// Only consider read policies.
|
||||
List<ResourcePolicy> policies = authorizeService
|
||||
.getPoliciesActionFilter(context, bitstream, Constants.READ);
|
||||
|
||||
// Looks at all read policies.
|
||||
for (ResourcePolicy policy : policies) {
|
||||
boolean isValid = resourcePolicyService.isDateValid(policy);
|
||||
Group group = policy.getGroup();
|
||||
|
||||
if (group != null && StringUtils.equals(group.getName(), Group.ANONYMOUS)) {
|
||||
// Only calculate the status for the anonymous group.
|
||||
if (!isValid) {
|
||||
// If the policy is not valid there is an active embargo
|
||||
Date startDate = policy.getStartDate();
|
||||
|
||||
if (startDate != null && !startDate.before(LocalDate.now().toDate())) {
|
||||
// There is an active embargo: aim to take the shortest embargo (account for rare cases where
|
||||
// more than one resource policy exists)
|
||||
if (embargoDate == null) {
|
||||
embargoDate = startDate;
|
||||
} else {
|
||||
embargoDate = startDate.before(embargoDate) ? startDate : embargoDate;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return embargoDate;
|
||||
}
|
||||
}
|
||||
|
@@ -40,7 +40,18 @@ public interface AccessStatusService {
|
||||
*
|
||||
* @param context the DSpace context
|
||||
* @param item the item
|
||||
* @return an access status value
|
||||
* @throws SQLException An exception that provides information on a database access error or other errors.
|
||||
*/
|
||||
public String getAccessStatus(Context context, Item item) throws SQLException;
|
||||
|
||||
/**
|
||||
* Retrieve embargo information for the item
|
||||
*
|
||||
* @param context the DSpace context
|
||||
* @param item the item to check for embargo information
|
||||
* @return an embargo date
|
||||
* @throws SQLException An exception that provides information on a database access error or other errors.
|
||||
*/
|
||||
public String getEmbargoFromItem(Context context, Item item) throws SQLException;
|
||||
}
|
||||
|
@@ -116,6 +116,17 @@ public final class CreateAdministrator {
|
||||
protected CreateAdministrator()
|
||||
throws Exception {
|
||||
context = new Context();
|
||||
try {
|
||||
context.getDBConfig();
|
||||
} catch (NullPointerException npr) {
|
||||
// if database is null, there is no point in continuing. Prior to this exception and catch,
|
||||
// NullPointerException was thrown, that wasn't very helpful.
|
||||
throw new IllegalStateException("Problem connecting to database. This " +
|
||||
"indicates issue with either network or version (or possibly some other). " +
|
||||
"If you are running this in docker-compose, please make sure dspace-cli was " +
|
||||
"built from the same sources as running dspace container AND that they are in " +
|
||||
"the same project/network.");
|
||||
}
|
||||
groupService = EPersonServiceFactory.getInstance().getGroupService();
|
||||
ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
|
||||
}
|
||||
|
@@ -464,7 +464,7 @@ public class BulkAccessControl extends DSpaceRunnable<BulkAccessControlScriptCon
|
||||
.forEach(accessCondition -> createResourcePolicy(item, accessCondition,
|
||||
itemAccessConditions.get(accessCondition.getName())));
|
||||
|
||||
itemService.adjustItemPolicies(context, item, item.getOwningCollection());
|
||||
itemService.adjustItemPolicies(context, item, item.getOwningCollection(), false);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -578,6 +578,10 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
|
||||
wfItem = workflowService.startWithoutNotify(c, wsItem);
|
||||
}
|
||||
} else {
|
||||
// Add provenance info
|
||||
String provenance = installItemService.getSubmittedByProvenanceMessage(c, wsItem.getItem());
|
||||
itemService.addMetadata(c, item, MetadataSchemaEnum.DC.getName(),
|
||||
"description", "provenance", "en", provenance);
|
||||
// Install the item
|
||||
installItemService.installItem(c, wsItem);
|
||||
}
|
||||
@@ -1363,7 +1367,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
|
||||
* is the field is defined as authority controlled
|
||||
*/
|
||||
private static boolean isAuthorityControlledField(String md) {
|
||||
String mdf = StringUtils.substringAfter(md, ":");
|
||||
String mdf = md.contains(":") ? StringUtils.substringAfter(md, ":") : md;
|
||||
mdf = StringUtils.substringBefore(mdf, "[");
|
||||
return authorityControlled.contains(mdf);
|
||||
}
|
||||
|
@@ -774,6 +774,10 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
|
||||
// put item in system
|
||||
if (!isTest) {
|
||||
try {
|
||||
// Add provenance info
|
||||
String provenance = installItemService.getSubmittedByProvenanceMessage(c, wi.getItem());
|
||||
itemService.addMetadata(c, wi.getItem(), MetadataSchemaEnum.DC.getName(),
|
||||
"description", "provenance", "en", provenance);
|
||||
installItemService.installItem(c, wi, myhandle);
|
||||
} catch (Exception e) {
|
||||
workspaceItemService.deleteAll(c, wi);
|
||||
|
@@ -21,6 +21,7 @@ import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.scripts.DSpaceRunnable;
|
||||
import org.dspace.scripts.DSpaceRunnable.StepResult;
|
||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||
import org.dspace.scripts.factory.ScriptServiceFactory;
|
||||
import org.dspace.scripts.handler.DSpaceRunnableHandler;
|
||||
@@ -145,8 +146,13 @@ public class ScriptLauncher {
|
||||
private static int executeScript(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler,
|
||||
DSpaceRunnable script) {
|
||||
try {
|
||||
script.initialize(args, dSpaceRunnableHandler, null);
|
||||
StepResult result = script.initialize(args, dSpaceRunnableHandler, null);
|
||||
// check the StepResult, only run the script if the result is Continue;
|
||||
// otherwise - for example the script is started with the help as argument, nothing is to do
|
||||
if (StepResult.Continue.equals(result)) {
|
||||
// runs the script, the normal initialization is successful
|
||||
script.run();
|
||||
}
|
||||
return 0;
|
||||
} catch (ParseException e) {
|
||||
script.printHelp();
|
||||
|
@@ -10,6 +10,7 @@ package org.dspace.app.mediafilter;
|
||||
import java.io.InputStream;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
@@ -40,6 +41,7 @@ import org.dspace.eperson.Group;
|
||||
import org.dspace.eperson.service.GroupService;
|
||||
import org.dspace.scripts.handler.DSpaceRunnableHandler;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.util.ThrowableUtils;
|
||||
import org.springframework.beans.factory.InitializingBean;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
@@ -225,23 +227,9 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
|
||||
filtered = true;
|
||||
}
|
||||
} catch (Exception e) {
|
||||
String handle = myItem.getHandle();
|
||||
List<Bundle> bundles = myBitstream.getBundles();
|
||||
long size = myBitstream.getSizeBytes();
|
||||
String checksum = myBitstream.getChecksum() + " (" + myBitstream.getChecksumAlgorithm() + ")";
|
||||
int assetstore = myBitstream.getStoreNumber();
|
||||
|
||||
// Printout helpful information to find the errored bitstream.
|
||||
StringBuilder sb = new StringBuilder("ERROR filtering, skipping bitstream:\n");
|
||||
sb.append("\tItem Handle: ").append(handle);
|
||||
for (Bundle bundle : bundles) {
|
||||
sb.append("\tBundle Name: ").append(bundle.getName());
|
||||
}
|
||||
sb.append("\tFile Size: ").append(size);
|
||||
sb.append("\tChecksum: ").append(checksum);
|
||||
sb.append("\tAsset Store: ").append(assetstore);
|
||||
logError(sb.toString());
|
||||
logError(e.getMessage(), e);
|
||||
logError(formatBitstreamDetails(myItem.getHandle(), myBitstream));
|
||||
logError(ThrowableUtils.formatCauseChain(e));
|
||||
}
|
||||
} else if (filterClass instanceof SelfRegisterInputFormats) {
|
||||
// Filter implements self registration, so check to see if it should be applied
|
||||
@@ -319,10 +307,10 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
|
||||
|
||||
// check if destination bitstream exists
|
||||
Bundle existingBundle = null;
|
||||
List<Bitstream> existingBitstreams = new ArrayList<Bitstream>();
|
||||
List<Bitstream> existingBitstreams = new ArrayList<>();
|
||||
List<Bundle> bundles = itemService.getBundles(item, formatFilter.getBundleName());
|
||||
|
||||
if (bundles.size() > 0) {
|
||||
if (!bundles.isEmpty()) {
|
||||
// only finds the last matching bundle and all matching bitstreams in the proper bundle(s)
|
||||
for (Bundle bundle : bundles) {
|
||||
List<Bitstream> bitstreams = bundle.getBitstreams();
|
||||
@@ -337,7 +325,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
|
||||
}
|
||||
|
||||
// if exists and overwrite = false, exit
|
||||
if (!overWrite && (existingBitstreams.size() > 0)) {
|
||||
if (!overWrite && (!existingBitstreams.isEmpty())) {
|
||||
if (!isQuiet) {
|
||||
logInfo("SKIPPED: bitstream " + source.getID()
|
||||
+ " (item: " + item.getHandle() + ") because '" + newName + "' already exists");
|
||||
@@ -370,7 +358,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
|
||||
}
|
||||
|
||||
Bundle targetBundle; // bundle we're modifying
|
||||
if (bundles.size() < 1) {
|
||||
if (bundles.isEmpty()) {
|
||||
// create new bundle if needed
|
||||
targetBundle = bundleService.create(context, item, formatFilter.getBundleName());
|
||||
} else {
|
||||
@@ -399,6 +387,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
|
||||
|
||||
} catch (OutOfMemoryError oome) {
|
||||
logError("!!! OutOfMemoryError !!!");
|
||||
logError(formatBitstreamDetails(item.getHandle(), source));
|
||||
}
|
||||
|
||||
// we are overwriting, so remove old bitstream
|
||||
@@ -496,6 +485,37 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Describe a Bitstream in detail. Format a single line of text with
|
||||
* information such as Bitstore index, backing file ID, size, checksum,
|
||||
* enclosing Item and Bundles.
|
||||
*
|
||||
* @param itemHandle Handle of the Item by which we found the Bitstream.
|
||||
* @param bitstream the Bitstream to be described.
|
||||
* @return Bitstream details.
|
||||
*/
|
||||
private String formatBitstreamDetails(String itemHandle,
|
||||
Bitstream bitstream) {
|
||||
List<Bundle> bundles;
|
||||
try {
|
||||
bundles = bitstream.getBundles();
|
||||
} catch (SQLException ex) {
|
||||
logError("Unexpected error fetching Bundles", ex);
|
||||
bundles = Collections.EMPTY_LIST;
|
||||
}
|
||||
StringBuilder sb = new StringBuilder("ERROR filtering, skipping bitstream:\n");
|
||||
sb.append("\tItem Handle: ").append(itemHandle);
|
||||
for (Bundle bundle : bundles) {
|
||||
sb.append("\tBundle Name: ").append(bundle.getName());
|
||||
}
|
||||
sb.append("\tFile Size: ").append(bitstream.getSizeBytes());
|
||||
sb.append("\tChecksum: ").append(bitstream.getChecksum())
|
||||
.append(" (").append(bitstream.getChecksumAlgorithm()).append(')');
|
||||
sb.append("\tAsset Store: ").append(bitstream.getStoreNumber());
|
||||
sb.append("\tInternal ID: ").append(bitstream.getInternalId());
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
private void logInfo(String message) {
|
||||
if (handler != null) {
|
||||
handler.logInfo(message);
|
||||
|
@@ -7,18 +7,10 @@
|
||||
*/
|
||||
package org.dspace.app.sitemap;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.HttpURLConnection;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.net.URLEncoder;
|
||||
import java.sql.SQLException;
|
||||
import java.util.Date;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
@@ -29,12 +21,8 @@ import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.collections4.CollectionUtils;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.lang3.ArrayUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Community;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.CollectionService;
|
||||
import org.dspace.content.service.CommunityService;
|
||||
@@ -43,6 +31,7 @@ import org.dspace.core.Context;
|
||||
import org.dspace.core.LogHelper;
|
||||
import org.dspace.discovery.DiscoverQuery;
|
||||
import org.dspace.discovery.DiscoverResult;
|
||||
import org.dspace.discovery.IndexableObject;
|
||||
import org.dspace.discovery.SearchService;
|
||||
import org.dspace.discovery.SearchServiceException;
|
||||
import org.dspace.discovery.SearchUtils;
|
||||
@@ -68,6 +57,7 @@ public class GenerateSitemaps {
|
||||
private static final ConfigurationService configurationService =
|
||||
DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
private static final SearchService searchService = SearchUtils.getSearchService();
|
||||
private static final int PAGE_SIZE = 100;
|
||||
|
||||
/**
|
||||
* Default constructor
|
||||
@@ -87,11 +77,6 @@ public class GenerateSitemaps {
|
||||
"do not generate sitemaps.org protocol sitemap");
|
||||
options.addOption("b", "no_htmlmap", false,
|
||||
"do not generate a basic HTML sitemap");
|
||||
options.addOption("a", "ping_all", false,
|
||||
"ping configured search engines");
|
||||
options
|
||||
.addOption("p", "ping", true,
|
||||
"ping specified search engine URL");
|
||||
options
|
||||
.addOption("d", "delete", false,
|
||||
"delete sitemaps dir and its contents");
|
||||
@@ -116,14 +101,13 @@ public class GenerateSitemaps {
|
||||
}
|
||||
|
||||
/*
|
||||
* Sanity check -- if no sitemap generation or pinging to do, or deletion, print usage
|
||||
* Sanity check -- if no sitemap generation or deletion, print usage
|
||||
*/
|
||||
if (line.getArgs().length != 0 || line.hasOption('d') || line.hasOption('b')
|
||||
&& line.hasOption('s') && !line.hasOption('g')
|
||||
&& !line.hasOption('m') && !line.hasOption('y')
|
||||
&& !line.hasOption('p')) {
|
||||
&& !line.hasOption('m') && !line.hasOption('y')) {
|
||||
System.err
|
||||
.println("Nothing to do (no sitemap to generate, no search engines to ping)");
|
||||
.println("Nothing to do (no sitemap to generate)");
|
||||
hf.printHelp(usage, options);
|
||||
System.exit(1);
|
||||
}
|
||||
@@ -137,20 +121,6 @@ public class GenerateSitemaps {
|
||||
deleteSitemaps();
|
||||
}
|
||||
|
||||
if (line.hasOption('a')) {
|
||||
pingConfiguredSearchEngines();
|
||||
}
|
||||
|
||||
if (line.hasOption('p')) {
|
||||
try {
|
||||
pingSearchEngine(line.getOptionValue('p'));
|
||||
} catch (MalformedURLException me) {
|
||||
System.err
|
||||
.println("Bad search engine URL (include all except sitemap URL)");
|
||||
System.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
@@ -189,7 +159,10 @@ public class GenerateSitemaps {
|
||||
*/
|
||||
public static void generateSitemaps(boolean makeHTMLMap, boolean makeSitemapOrg) throws SQLException, IOException {
|
||||
String uiURLStem = configurationService.getProperty("dspace.ui.url");
|
||||
String sitemapStem = uiURLStem + "/sitemap";
|
||||
if (!uiURLStem.endsWith("/")) {
|
||||
uiURLStem = uiURLStem + '/';
|
||||
}
|
||||
String sitemapStem = uiURLStem + "sitemap";
|
||||
|
||||
File outputDir = new File(configurationService.getProperty("sitemap.dir"));
|
||||
if (!outputDir.exists() && !outputDir.mkdir()) {
|
||||
@@ -208,171 +181,113 @@ public class GenerateSitemaps {
|
||||
}
|
||||
|
||||
Context c = new Context(Context.Mode.READ_ONLY);
|
||||
|
||||
List<Community> comms = communityService.findAll(c);
|
||||
|
||||
for (Community comm : comms) {
|
||||
String url = uiURLStem + "/communities/" + comm.getID();
|
||||
|
||||
if (makeHTMLMap) {
|
||||
html.addURL(url, null);
|
||||
}
|
||||
if (makeSitemapOrg) {
|
||||
sitemapsOrg.addURL(url, null);
|
||||
}
|
||||
|
||||
c.uncacheEntity(comm);
|
||||
}
|
||||
|
||||
List<Collection> colls = collectionService.findAll(c);
|
||||
|
||||
for (Collection coll : colls) {
|
||||
String url = uiURLStem + "/collections/" + coll.getID();
|
||||
|
||||
if (makeHTMLMap) {
|
||||
html.addURL(url, null);
|
||||
}
|
||||
if (makeSitemapOrg) {
|
||||
sitemapsOrg.addURL(url, null);
|
||||
}
|
||||
|
||||
c.uncacheEntity(coll);
|
||||
}
|
||||
|
||||
Iterator<Item> allItems = itemService.findAll(c);
|
||||
int itemCount = 0;
|
||||
|
||||
while (allItems.hasNext()) {
|
||||
Item i = allItems.next();
|
||||
|
||||
DiscoverQuery entityQuery = new DiscoverQuery();
|
||||
entityQuery.setQuery("search.uniqueid:\"Item-" + i.getID() + "\" and entityType:*");
|
||||
entityQuery.addSearchField("entityType");
|
||||
int offset = 0;
|
||||
long commsCount = 0;
|
||||
long collsCount = 0;
|
||||
long itemsCount = 0;
|
||||
|
||||
try {
|
||||
DiscoverResult discoverResult = searchService.search(c, entityQuery);
|
||||
DiscoverQuery discoveryQuery = new DiscoverQuery();
|
||||
discoveryQuery.setMaxResults(PAGE_SIZE);
|
||||
discoveryQuery.setQuery("search.resourcetype:Community");
|
||||
do {
|
||||
discoveryQuery.setStart(offset);
|
||||
DiscoverResult discoverResult = searchService.search(c, discoveryQuery);
|
||||
List<IndexableObject> docs = discoverResult.getIndexableObjects();
|
||||
commsCount = discoverResult.getTotalSearchResults();
|
||||
|
||||
String url;
|
||||
if (CollectionUtils.isNotEmpty(discoverResult.getIndexableObjects())
|
||||
&& CollectionUtils.isNotEmpty(discoverResult.getSearchDocument(
|
||||
discoverResult.getIndexableObjects().get(0)).get(0).getSearchFieldValues("entityType"))
|
||||
&& StringUtils.isNotBlank(discoverResult.getSearchDocument(
|
||||
discoverResult.getIndexableObjects().get(0)).get(0).getSearchFieldValues("entityType").get(0))
|
||||
) {
|
||||
url = uiURLStem + "/entities/" + StringUtils.lowerCase(discoverResult.getSearchDocument(
|
||||
discoverResult.getIndexableObjects().get(0))
|
||||
.get(0).getSearchFieldValues("entityType").get(0)) + "/" + i.getID();
|
||||
} else {
|
||||
url = uiURLStem + "/items/" + i.getID();
|
||||
}
|
||||
Date lastMod = i.getLastModified();
|
||||
for (IndexableObject doc : docs) {
|
||||
String url = uiURLStem + "communities/" + doc.getID();
|
||||
c.uncacheEntity(doc.getIndexedObject());
|
||||
|
||||
if (makeHTMLMap) {
|
||||
html.addURL(url, lastMod);
|
||||
html.addURL(url, null);
|
||||
}
|
||||
if (makeSitemapOrg) {
|
||||
sitemapsOrg.addURL(url, lastMod);
|
||||
sitemapsOrg.addURL(url, null);
|
||||
}
|
||||
} catch (SearchServiceException e) {
|
||||
log.error("Failed getting entitytype through solr for item " + i.getID() + ": " + e.getMessage());
|
||||
}
|
||||
offset += PAGE_SIZE;
|
||||
} while (offset < commsCount);
|
||||
|
||||
c.uncacheEntity(i);
|
||||
offset = 0;
|
||||
discoveryQuery = new DiscoverQuery();
|
||||
discoveryQuery.setMaxResults(PAGE_SIZE);
|
||||
discoveryQuery.setQuery("search.resourcetype:Collection");
|
||||
do {
|
||||
discoveryQuery.setStart(offset);
|
||||
DiscoverResult discoverResult = searchService.search(c, discoveryQuery);
|
||||
List<IndexableObject> docs = discoverResult.getIndexableObjects();
|
||||
collsCount = discoverResult.getTotalSearchResults();
|
||||
|
||||
itemCount++;
|
||||
for (IndexableObject doc : docs) {
|
||||
String url = uiURLStem + "collections/" + doc.getID();
|
||||
c.uncacheEntity(doc.getIndexedObject());
|
||||
|
||||
if (makeHTMLMap) {
|
||||
html.addURL(url, null);
|
||||
}
|
||||
if (makeSitemapOrg) {
|
||||
sitemapsOrg.addURL(url, null);
|
||||
}
|
||||
}
|
||||
offset += PAGE_SIZE;
|
||||
} while (offset < collsCount);
|
||||
|
||||
offset = 0;
|
||||
discoveryQuery = new DiscoverQuery();
|
||||
discoveryQuery.setMaxResults(PAGE_SIZE);
|
||||
discoveryQuery.setQuery("search.resourcetype:Item");
|
||||
discoveryQuery.addSearchField("search.entitytype");
|
||||
do {
|
||||
|
||||
discoveryQuery.setStart(offset);
|
||||
DiscoverResult discoverResult = searchService.search(c, discoveryQuery);
|
||||
List<IndexableObject> docs = discoverResult.getIndexableObjects();
|
||||
itemsCount = discoverResult.getTotalSearchResults();
|
||||
|
||||
for (IndexableObject doc : docs) {
|
||||
String url;
|
||||
List<String> entityTypeFieldValues = discoverResult.getSearchDocument(doc).get(0)
|
||||
.getSearchFieldValues("search.entitytype");
|
||||
if (CollectionUtils.isNotEmpty(entityTypeFieldValues)) {
|
||||
url = uiURLStem + "entities/" + StringUtils.lowerCase(entityTypeFieldValues.get(0)) + "/"
|
||||
+ doc.getID();
|
||||
} else {
|
||||
url = uiURLStem + "items/" + doc.getID();
|
||||
}
|
||||
Date lastMod = doc.getLastModified();
|
||||
c.uncacheEntity(doc.getIndexedObject());
|
||||
|
||||
if (makeHTMLMap) {
|
||||
html.addURL(url, null);
|
||||
}
|
||||
if (makeSitemapOrg) {
|
||||
sitemapsOrg.addURL(url, null);
|
||||
}
|
||||
}
|
||||
offset += PAGE_SIZE;
|
||||
} while (offset < itemsCount);
|
||||
|
||||
if (makeHTMLMap) {
|
||||
int files = html.finish();
|
||||
log.info(LogHelper.getHeader(c, "write_sitemap",
|
||||
"type=html,num_files=" + files + ",communities="
|
||||
+ comms.size() + ",collections=" + colls.size()
|
||||
+ ",items=" + itemCount));
|
||||
+ commsCount + ",collections=" + collsCount
|
||||
+ ",items=" + itemsCount));
|
||||
}
|
||||
|
||||
if (makeSitemapOrg) {
|
||||
int files = sitemapsOrg.finish();
|
||||
log.info(LogHelper.getHeader(c, "write_sitemap",
|
||||
"type=html,num_files=" + files + ",communities="
|
||||
+ comms.size() + ",collections=" + colls.size()
|
||||
+ ",items=" + itemCount));
|
||||
+ commsCount + ",collections=" + collsCount
|
||||
+ ",items=" + itemsCount));
|
||||
}
|
||||
|
||||
} catch (SearchServiceException e) {
|
||||
throw new RuntimeException(e);
|
||||
} finally {
|
||||
c.abort();
|
||||
}
|
||||
|
||||
/**
|
||||
* Ping all search engines configured in {@code dspace.cfg}.
|
||||
*
|
||||
* @throws UnsupportedEncodingException theoretically should never happen
|
||||
*/
|
||||
public static void pingConfiguredSearchEngines()
|
||||
throws UnsupportedEncodingException {
|
||||
String[] engineURLs = configurationService
|
||||
.getArrayProperty("sitemap.engineurls");
|
||||
|
||||
if (ArrayUtils.isEmpty(engineURLs)) {
|
||||
log.warn("No search engine URLs configured to ping");
|
||||
return;
|
||||
}
|
||||
|
||||
for (int i = 0; i < engineURLs.length; i++) {
|
||||
try {
|
||||
pingSearchEngine(engineURLs[i]);
|
||||
} catch (MalformedURLException me) {
|
||||
log.warn("Bad search engine URL in configuration: "
|
||||
+ engineURLs[i]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Ping the given search engine.
|
||||
*
|
||||
* @param engineURL Search engine URL minus protocol etc, e.g.
|
||||
* {@code www.google.com}
|
||||
* @throws MalformedURLException if the passed in URL is malformed
|
||||
* @throws UnsupportedEncodingException theoretically should never happen
|
||||
*/
|
||||
public static void pingSearchEngine(String engineURL)
|
||||
throws MalformedURLException, UnsupportedEncodingException {
|
||||
// Set up HTTP proxy
|
||||
if ((StringUtils.isNotBlank(configurationService.getProperty("http.proxy.host")))
|
||||
&& (StringUtils.isNotBlank(configurationService.getProperty("http.proxy.port")))) {
|
||||
System.setProperty("proxySet", "true");
|
||||
System.setProperty("proxyHost", configurationService
|
||||
.getProperty("http.proxy.host"));
|
||||
System.getProperty("proxyPort", configurationService
|
||||
.getProperty("http.proxy.port"));
|
||||
}
|
||||
|
||||
String sitemapURL = configurationService.getProperty("dspace.ui.url")
|
||||
+ "/sitemap";
|
||||
|
||||
URL url = new URL(engineURL + URLEncoder.encode(sitemapURL, "UTF-8"));
|
||||
|
||||
try {
|
||||
HttpURLConnection connection = (HttpURLConnection) url
|
||||
.openConnection();
|
||||
|
||||
BufferedReader in = new BufferedReader(new InputStreamReader(
|
||||
connection.getInputStream()));
|
||||
|
||||
String inputLine;
|
||||
StringBuffer resp = new StringBuffer();
|
||||
while ((inputLine = in.readLine()) != null) {
|
||||
resp.append(inputLine).append("\n");
|
||||
}
|
||||
in.close();
|
||||
|
||||
if (connection.getResponseCode() == 200) {
|
||||
log.info("Pinged " + url.toString() + " successfully");
|
||||
} else {
|
||||
log.warn("Error response pinging " + url.toString() + ":\n"
|
||||
+ resp);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
log.warn("Error pinging " + url.toString(), e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -0,0 +1,140 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.suggestion;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.solr.client.solrj.SolrServerException;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.external.model.ExternalDataObject;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
* Suggestion provider that read the suggestion from the local suggestion solr
|
||||
* core
|
||||
*
|
||||
* @author Andrea Bollini (andrea.bollini at 4science dot it)
|
||||
*
|
||||
*/
|
||||
public abstract class SolrSuggestionProvider implements SuggestionProvider {
|
||||
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SolrSuggestionProvider.class);
|
||||
|
||||
@Autowired
|
||||
protected ItemService itemService;
|
||||
|
||||
@Autowired
|
||||
protected SolrSuggestionStorageService solrSuggestionStorageService;
|
||||
|
||||
private String sourceName;
|
||||
|
||||
public String getSourceName() {
|
||||
return sourceName;
|
||||
}
|
||||
|
||||
public void setSourceName(String sourceName) {
|
||||
this.sourceName = sourceName;
|
||||
}
|
||||
|
||||
public void setItemService(ItemService itemService) {
|
||||
this.itemService = itemService;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long countAllTargets(Context context) {
|
||||
try {
|
||||
return this.solrSuggestionStorageService.countAllTargets(context, sourceName);
|
||||
} catch (SolrServerException | IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public long countUnprocessedSuggestionByTarget(Context context, UUID target) {
|
||||
try {
|
||||
return this.solrSuggestionStorageService.countUnprocessedSuggestionByTarget(context, sourceName, target);
|
||||
} catch (SolrServerException | IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Suggestion> findAllUnprocessedSuggestions(Context context, UUID target, int pageSize, long offset,
|
||||
boolean ascending) {
|
||||
|
||||
try {
|
||||
return this.solrSuggestionStorageService.findAllUnprocessedSuggestions(context, sourceName,
|
||||
target, pageSize, offset, ascending);
|
||||
} catch (SolrServerException | IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<SuggestionTarget> findAllTargets(Context context, int pageSize, long offset) {
|
||||
try {
|
||||
return this.solrSuggestionStorageService.findAllTargets(context, sourceName, pageSize, offset);
|
||||
} catch (SolrServerException | IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Suggestion findUnprocessedSuggestion(Context context, UUID target, String id) {
|
||||
try {
|
||||
return this.solrSuggestionStorageService.findUnprocessedSuggestion(context, sourceName, target, id);
|
||||
} catch (SolrServerException | IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public SuggestionTarget findTarget(Context context, UUID target) {
|
||||
try {
|
||||
return this.solrSuggestionStorageService.findTarget(context, sourceName, target);
|
||||
} catch (SolrServerException | IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void rejectSuggestion(Context context, UUID target, String idPart) {
|
||||
Suggestion suggestion = findUnprocessedSuggestion(context, target, idPart);
|
||||
try {
|
||||
solrSuggestionStorageService.flagSuggestionAsProcessed(suggestion);
|
||||
} catch (SolrServerException | IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void flagRelatedSuggestionsAsProcessed(Context context, ExternalDataObject externalDataObject) {
|
||||
if (!isExternalDataObjectPotentiallySuggested(context, externalDataObject)) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
solrSuggestionStorageService.flagAllSuggestionAsProcessed(sourceName, externalDataObject.getId());
|
||||
} catch (SolrServerException | IOException e) {
|
||||
log.error(e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* check if the externalDataObject may have suggestion
|
||||
* @param context
|
||||
* @param externalDataObject
|
||||
* @return true if the externalDataObject could be suggested by this provider
|
||||
* (i.e. it comes from a DataProvider used by this suggestor)
|
||||
*/
|
||||
protected abstract boolean isExternalDataObjectPotentiallySuggested(Context context,
|
||||
ExternalDataObject externalDataObject);
|
||||
}
|
@@ -0,0 +1,191 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.suggestion;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.apache.solr.client.solrj.SolrServerException;
|
||||
import org.dspace.core.Context;
|
||||
|
||||
/**
|
||||
* Service to deal with the local suggestion solr core used by the
|
||||
* SolrSuggestionProvider(s)
|
||||
*
|
||||
* @author Andrea Bollini (andrea.bollini at 4science dot it)
|
||||
* @author Luca Giamminonni (luca.giamminonni at 4science dot it)
|
||||
*
|
||||
*/
|
||||
public interface SolrSuggestionStorageService {
|
||||
public static final String SOURCE = "source";
|
||||
/** This is the URI Part of the suggestion source:target:id */
|
||||
public static final String SUGGESTION_FULLID = "suggestion_fullid";
|
||||
public static final String SUGGESTION_ID = "suggestion_id";
|
||||
public static final String TARGET_ID = "target_id";
|
||||
public static final String TITLE = "title";
|
||||
public static final String DATE = "date";
|
||||
public static final String DISPLAY = "display";
|
||||
public static final String CONTRIBUTORS = "contributors";
|
||||
public static final String ABSTRACT = "abstract";
|
||||
public static final String CATEGORY = "category";
|
||||
public static final String EXTERNAL_URI = "external-uri";
|
||||
public static final String PROCESSED = "processed";
|
||||
public static final String SCORE = "trust";
|
||||
public static final String EVIDENCES = "evidences";
|
||||
|
||||
/**
|
||||
* Add a new suggestion to SOLR
|
||||
*
|
||||
* @param suggestion
|
||||
* @param force true if the suggestion must be reindexed
|
||||
* @param commit
|
||||
* @throws IOException
|
||||
* @throws SolrServerException
|
||||
*/
|
||||
public void addSuggestion(Suggestion suggestion, boolean force, boolean commit)
|
||||
throws SolrServerException, IOException;
|
||||
|
||||
/**
|
||||
* Return true if the suggestion is already in SOLR and flagged as processed
|
||||
*
|
||||
* @param suggestion
|
||||
* @return true if the suggestion is already in SOLR and flagged as processed
|
||||
* @throws IOException
|
||||
* @throws SolrServerException
|
||||
*/
|
||||
public boolean exist(Suggestion suggestion) throws SolrServerException, IOException;
|
||||
|
||||
/**
|
||||
* Delete a suggestion from SOLR if any
|
||||
*
|
||||
* @param suggestion
|
||||
* @throws IOException
|
||||
* @throws SolrServerException
|
||||
*/
|
||||
public void deleteSuggestion(Suggestion suggestion) throws SolrServerException, IOException;
|
||||
|
||||
/**
|
||||
* Flag a suggestion as processed in SOLR if any
|
||||
*
|
||||
* @param suggestion
|
||||
* @throws IOException
|
||||
* @throws SolrServerException
|
||||
*/
|
||||
public void flagSuggestionAsProcessed(Suggestion suggestion) throws SolrServerException, IOException;
|
||||
|
||||
/**
|
||||
* Delete all the suggestions from SOLR if any related to a specific target
|
||||
*
|
||||
* @param target
|
||||
* @throws IOException
|
||||
* @throws SolrServerException
|
||||
*/
|
||||
public void deleteTarget(SuggestionTarget target) throws SolrServerException, IOException;
|
||||
|
||||
/**
|
||||
* Performs an explicit commit, causing pending documents to be committed for
|
||||
* indexing.
|
||||
*
|
||||
* @throws SolrServerException
|
||||
* @throws IOException
|
||||
*/
|
||||
void commit() throws SolrServerException, IOException;
|
||||
|
||||
/**
|
||||
* Flag all the suggestion related to the given source and id as processed.
|
||||
*
|
||||
* @param source the source name
|
||||
* @param idPart the id's last part
|
||||
* @throws SolrServerException
|
||||
* @throws IOException
|
||||
*/
|
||||
void flagAllSuggestionAsProcessed(String source, String idPart) throws SolrServerException, IOException;
|
||||
|
||||
/**
|
||||
* Count all the targets related to the given source.
|
||||
*
|
||||
* @param source the source name
|
||||
* @return the target's count
|
||||
* @throws IOException
|
||||
* @throws SolrServerException
|
||||
*/
|
||||
long countAllTargets(Context context, String source) throws SolrServerException, IOException;
|
||||
|
||||
/**
|
||||
* Count all the unprocessed suggestions related to the given source and target.
|
||||
*
|
||||
* @param context the DSpace Context
|
||||
* @param source the source name
|
||||
* @param target the target id
|
||||
* @return the suggestion count
|
||||
* @throws SolrServerException
|
||||
* @throws IOException
|
||||
*/
|
||||
long countUnprocessedSuggestionByTarget(Context context, String source, UUID target)
|
||||
throws SolrServerException, IOException;
|
||||
|
||||
/**
|
||||
* Find all the unprocessed suggestions related to the given source and target.
|
||||
*
|
||||
* @param context the DSpace Context
|
||||
* @param source the source name
|
||||
* @param target the target id
|
||||
* @param pageSize the page size
|
||||
* @param offset the page offset
|
||||
* @param ascending true to retrieve the suggestions ordered by score
|
||||
* ascending
|
||||
* @return the found suggestions
|
||||
* @throws SolrServerException
|
||||
* @throws IOException
|
||||
*/
|
||||
List<Suggestion> findAllUnprocessedSuggestions(Context context, String source, UUID target,
|
||||
int pageSize, long offset, boolean ascending) throws SolrServerException, IOException;
|
||||
|
||||
/**
|
||||
*
|
||||
* Find all the suggestion targets related to the given source.
|
||||
*
|
||||
* @param context the DSpace Context
|
||||
* @param source the source name
|
||||
* @param pageSize the page size
|
||||
* @param offset the page offset
|
||||
* @return the found suggestion targets
|
||||
* @throws SolrServerException
|
||||
* @throws IOException
|
||||
*/
|
||||
List<SuggestionTarget> findAllTargets(Context context, String source, int pageSize, long offset)
|
||||
throws SolrServerException, IOException;
|
||||
|
||||
/**
|
||||
* Find an unprocessed suggestion by the given source, target id and suggestion
|
||||
* id.
|
||||
*
|
||||
* @param context the DSpace Context
|
||||
* @param source the source name
|
||||
* @param target the target id
|
||||
* @param id the suggestion id
|
||||
* @return the suggestion, if any
|
||||
* @throws SolrServerException
|
||||
* @throws IOException
|
||||
*/
|
||||
Suggestion findUnprocessedSuggestion(Context context, String source, UUID target, String id)
|
||||
throws SolrServerException, IOException;
|
||||
|
||||
/**
|
||||
* Find a suggestion target by the given source and target.
|
||||
*
|
||||
* @param context the DSpace Context
|
||||
* @param source the source name
|
||||
* @param target the target id
|
||||
* @return the suggestion target, if any
|
||||
* @throws SolrServerException
|
||||
* @throws IOException
|
||||
*/
|
||||
SuggestionTarget findTarget(Context context, String source, UUID target) throws SolrServerException, IOException;
|
||||
}
|
@@ -0,0 +1,360 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.suggestion;
|
||||
|
||||
import static org.apache.commons.collections.CollectionUtils.isEmpty;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.core.type.TypeReference;
|
||||
import com.fasterxml.jackson.databind.DeserializationFeature;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.fasterxml.jackson.databind.json.JsonMapper;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.solr.client.solrj.SolrClient;
|
||||
import org.apache.solr.client.solrj.SolrQuery;
|
||||
import org.apache.solr.client.solrj.SolrQuery.SortClause;
|
||||
import org.apache.solr.client.solrj.SolrServerException;
|
||||
import org.apache.solr.client.solrj.impl.HttpSolrClient;
|
||||
import org.apache.solr.client.solrj.response.FacetField;
|
||||
import org.apache.solr.client.solrj.response.FacetField.Count;
|
||||
import org.apache.solr.client.solrj.response.QueryResponse;
|
||||
import org.apache.solr.common.SolrDocument;
|
||||
import org.apache.solr.common.SolrDocumentList;
|
||||
import org.apache.solr.common.SolrInputDocument;
|
||||
import org.apache.solr.common.params.FacetParams;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.dto.MetadataValueDTO;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.dspace.util.UUIDUtils;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
|
||||
/**
|
||||
* Service to deal with the local suggestion solr core used by the
|
||||
* SolrSuggestionProvider(s)
|
||||
*
|
||||
* @author Andrea Bollini (andrea.bollini at 4science dot it)
|
||||
*
|
||||
*/
|
||||
public class SolrSuggestionStorageServiceImpl implements SolrSuggestionStorageService {
|
||||
private static final Logger log = LogManager.getLogger(SolrSuggestionStorageServiceImpl.class);
|
||||
|
||||
protected SolrClient solrSuggestionClient;
|
||||
|
||||
@Autowired
|
||||
private ItemService itemService;
|
||||
|
||||
/**
|
||||
* Get solr client which use suggestion core
|
||||
*
|
||||
* @return solr client
|
||||
*/
|
||||
protected SolrClient getSolr() {
|
||||
if (solrSuggestionClient == null) {
|
||||
String solrService = DSpaceServicesFactory.getInstance().getConfigurationService()
|
||||
.getProperty("suggestion.solr.server", "http://localhost:8983/solr/suggestion");
|
||||
solrSuggestionClient = new HttpSolrClient.Builder(solrService).build();
|
||||
}
|
||||
return solrSuggestionClient;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addSuggestion(Suggestion suggestion, boolean force, boolean commit)
|
||||
throws SolrServerException, IOException {
|
||||
if (force || !exist(suggestion)) {
|
||||
ObjectMapper jsonMapper = new JsonMapper();
|
||||
jsonMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
|
||||
SolrInputDocument document = new SolrInputDocument();
|
||||
document.addField(SOURCE, suggestion.getSource());
|
||||
// suggestion id is written as concatenation of
|
||||
// source + ":" + targetID + ":" + idPart (of externalDataObj)
|
||||
String suggestionFullID = suggestion.getID();
|
||||
document.addField(SUGGESTION_FULLID, suggestionFullID);
|
||||
document.addField(SUGGESTION_ID, suggestionFullID.split(":", 3)[2]);
|
||||
document.addField(TARGET_ID, suggestion.getTarget().getID().toString());
|
||||
document.addField(DISPLAY, suggestion.getDisplay());
|
||||
document.addField(TITLE, getFirstValue(suggestion, "dc", "title", null));
|
||||
document.addField(DATE, getFirstValue(suggestion, "dc", "date", "issued"));
|
||||
document.addField(CONTRIBUTORS, getAllValues(suggestion, "dc", "contributor", "author"));
|
||||
document.addField(ABSTRACT, getFirstValue(suggestion, "dc", "description", "abstract"));
|
||||
document.addField(CATEGORY, getAllValues(suggestion, "dc", "source", null));
|
||||
document.addField(EXTERNAL_URI, suggestion.getExternalSourceUri());
|
||||
document.addField(SCORE, suggestion.getScore());
|
||||
document.addField(PROCESSED, false);
|
||||
document.addField(EVIDENCES, jsonMapper.writeValueAsString(suggestion.getEvidences()));
|
||||
getSolr().add(document);
|
||||
if (commit) {
|
||||
getSolr().commit();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void commit() throws SolrServerException, IOException {
|
||||
getSolr().commit();
|
||||
}
|
||||
|
||||
private List<String> getAllValues(Suggestion suggestion, String schema, String element, String qualifier) {
|
||||
return suggestion.getMetadata().stream()
|
||||
.filter(st -> StringUtils.isNotBlank(st.getValue()) && StringUtils.equals(st.getSchema(), schema)
|
||||
&& StringUtils.equals(st.getElement(), element)
|
||||
&& StringUtils.equals(st.getQualifier(), qualifier))
|
||||
.map(st -> st.getValue()).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
private String getFirstValue(Suggestion suggestion, String schema, String element, String qualifier) {
|
||||
return suggestion.getMetadata().stream()
|
||||
.filter(st -> StringUtils.isNotBlank(st.getValue())
|
||||
&& StringUtils.equals(st.getSchema(), schema)
|
||||
&& StringUtils.equals(st.getElement(), element)
|
||||
&& StringUtils.equals(st.getQualifier(), qualifier))
|
||||
.map(st -> st.getValue()).findFirst().orElse(null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean exist(Suggestion suggestion) throws SolrServerException, IOException {
|
||||
SolrQuery query = new SolrQuery(
|
||||
SUGGESTION_FULLID + ":\"" + suggestion.getID() + "\" AND " + PROCESSED + ":true");
|
||||
return getSolr().query(query).getResults().getNumFound() == 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteSuggestion(Suggestion suggestion) throws SolrServerException, IOException {
|
||||
getSolr().deleteById(suggestion.getID());
|
||||
getSolr().commit();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void flagSuggestionAsProcessed(Suggestion suggestion) throws SolrServerException, IOException {
|
||||
SolrInputDocument sdoc = new SolrInputDocument();
|
||||
sdoc.addField(SUGGESTION_FULLID, suggestion.getID());
|
||||
Map<String, Object> fieldModifier = new HashMap<>(1);
|
||||
fieldModifier.put("set", true);
|
||||
sdoc.addField(PROCESSED, fieldModifier); // add the map as the field value
|
||||
getSolr().add(sdoc);
|
||||
getSolr().commit();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void flagAllSuggestionAsProcessed(String source, String idPart) throws SolrServerException, IOException {
|
||||
SolrQuery query = new SolrQuery(SOURCE + ":" + source + " AND " + SUGGESTION_ID + ":\"" + idPart + "\"");
|
||||
query.setRows(Integer.MAX_VALUE);
|
||||
query.setFields(SUGGESTION_FULLID);
|
||||
SolrDocumentList results = getSolr().query(query).getResults();
|
||||
if (results.getNumFound() > 0) {
|
||||
for (SolrDocument rDoc : results) {
|
||||
SolrInputDocument sdoc = new SolrInputDocument();
|
||||
sdoc.addField(SUGGESTION_FULLID, rDoc.getFieldValue(SUGGESTION_FULLID));
|
||||
Map<String, Object> fieldModifier = new HashMap<>(1);
|
||||
fieldModifier.put("set", true);
|
||||
sdoc.addField(PROCESSED, fieldModifier); // add the map as the field value
|
||||
getSolr().add(sdoc);
|
||||
}
|
||||
}
|
||||
getSolr().commit();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteTarget(SuggestionTarget target) throws SolrServerException, IOException {
|
||||
getSolr().deleteByQuery(
|
||||
SOURCE + ":" + target.getSource() + " AND " + TARGET_ID + ":" + target.getTarget().getID().toString());
|
||||
getSolr().commit();
|
||||
}
|
||||
|
||||
@Override
|
||||
public long countAllTargets(Context context, String source) throws SolrServerException, IOException {
|
||||
SolrQuery solrQuery = new SolrQuery();
|
||||
solrQuery.setRows(0);
|
||||
solrQuery.setQuery(SOURCE + ":" + source);
|
||||
solrQuery.addFilterQuery(PROCESSED + ":false");
|
||||
solrQuery.setFacet(true);
|
||||
solrQuery.setFacetMinCount(1);
|
||||
solrQuery.addFacetField(TARGET_ID);
|
||||
solrQuery.setFacetLimit(Integer.MAX_VALUE);
|
||||
QueryResponse response = getSolr().query(solrQuery);
|
||||
return response.getFacetField(TARGET_ID).getValueCount();
|
||||
}
|
||||
|
||||
@Override
|
||||
public long countUnprocessedSuggestionByTarget(Context context, String source, UUID target)
|
||||
throws SolrServerException, IOException {
|
||||
SolrQuery solrQuery = new SolrQuery();
|
||||
solrQuery.setRows(0);
|
||||
solrQuery.setQuery("*:*");
|
||||
solrQuery.addFilterQuery(
|
||||
SOURCE + ":" + source,
|
||||
TARGET_ID + ":" + target.toString(),
|
||||
PROCESSED + ":false");
|
||||
|
||||
QueryResponse response = getSolr().query(solrQuery);
|
||||
return response.getResults().getNumFound();
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Suggestion> findAllUnprocessedSuggestions(Context context, String source, UUID target,
|
||||
int pageSize, long offset, boolean ascending) throws SolrServerException, IOException {
|
||||
|
||||
SolrQuery solrQuery = new SolrQuery();
|
||||
solrQuery.setRows(pageSize);
|
||||
solrQuery.setStart((int) offset);
|
||||
solrQuery.setQuery("*:*");
|
||||
solrQuery.addFilterQuery(
|
||||
SOURCE + ":" + source,
|
||||
TARGET_ID + ":" + target.toString(),
|
||||
PROCESSED + ":false");
|
||||
|
||||
if (ascending) {
|
||||
solrQuery.addSort(SortClause.asc("trust"));
|
||||
} else {
|
||||
solrQuery.addSort(SortClause.desc("trust"));
|
||||
}
|
||||
|
||||
solrQuery.addSort(SortClause.desc("date"));
|
||||
solrQuery.addSort(SortClause.asc("title"));
|
||||
|
||||
QueryResponse response = getSolr().query(solrQuery);
|
||||
List<Suggestion> suggestions = new ArrayList<Suggestion>();
|
||||
for (SolrDocument solrDoc : response.getResults()) {
|
||||
suggestions.add(convertSolrDoc(context, solrDoc, source));
|
||||
}
|
||||
return suggestions;
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<SuggestionTarget> findAllTargets(Context context, String source, int pageSize, long offset)
|
||||
throws SolrServerException, IOException {
|
||||
|
||||
SolrQuery solrQuery = new SolrQuery();
|
||||
solrQuery.setRows(0);
|
||||
solrQuery.setQuery(SOURCE + ":" + source);
|
||||
solrQuery.addFilterQuery(PROCESSED + ":false");
|
||||
solrQuery.setFacet(true);
|
||||
solrQuery.setFacetMinCount(1);
|
||||
solrQuery.addFacetField(TARGET_ID);
|
||||
solrQuery.setParam(FacetParams.FACET_OFFSET, String.valueOf(offset));
|
||||
solrQuery.setFacetLimit((int) (pageSize));
|
||||
QueryResponse response = getSolr().query(solrQuery);
|
||||
FacetField facetField = response.getFacetField(TARGET_ID);
|
||||
List<SuggestionTarget> suggestionTargets = new ArrayList<SuggestionTarget>();
|
||||
int idx = 0;
|
||||
for (Count c : facetField.getValues()) {
|
||||
SuggestionTarget target = new SuggestionTarget();
|
||||
target.setSource(source);
|
||||
target.setTotal((int) c.getCount());
|
||||
target.setTarget(findItem(context, c.getName()));
|
||||
suggestionTargets.add(target);
|
||||
idx++;
|
||||
}
|
||||
return suggestionTargets;
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public Suggestion findUnprocessedSuggestion(Context context, String source, UUID target, String id)
|
||||
throws SolrServerException, IOException {
|
||||
|
||||
SolrQuery solrQuery = new SolrQuery();
|
||||
solrQuery.setRows(1);
|
||||
solrQuery.setQuery("*:*");
|
||||
solrQuery.addFilterQuery(
|
||||
SOURCE + ":" + source,
|
||||
TARGET_ID + ":" + target.toString(),
|
||||
SUGGESTION_ID + ":\"" + id + "\"",
|
||||
PROCESSED + ":false");
|
||||
|
||||
SolrDocumentList results = getSolr().query(solrQuery).getResults();
|
||||
return isEmpty(results) ? null : convertSolrDoc(context, results.get(0), source);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SuggestionTarget findTarget(Context context, String source, UUID target)
|
||||
throws SolrServerException, IOException {
|
||||
SolrQuery solrQuery = new SolrQuery();
|
||||
solrQuery.setRows(0);
|
||||
solrQuery.setQuery(SOURCE + ":" + source);
|
||||
solrQuery.addFilterQuery(
|
||||
TARGET_ID + ":" + target.toString(),
|
||||
PROCESSED + ":false");
|
||||
QueryResponse response = getSolr().query(solrQuery);
|
||||
SuggestionTarget sTarget = new SuggestionTarget();
|
||||
sTarget.setSource(source);
|
||||
sTarget.setTotal((int) response.getResults().getNumFound());
|
||||
Item itemTarget = findItem(context, target);
|
||||
if (itemTarget != null) {
|
||||
sTarget.setTarget(itemTarget);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
return sTarget;
|
||||
}
|
||||
|
||||
private Suggestion convertSolrDoc(Context context, SolrDocument solrDoc, String sourceName) {
|
||||
Item target = findItem(context, (String) solrDoc.getFieldValue(TARGET_ID));
|
||||
|
||||
Suggestion suggestion = new Suggestion(sourceName, target, (String) solrDoc.getFieldValue(SUGGESTION_ID));
|
||||
suggestion.setDisplay((String) solrDoc.getFieldValue(DISPLAY));
|
||||
suggestion.getMetadata()
|
||||
.add(new MetadataValueDTO("dc", "title", null, null, (String) solrDoc.getFieldValue(TITLE)));
|
||||
suggestion.getMetadata()
|
||||
.add(new MetadataValueDTO("dc", "date", "issued", null, (String) solrDoc.getFieldValue(DATE)));
|
||||
suggestion.getMetadata().add(
|
||||
new MetadataValueDTO("dc", "description", "abstract", null, (String) solrDoc.getFieldValue(ABSTRACT)));
|
||||
|
||||
suggestion.setExternalSourceUri((String) solrDoc.getFieldValue(EXTERNAL_URI));
|
||||
if (solrDoc.containsKey(CATEGORY)) {
|
||||
for (Object o : solrDoc.getFieldValues(CATEGORY)) {
|
||||
suggestion.getMetadata().add(
|
||||
new MetadataValueDTO("dc", "source", null, null, (String) o));
|
||||
}
|
||||
}
|
||||
if (solrDoc.containsKey(CONTRIBUTORS)) {
|
||||
for (Object o : solrDoc.getFieldValues(CONTRIBUTORS)) {
|
||||
suggestion.getMetadata().add(
|
||||
new MetadataValueDTO("dc", "contributor", "author", null, (String) o));
|
||||
}
|
||||
}
|
||||
String evidencesJson = (String) solrDoc.getFieldValue(EVIDENCES);
|
||||
ObjectMapper jsonMapper = new JsonMapper();
|
||||
jsonMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
|
||||
List<SuggestionEvidence> evidences = new LinkedList<SuggestionEvidence>();
|
||||
try {
|
||||
evidences = jsonMapper.readValue(evidencesJson, new TypeReference<List<SuggestionEvidence>>() {});
|
||||
} catch (JsonProcessingException e) {
|
||||
log.error(e);
|
||||
}
|
||||
suggestion.getEvidences().addAll(evidences);
|
||||
return suggestion;
|
||||
}
|
||||
|
||||
private Item findItem(Context context, UUID itemId) {
|
||||
try {
|
||||
return itemService.find(context, itemId);
|
||||
} catch (SQLException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
private Item findItem(Context context, String itemId) {
|
||||
return findItem(context, UUIDUtils.fromString(itemId));
|
||||
}
|
||||
}
|
@@ -0,0 +1,99 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.suggestion;
|
||||
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.dto.MetadataValueDTO;
|
||||
|
||||
/**
|
||||
* This entity contains metadatas that should be added to the targeted Item
|
||||
*
|
||||
* @author Andrea Bollini (andrea.bollini at 4science.it)
|
||||
*/
|
||||
public class Suggestion {
|
||||
|
||||
/** id of the suggestion */
|
||||
private String id;
|
||||
|
||||
/** the dc.title of the item */
|
||||
private String display;
|
||||
|
||||
/** the external source name the suggestion comes from */
|
||||
private String source;
|
||||
|
||||
/** external uri of the item */
|
||||
private String externalSourceUri;
|
||||
|
||||
/** item targeted by this suggestion */
|
||||
private Item target;
|
||||
|
||||
private List<SuggestionEvidence> evidences = new LinkedList<SuggestionEvidence>();
|
||||
|
||||
private List<MetadataValueDTO> metadata = new LinkedList<MetadataValueDTO>();
|
||||
|
||||
/** suggestion creation
|
||||
* @param source name of the external source
|
||||
* @param target the targeted item in repository
|
||||
* @param idPart external item id, used mainly for suggestion @see #id creation
|
||||
* */
|
||||
public Suggestion(String source, Item target, String idPart) {
|
||||
this.source = source;
|
||||
this.target = target;
|
||||
this.id = source + ":" + target.getID().toString() + ":" + idPart;
|
||||
}
|
||||
|
||||
public String getDisplay() {
|
||||
return display;
|
||||
}
|
||||
|
||||
public void setDisplay(String display) {
|
||||
this.display = display;
|
||||
}
|
||||
|
||||
public String getSource() {
|
||||
return source;
|
||||
}
|
||||
|
||||
public String getExternalSourceUri() {
|
||||
return externalSourceUri;
|
||||
}
|
||||
|
||||
public void setExternalSourceUri(String externalSourceUri) {
|
||||
this.externalSourceUri = externalSourceUri;
|
||||
}
|
||||
|
||||
public List<SuggestionEvidence> getEvidences() {
|
||||
return evidences;
|
||||
}
|
||||
|
||||
public List<MetadataValueDTO> getMetadata() {
|
||||
return metadata;
|
||||
}
|
||||
|
||||
public Item getTarget() {
|
||||
return target;
|
||||
}
|
||||
|
||||
public String getID() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public Double getScore() {
|
||||
if (evidences != null && evidences.size() > 0) {
|
||||
double score = 0;
|
||||
for (SuggestionEvidence evidence : evidences) {
|
||||
score += evidence.getScore();
|
||||
}
|
||||
return score;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
@@ -0,0 +1,61 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.suggestion;
|
||||
|
||||
/**
|
||||
* This DTO class is returned by an {@link org.dspace.app.suggestion.openaire.EvidenceScorer} to model the concept of
|
||||
* an evidence / fact that has been used to evaluate the precision of a suggestion increasing or decreasing the score
|
||||
* of the suggestion.
|
||||
*
|
||||
* @author Andrea Bollini (andrea.bollini at 4science.it)
|
||||
*/
|
||||
public class SuggestionEvidence {
|
||||
|
||||
/** name of the evidence */
|
||||
private String name;
|
||||
|
||||
/** positive or negative value to influence the score of the suggestion */
|
||||
private double score;
|
||||
|
||||
/** additional notes */
|
||||
private String notes;
|
||||
|
||||
public SuggestionEvidence() {
|
||||
}
|
||||
|
||||
public SuggestionEvidence(String name, double score, String notes) {
|
||||
this.name = name;
|
||||
this.score = score;
|
||||
this.notes = notes;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public double getScore() {
|
||||
return score;
|
||||
}
|
||||
|
||||
public void setScore(double score) {
|
||||
this.score = score;
|
||||
}
|
||||
|
||||
public String getNotes() {
|
||||
return notes;
|
||||
}
|
||||
|
||||
public void setNotes(String notes) {
|
||||
this.notes = notes;
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,54 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.suggestion;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.external.model.ExternalDataObject;
|
||||
|
||||
/**
|
||||
*
|
||||
* Interface for suggestion management like finding and counting.
|
||||
* @see org.dspace.app.suggestion.SuggestionTarget
|
||||
* @author Francesco Bacchelli (francesco.bacchelli at 4science.com)
|
||||
*
|
||||
*/
|
||||
public interface SuggestionProvider {
|
||||
|
||||
/** find all suggestion targets
|
||||
* @see org.dspace.app.suggestion.SuggestionTarget
|
||||
* */
|
||||
public List<SuggestionTarget> findAllTargets(Context context, int pageSize, long offset);
|
||||
|
||||
/** count all suggestion targets */
|
||||
public long countAllTargets(Context context);
|
||||
|
||||
/** find a suggestion target by UUID */
|
||||
public SuggestionTarget findTarget(Context context, UUID target);
|
||||
|
||||
/** find unprocessed suggestions (paged) by target UUID
|
||||
* @see org.dspace.app.suggestion.Suggestion
|
||||
* */
|
||||
public List<Suggestion> findAllUnprocessedSuggestions(Context context, UUID target, int pageSize, long offset,
|
||||
boolean ascending);
|
||||
|
||||
/** find unprocessed suggestions by target UUID */
|
||||
public long countUnprocessedSuggestionByTarget(Context context, UUID target);
|
||||
|
||||
/** find an unprocessed suggestion by target UUID and suggestion id */
|
||||
public Suggestion findUnprocessedSuggestion(Context context, UUID target, String id);
|
||||
|
||||
/** reject a specific suggestion by target @param target and by suggestion id @param idPart */
|
||||
public void rejectSuggestion(Context context, UUID target, String idPart);
|
||||
|
||||
/** flag a suggestion as processed */
|
||||
public void flagRelatedSuggestionsAsProcessed(Context context, ExternalDataObject externalDataObject);
|
||||
|
||||
}
|
@@ -0,0 +1,61 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.suggestion;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.dspace.core.Context;
|
||||
|
||||
/**
|
||||
* Service that handles {@link Suggestion}.
|
||||
*
|
||||
* @author Andrea Bollini (andrea.bollini at 4science.it)
|
||||
*/
|
||||
public interface SuggestionService {
|
||||
|
||||
/** find a {@link SuggestionTarget } by source name and suggestion id */
|
||||
public SuggestionTarget find(Context context, String source, UUID id);
|
||||
|
||||
/** count all suggetion targets by suggestion source */
|
||||
public long countAll(Context context, String source);
|
||||
|
||||
/** find all suggestion targets by source (paged) */
|
||||
public List<SuggestionTarget> findAllTargets(Context context, String source, int pageSize, long offset);
|
||||
|
||||
/** count all (unprocessed) suggestions by the given target uuid */
|
||||
public long countAllByTarget(Context context, UUID target);
|
||||
|
||||
/** find suggestion target by targeted item (paged) */
|
||||
public List<SuggestionTarget> findByTarget(Context context, UUID target, int pageSize, long offset);
|
||||
|
||||
/** find suggestion source by source name */
|
||||
public SuggestionSource findSource(Context context, String source);
|
||||
|
||||
/** count all suggestion sources */
|
||||
public long countSources(Context context);
|
||||
|
||||
/** find all suggestion sources (paged) */
|
||||
public List<SuggestionSource> findAllSources(Context context, int pageSize, long offset);
|
||||
|
||||
/** find unprocessed suggestion by id */
|
||||
public Suggestion findUnprocessedSuggestion(Context context, String id);
|
||||
|
||||
/** reject a specific suggestion by its id */
|
||||
public void rejectSuggestion(Context context, String id);
|
||||
|
||||
/** find all suggestions by targeted item and external source */
|
||||
public List<Suggestion> findByTargetAndSource(Context context, UUID target, String source, int pageSize,
|
||||
long offset, boolean ascending);
|
||||
|
||||
/** count all suggestions by targeted item id and source name */
|
||||
public long countAllByTargetAndSource(Context context, String source, UUID target);
|
||||
|
||||
/** returns all suggestion providers */
|
||||
public List<SuggestionProvider> getSuggestionProviders();
|
||||
}
|
@@ -0,0 +1,194 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.suggestion;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
import java.util.stream.Collectors;
|
||||
import javax.annotation.Resource;
|
||||
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.core.Context;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
@Service
|
||||
public class SuggestionServiceImpl implements SuggestionService {
|
||||
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SuggestionServiceImpl.class);
|
||||
|
||||
@Resource(name = "suggestionProviders")
|
||||
private Map<String, SuggestionProvider> providersMap;
|
||||
|
||||
@Override
|
||||
public List<SuggestionProvider> getSuggestionProviders() {
|
||||
if (providersMap != null) {
|
||||
return providersMap.values().stream().collect(Collectors.toList());
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SuggestionTarget find(Context context, String source, UUID id) {
|
||||
if (providersMap.containsKey(source)) {
|
||||
return providersMap.get(source).findTarget(context, id);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public long countAll(Context context, String source) {
|
||||
if (providersMap.containsKey(source)) {
|
||||
return providersMap.get(source).countAllTargets(context);
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<SuggestionTarget> findAllTargets(Context context, String source, int pageSize, long offset) {
|
||||
if (providersMap.containsKey(source)) {
|
||||
return providersMap.get(source).findAllTargets(context, pageSize, offset);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public long countAllByTarget(Context context, UUID target) {
|
||||
int count = 0;
|
||||
for (String provider : providersMap.keySet()) {
|
||||
if (providersMap.get(provider).countUnprocessedSuggestionByTarget(context, target) > 0) {
|
||||
count++;
|
||||
}
|
||||
}
|
||||
return count;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<SuggestionTarget> findByTarget(Context context, UUID target, int pageSize, long offset) {
|
||||
List<SuggestionTarget> fullSourceTargets = new ArrayList<SuggestionTarget>();
|
||||
for (String source : providersMap.keySet()) {
|
||||
// all the suggestion target will be related to the same target (i.e. the same researcher - person item)
|
||||
SuggestionTarget sTarget = providersMap.get(source).findTarget(context, target);
|
||||
if (sTarget != null && sTarget.getTotal() > 0) {
|
||||
fullSourceTargets.add(sTarget);
|
||||
}
|
||||
}
|
||||
fullSourceTargets.sort(new Comparator<SuggestionTarget>() {
|
||||
@Override
|
||||
public int compare(SuggestionTarget arg0, SuggestionTarget arg1) {
|
||||
return -(arg0.getTotal() - arg1.getTotal());
|
||||
}
|
||||
}
|
||||
);
|
||||
// this list will be as large as the number of sources available in the repository so it is unlikely that
|
||||
// real pagination will occur
|
||||
return fullSourceTargets.stream().skip(offset).limit(pageSize).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
@Override
|
||||
public long countSources(Context context) {
|
||||
return providersMap.size();
|
||||
}
|
||||
|
||||
@Override
|
||||
public SuggestionSource findSource(Context context, String source) {
|
||||
if (providersMap.containsKey(source)) {
|
||||
SuggestionSource ssource = new SuggestionSource(source);
|
||||
ssource.setTotal((int) providersMap.get(source).countAllTargets(context));
|
||||
return ssource;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<SuggestionSource> findAllSources(Context context, int pageSize, long offset) {
|
||||
List<SuggestionSource> fullSources = getSources(context).stream().skip(offset).limit(pageSize)
|
||||
.collect(Collectors.toList());
|
||||
return fullSources;
|
||||
}
|
||||
|
||||
private List<SuggestionSource> getSources(Context context) {
|
||||
List<SuggestionSource> results = new ArrayList<SuggestionSource>();
|
||||
for (String source : providersMap.keySet()) {
|
||||
SuggestionSource ssource = new SuggestionSource(source);
|
||||
ssource.setTotal((int) providersMap.get(source).countAllTargets(context));
|
||||
results.add(ssource);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long countAllByTargetAndSource(Context context, String source, UUID target) {
|
||||
if (providersMap.containsKey(source)) {
|
||||
return providersMap.get(source).countUnprocessedSuggestionByTarget(context, target);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Suggestion> findByTargetAndSource(Context context, UUID target, String source, int pageSize,
|
||||
long offset, boolean ascending) {
|
||||
if (providersMap.containsKey(source)) {
|
||||
return providersMap.get(source).findAllUnprocessedSuggestions(context, target, pageSize, offset, ascending);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Suggestion findUnprocessedSuggestion(Context context, String id) {
|
||||
String source = null;
|
||||
UUID target = null;
|
||||
String idPart = null;
|
||||
String[] split;
|
||||
try {
|
||||
split = id.split(":", 3);
|
||||
source = split[0];
|
||||
target = UUID.fromString(split[1]);
|
||||
idPart = split[2];
|
||||
} catch (Exception e) {
|
||||
log.warn("findSuggestion got an invalid id " + id + ", return null");
|
||||
return null;
|
||||
}
|
||||
if (split.length != 3) {
|
||||
return null;
|
||||
}
|
||||
if (providersMap.containsKey(source)) {
|
||||
return providersMap.get(source).findUnprocessedSuggestion(context, target, idPart);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void rejectSuggestion(Context context, String id) {
|
||||
String source = null;
|
||||
UUID target = null;
|
||||
String idPart = null;
|
||||
String[] split;
|
||||
try {
|
||||
split = id.split(":", 3);
|
||||
source = split[0];
|
||||
target = UUID.fromString(split[1]);
|
||||
idPart = split[2];
|
||||
} catch (Exception e) {
|
||||
log.warn("rejectSuggestion got an invalid id " + id + ", doing nothing");
|
||||
return;
|
||||
}
|
||||
if (split.length != 3) {
|
||||
return;
|
||||
}
|
||||
if (providersMap.containsKey(source)) {
|
||||
providersMap.get(source).rejectSuggestion(context, target, idPart);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
@@ -0,0 +1,49 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.suggestion;
|
||||
|
||||
/**
|
||||
* This DTO class is used to pass around the number of items interested by suggestion provided by a specific source
|
||||
* (i.e. openaire)
|
||||
*
|
||||
* @author Andrea Bollini (andrea.bollini at 4science.it)
|
||||
*/
|
||||
public class SuggestionSource {

    // name of the suggestion source, doubles as the identifier
    private String name;

    // number of items targeted by suggestions from this source
    private int total;

    public SuggestionSource() {
    }

    /**
     * Summarize the available suggestions from a source.
     *
     * @param name the source name, must be not null
     */
    public SuggestionSource(String name) {
        super();
        this.name = name;
    }

    /** The source name is used as the identifier of the summary. */
    public String getID() {
        return name;
    }

    public int getTotal() {
        return total;
    }

    public void setTotal(int total) {
        this.total = total;
    }
}
|
@@ -0,0 +1,75 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.suggestion;
|
||||
|
||||
import org.dspace.content.Item;
|
||||
|
||||
/**
|
||||
* This DTO class is used to pass around the number of suggestions available from a specific source for a target
|
||||
* repository item
|
||||
*
|
||||
* @author Andrea Bollini (andrea.bollini at 4science.it)
|
||||
*/
|
||||
public class SuggestionTarget {
|
||||
|
||||
/** the item targeted */
|
||||
private Item target;
|
||||
|
||||
/** source name of the suggestion */
|
||||
private String source;
|
||||
|
||||
/** total count of suggestions for same target and source */
|
||||
private int total;
|
||||
|
||||
public SuggestionTarget() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrap a target repository item (usually a person item) into a suggestion target.
|
||||
*
|
||||
* @param item must be not null
|
||||
*/
|
||||
public SuggestionTarget(Item item) {
|
||||
super();
|
||||
this.target = item;
|
||||
}
|
||||
|
||||
/**
|
||||
* The suggestion target uses the concatenation of the source and target uuid separated by colon as id
|
||||
*
|
||||
* @return the source:uuid of the wrapped item
|
||||
*/
|
||||
public String getID() {
|
||||
return source + ":" + (target != null ? target.getID() : "");
|
||||
}
|
||||
|
||||
public Item getTarget() {
|
||||
return target;
|
||||
}
|
||||
|
||||
public void setTarget(Item target) {
|
||||
this.target = target;
|
||||
}
|
||||
|
||||
public String getSource() {
|
||||
return source;
|
||||
}
|
||||
|
||||
public void setSource(String source) {
|
||||
this.source = source;
|
||||
}
|
||||
|
||||
public int getTotal() {
|
||||
return total;
|
||||
}
|
||||
|
||||
public void setTotal(int total) {
|
||||
this.total = total;
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,111 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.suggestion;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.dspace.external.model.ExternalDataObject;
|
||||
|
||||
/**
|
||||
* This utility class provides convenient methods to deal with the
|
||||
* {@link ExternalDataObject} for the purpose of the Suggestion framework
|
||||
*
|
||||
* @author Andrea Bollini (andrea.bollini at 4science.it)
|
||||
*/
|
||||
public class SuggestionUtils {
|
||||
private SuggestionUtils() {
|
||||
}
|
||||
/**
|
||||
* This method receive an ExternalDataObject and a metadatum key.
|
||||
* It return only the values of the Metadata associated with the key.
|
||||
*
|
||||
* @param record the ExternalDataObject to extract metadata from
|
||||
* @param schema schema of the searching record
|
||||
* @param element element of the searching record
|
||||
* @param qualifier qualifier of the searching record
|
||||
* @return value of the first matching record
|
||||
*/
|
||||
public static List<String> getAllEntriesByMetadatum(ExternalDataObject record, String schema, String element,
|
||||
String qualifier) {
|
||||
return record.getMetadata().stream()
|
||||
.filter(x ->
|
||||
StringUtils.equals(x.getSchema(), schema)
|
||||
&& StringUtils.equals(x.getElement(), element)
|
||||
&& StringUtils.equals(x.getQualifier(), qualifier))
|
||||
.map(x -> x.getValue()).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
/**
|
||||
* This method receive an ExternalDataObject and a metadatum key.
|
||||
* It return only the values of the Metadata associated with the key.
|
||||
*
|
||||
* @param record the ExternalDataObject to extract metadata from
|
||||
* @param metadataFieldKey the metadata field key (i.e. dc.title or dc.contributor.author),
|
||||
* the jolly char is not supported
|
||||
* @return value of the first matching record
|
||||
*/
|
||||
public static List<String> getAllEntriesByMetadatum(ExternalDataObject record, String metadataFieldKey) {
|
||||
if (metadataFieldKey == null) {
|
||||
return Collections.EMPTY_LIST;
|
||||
}
|
||||
String[] fields = metadataFieldKey.split("\\.");
|
||||
String schema = fields[0];
|
||||
String element = fields[1];
|
||||
String qualifier = null;
|
||||
if (fields.length == 3) {
|
||||
qualifier = fields[2];
|
||||
}
|
||||
return getAllEntriesByMetadatum(record, schema, element, qualifier);
|
||||
}
|
||||
|
||||
/**
|
||||
* This method receive and ExternalDataObject and a metadatum key.
|
||||
* It return only the value of the first Metadatum from the list associated with the key.
|
||||
*
|
||||
* @param record the ExternalDataObject to extract metadata from
|
||||
* @param schema schema of the searching record
|
||||
* @param element element of the searching record
|
||||
* @param qualifier qualifier of the searching record
|
||||
* @return value of the first matching record
|
||||
*/
|
||||
public static String getFirstEntryByMetadatum(ExternalDataObject record, String schema, String element,
|
||||
String qualifier) {
|
||||
return record.getMetadata().stream()
|
||||
.filter(x ->
|
||||
StringUtils.equals(x.getSchema(), schema)
|
||||
&& StringUtils.equals(x.getElement(), element)
|
||||
&& StringUtils.equals(x.getQualifier(), qualifier))
|
||||
.map(x -> x.getValue()).findFirst().orElse(null);
|
||||
}
|
||||
|
||||
/**
|
||||
* This method receive and ExternalDataObject and a metadatum key.
|
||||
* It return only the value of the first Metadatum from the list associated with the key.
|
||||
*
|
||||
* @param record the ExternalDataObject to extract metadata from
|
||||
* @param metadataFieldKey the metadata field key (i.e. dc.title or dc.contributor.author),
|
||||
* the jolly char is not supported
|
||||
* @return value of the first matching record
|
||||
*/
|
||||
public static String getFirstEntryByMetadatum(ExternalDataObject record, String metadataFieldKey) {
|
||||
if (metadataFieldKey == null) {
|
||||
return null;
|
||||
}
|
||||
String[] fields = metadataFieldKey.split("\\.");
|
||||
String schema = fields[0];
|
||||
String element = fields[1];
|
||||
String qualifier = null;
|
||||
if (fields.length == 3) {
|
||||
qualifier = fields[2];
|
||||
}
|
||||
return getFirstEntryByMetadatum(record, schema, element, qualifier);
|
||||
}
|
||||
}
|
@@ -0,0 +1,151 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.suggestion.openaire;
|
||||
|
||||
import static org.dspace.app.suggestion.SuggestionUtils.getAllEntriesByMetadatum;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Optional;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import com.ibm.icu.text.CharsetDetector;
|
||||
import com.ibm.icu.text.CharsetMatch;
|
||||
import com.ibm.icu.text.Normalizer;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.dspace.app.suggestion.SuggestionEvidence;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.MetadataValue;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.external.model.ExternalDataObject;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
* Implementation of {@see org.dspace.app.suggestion.oaire.EvidenceScorer} which evaluate ImportRecords
|
||||
* based on Author's name.
|
||||
*
|
||||
* @author Andrea Bollini (andrea.bollini at 4science dot it)
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
|
||||
*
|
||||
*/
|
||||
public class AuthorNamesScorer implements EvidenceScorer {

    // metadata keys read from the researcher Item (e.g. dc.title) holding the person's names
    private List<String> contributorMetadata;

    // metadata keys read from the ImportRecord holding the candidate author names
    private List<String> names;

    @Autowired
    private ItemService itemService;

    /**
     * returns the metadata keys of the Item which to base the filter on
     * @return metadata keys
     */
    public List<String> getContributorMetadata() {
        return contributorMetadata;
    }

    /**
     * set the metadata keys of the Item which to base the filter on
     */
    public void setContributorMetadata(List<String> contributorMetadata) {
        this.contributorMetadata = contributorMetadata;
    }

    /**
     * return the metadata keys of ImportRecord which to base the filter on
     * @return metadata keys
     */
    public List<String> getNames() {
        return names;
    }

    /**
     * set the metadata keys of ImportRecord which to base the filter on
     */
    public void setNames(List<String> names) {
        this.names = names;
    }

    /**
     * Method which is responsible to evaluate ImportRecord based on authors name.
     * This method extract the researcher name from Item using contributorMetadata fields
     * and try to match them with values extract from ImportRecord using metadata keys defined
     * in names. The comparison happens on normalized forms (see {@link #normalize(String)}).
     * ImportRecords which don't match will be discarded (null return).
     *
     * @param importRecord the import record to check
     * @param researcher DSpace item
     * @return the generated evidence or null if the record must be discarded
     */
    @Override
    public SuggestionEvidence computeEvidence(Item researcher, ExternalDataObject importRecord) {
        // each entry is {normalizedName, originalName} taken from the researcher profile
        List<String[]> names = searchMetadataValues(researcher);
        // longest normalized profile name; floor of 1 avoids division by zero below
        int maxNameLenght = names.stream().mapToInt(n -> n[0].length()).max().orElse(1);
        List<String> metadataAuthors = new ArrayList<>();
        for (String contributorMetadatum : contributorMetadata) {
            metadataAuthors.addAll(getAllEntriesByMetadatum(importRecord, contributorMetadatum));
        }
        List<String> normalizedMetadataAuthors = metadataAuthors.stream().map(x -> normalize(x))
            .collect(Collectors.toList());
        int idx = 0;
        for (String nMetadataAuthor : normalizedMetadataAuthors) {
            Optional<String[]> found = names.stream()
                .filter(a -> StringUtils.equalsIgnoreCase(a[0], nMetadataAuthor)).findFirst();
            if (found.isPresent()) {
                // score scales with the matched name length relative to the longest profile name
                return new SuggestionEvidence(this.getClass().getSimpleName(),
                    100 * ((double) nMetadataAuthor.length() / (double) maxNameLenght),
                    "The author " + metadataAuthors.get(idx) + " at position " + (idx + 1)
                        + " in the authors list matches the name " + found.get()[1]
                        + " in the researcher profile");
            }
            idx++;
        }
        // no author in the record matched any profile name: discard the record
        return null;
    }

    /**
     * Return list of Item metadata values starting from metadata keys defined in class level variable names.
     *
     * @param researcher DSpace item
     * @return list of {normalizedValue, originalValue} pairs
     */
    private List<String[]> searchMetadataValues(Item researcher) {
        List<String[]> authors = new ArrayList<String[]>();
        for (String name : names) {
            List<MetadataValue> values = itemService.getMetadataByMetadataString(researcher, name);
            if (values != null) {
                for (MetadataValue v : values) {
                    authors.add(new String[] {normalize(v.getValue()), v.getValue()});
                }
            }
        }
        return authors;
    }

    /**
     * cleans up undesired characters: applies Unicode NFD decomposition, strips every
     * non-letter character, lower-cases, then sorts the resulting words and joins them
     * without separators so word order does not affect the comparison
     * @param value the string to clean up
     * @return cleaned up string
     * */
    private String normalize(String value) {
        String norm = Normalizer.normalize(value, Normalizer.NFD);
        // charset detection is used only to guess a language for locale-aware lowercasing;
        // NOTE(review): value.getBytes() uses the platform default charset — confirm intended
        CharsetDetector cd = new CharsetDetector();
        cd.setText(value.getBytes());
        CharsetMatch detect = cd.detect();
        if (detect != null && detect.getLanguage() != null) {
            norm = norm.replaceAll("[^\\p{L}]", " ").toLowerCase(new Locale(detect.getLanguage()));
        } else {
            norm = norm.replaceAll("[^\\p{L}]", " ").toLowerCase();
        }
        return Arrays.asList(norm.split("\\s+")).stream().sorted().collect(Collectors.joining());
    }

}
|
@@ -0,0 +1,214 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.suggestion.openaire;
|
||||
|
||||
import java.util.Calendar;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.GregorianCalendar;
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.app.suggestion.SuggestionEvidence;
|
||||
import org.dspace.app.suggestion.SuggestionUtils;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.MetadataValue;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.external.model.ExternalDataObject;
|
||||
import org.dspace.util.MultiFormatDateParser;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
* Implementation of {@see org.dspace.app.suggestion.oaire.EvidenceScorer} which evaluate ImportRecords
|
||||
* based on the distance from a date extracted from the ResearcherProfile (birthday / graduation date)
|
||||
*
|
||||
* @author Andrea Bollini (andrea.bollini at 4science dot it)
|
||||
*
|
||||
*/
|
||||
public class DateScorer implements EvidenceScorer {
|
||||
|
||||
/**
|
||||
* if available it should contains the metadata field key in the form (schema.element[.qualifier]) that contains
|
||||
* the birth date of the researcher
|
||||
*/
|
||||
private String birthDateMetadata;
|
||||
|
||||
/**
|
||||
* if available it should contains the metadata field key in the form (schema.element[.qualifier]) that contains
|
||||
* the date of graduation of the researcher. If the metadata has multiple values the min will be used
|
||||
*/
|
||||
private String educationDateMetadata;
|
||||
|
||||
/**
|
||||
* The minimal age that is expected for a researcher to be a potential author of a scholarly contribution
|
||||
* (i.e. the minimum delta from the publication date and the birth date)
|
||||
*/
|
||||
private int birthDateDelta = 20;
|
||||
|
||||
/**
|
||||
* The maximum age that is expected for a researcher to be a potential author of a scholarly contribution
|
||||
* (i.e. the maximum delta from the publication date and the birth date)
|
||||
*/
|
||||
private int birthDateRange = 50;
|
||||
|
||||
/**
|
||||
* The number of year from/before the graduation that is expected for a researcher to be a potential
|
||||
* author of a scholarly contribution (i.e. the minimum delta from the publication date and the first
|
||||
* graduation date)
|
||||
*/
|
||||
private int educationDateDelta = -3;
|
||||
|
||||
/**
|
||||
* The maximum scientific longevity that is expected for a researcher from its graduation to be a potential
|
||||
* author of a scholarly contribution (i.e. the maximum delta from the publication date and the first
|
||||
* graduation date)
|
||||
*/
|
||||
private int educationDateRange = 50;
|
||||
|
||||
@Autowired
|
||||
private ItemService itemService;
|
||||
|
||||
/**
|
||||
* the metadata used in the publication to track the publication date (i.e. dc.date.issued)
|
||||
*/
|
||||
private String publicationDateMetadata;
|
||||
|
||||
public void setItemService(ItemService itemService) {
|
||||
this.itemService = itemService;
|
||||
}
|
||||
|
||||
public void setBirthDateMetadata(String birthDate) {
|
||||
this.birthDateMetadata = birthDate;
|
||||
}
|
||||
|
||||
public String getBirthDateMetadata() {
|
||||
return birthDateMetadata;
|
||||
}
|
||||
|
||||
public void setEducationDateMetadata(String educationDate) {
|
||||
this.educationDateMetadata = educationDate;
|
||||
}
|
||||
|
||||
public String getEducationDateMetadata() {
|
||||
return educationDateMetadata;
|
||||
}
|
||||
|
||||
public void setBirthDateDelta(int birthDateDelta) {
|
||||
this.birthDateDelta = birthDateDelta;
|
||||
}
|
||||
|
||||
public void setBirthDateRange(int birthDateRange) {
|
||||
this.birthDateRange = birthDateRange;
|
||||
}
|
||||
|
||||
public void setEducationDateDelta(int educationDateDelta) {
|
||||
this.educationDateDelta = educationDateDelta;
|
||||
}
|
||||
|
||||
public void setEducationDateRange(int educationDateRange) {
|
||||
this.educationDateRange = educationDateRange;
|
||||
}
|
||||
|
||||
public void setPublicationDateMetadata(String publicationDateMetadata) {
|
||||
this.publicationDateMetadata = publicationDateMetadata;
|
||||
}
|
||||
|
||||
/**
|
||||
* Method which is responsible to evaluate ImportRecord based on the publication date.
|
||||
* ImportRecords which have a date outside the defined or calculated expected range will be discarded.
|
||||
* {@link DateScorer#birthDateMetadata}, {@link DateScorer#educationDateMetadata}
|
||||
*
|
||||
* @param importRecord the ExternalDataObject to check
|
||||
* @param researcher DSpace item
|
||||
* @return the generated evidence or null if the record must be discarded
|
||||
*/
|
||||
@Override
|
||||
public SuggestionEvidence computeEvidence(Item researcher, ExternalDataObject importRecord) {
|
||||
Integer[] range = calculateRange(researcher);
|
||||
if (range == null) {
|
||||
return new SuggestionEvidence(this.getClass().getSimpleName(),
|
||||
0,
|
||||
"No assumption was possible about the publication year range. "
|
||||
+ "Please consider setting your birthday in your profile.");
|
||||
} else {
|
||||
String optDate = SuggestionUtils.getFirstEntryByMetadatum(importRecord, publicationDateMetadata);
|
||||
int year = getYear(optDate);
|
||||
if (year > 0) {
|
||||
if ((range[0] == null || year >= range[0]) &&
|
||||
(range[1] == null || year <= range[1])) {
|
||||
return new SuggestionEvidence(this.getClass().getSimpleName(),
|
||||
10,
|
||||
"The publication date is within the expected range [" + range[0] + ", "
|
||||
+ range[1] + "]");
|
||||
} else {
|
||||
// outside the range, discard the suggestion
|
||||
return null;
|
||||
}
|
||||
} else {
|
||||
return new SuggestionEvidence(this.getClass().getSimpleName(),
|
||||
0,
|
||||
"No assumption was possible as the publication date is " + (optDate != null
|
||||
? "unprocessable [" + optDate + "]"
|
||||
: "unknown"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* returns min and max year interval in between it's probably that the researcher
|
||||
* actually contributed to the suggested item
|
||||
* @param researcher
|
||||
* @return
|
||||
*/
|
||||
private Integer[] calculateRange(Item researcher) {
|
||||
String birthDateStr = getSingleValue(researcher, birthDateMetadata);
|
||||
int birthDateYear = getYear(birthDateStr);
|
||||
int educationDateYear = getListMetadataValues(researcher, educationDateMetadata).stream()
|
||||
.mapToInt(x -> getYear(x.getValue())).filter(d -> d > 0).min().orElse(-1);
|
||||
if (educationDateYear > 0) {
|
||||
return new Integer[] {
|
||||
educationDateYear + educationDateDelta,
|
||||
educationDateYear + educationDateDelta + educationDateRange
|
||||
};
|
||||
} else if (birthDateYear > 0) {
|
||||
return new Integer[] {
|
||||
birthDateYear + birthDateDelta,
|
||||
birthDateYear + birthDateDelta + birthDateRange
|
||||
};
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private List<MetadataValue> getListMetadataValues(Item researcher, String metadataKey) {
|
||||
if (metadataKey != null) {
|
||||
return itemService.getMetadataByMetadataString(researcher, metadataKey);
|
||||
} else {
|
||||
return Collections.EMPTY_LIST;
|
||||
}
|
||||
}
|
||||
|
||||
private String getSingleValue(Item researcher, String metadataKey) {
|
||||
if (metadataKey != null) {
|
||||
return itemService.getMetadata(researcher, metadataKey);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private int getYear(String birthDateStr) {
|
||||
int birthDateYear = -1;
|
||||
if (birthDateStr != null) {
|
||||
Date birthDate = MultiFormatDateParser.parse(birthDateStr);
|
||||
if (birthDate != null) {
|
||||
Calendar calendar = new GregorianCalendar();
|
||||
calendar.setTime(birthDate);
|
||||
birthDateYear = calendar.get(Calendar.YEAR);
|
||||
}
|
||||
}
|
||||
return birthDateYear;
|
||||
}
|
||||
}
|
@@ -0,0 +1,37 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.suggestion.openaire;
|
||||
|
||||
import org.dspace.app.suggestion.SuggestionEvidence;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.external.model.ExternalDataObject;
|
||||
|
||||
/**
|
||||
* Interface used in {@see org.dspace.app.suggestion.oaire.PublicationApproverServiceImpl}
|
||||
* to construct filtering pipeline.
|
||||
*
|
||||
* For each EvidenceScorer, the service call computeEvidence method.
|
||||
*
|
||||
* @author Andrea Bollini (andrea.bollini at 4science dot it)
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
|
||||
*
|
||||
*/
|
||||
public interface EvidenceScorer {

    /**
     * Method to compute the suggestion evidence of an ImportRecord; a null evidence
     * would lead the record to be discarded by the caller.
     *
     * @param researcher   DSpace item holding the researcher profile
     * @param importRecord the record to evaluate
     * @return the generated suggestion evidence or null if the record should be
     *         discarded
     */
    public SuggestionEvidence computeEvidence(Item researcher, ExternalDataObject importRecord);

}
|
@@ -0,0 +1,256 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.suggestion.openaire;
|
||||
|
||||
import static org.dspace.app.suggestion.SuggestionUtils.getAllEntriesByMetadatum;
|
||||
import static org.dspace.app.suggestion.SuggestionUtils.getFirstEntryByMetadatum;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.solr.client.solrj.SolrServerException;
|
||||
import org.dspace.app.suggestion.SolrSuggestionProvider;
|
||||
import org.dspace.app.suggestion.Suggestion;
|
||||
import org.dspace.app.suggestion.SuggestionEvidence;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.dto.MetadataValueDTO;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.external.model.ExternalDataObject;
|
||||
import org.dspace.external.provider.ExternalDataProvider;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
* Class responsible to load and manage ImportRecords from OpenAIRE
|
||||
*
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
|
||||
*
|
||||
*/
|
||||
public class PublicationLoader extends SolrSuggestionProvider {
|
||||
|
||||
private List<String> names;
|
||||
|
||||
private ExternalDataProvider primaryProvider;
|
||||
|
||||
private List<ExternalDataProvider> otherProviders;
|
||||
|
||||
@Autowired
|
||||
private ConfigurationService configurationService;
|
||||
|
||||
private List<EvidenceScorer> pipeline;
|
||||
|
||||
public void setPrimaryProvider(ExternalDataProvider primaryProvider) {
|
||||
this.primaryProvider = primaryProvider;
|
||||
}
|
||||
|
||||
public void setOtherProviders(List<ExternalDataProvider> otherProviders) {
|
||||
this.otherProviders = otherProviders;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the pipeline of Approver
|
||||
* @param pipeline list Approver
|
||||
*/
|
||||
public void setPipeline(List<EvidenceScorer> pipeline) {
|
||||
this.pipeline = pipeline;
|
||||
}
|
||||
|
||||
/**
|
||||
* This method filter a list of ImportRecords using a pipeline of AuthorNamesApprover
|
||||
* and return a filtered list of ImportRecords.
|
||||
*
|
||||
* @see org.dspace.app.suggestion.openaire.AuthorNamesScorer
|
||||
* @param researcher the researcher Item
|
||||
* @param importRecords List of import record
|
||||
* @return a list of filtered import records
|
||||
*/
|
||||
public List<Suggestion> reduceAndTransform(Item researcher, List<ExternalDataObject> importRecords) {
|
||||
List<Suggestion> results = new ArrayList<>();
|
||||
for (ExternalDataObject r : importRecords) {
|
||||
boolean skip = false;
|
||||
List<SuggestionEvidence> evidences = new ArrayList<SuggestionEvidence>();
|
||||
for (EvidenceScorer authorNameApprover : pipeline) {
|
||||
SuggestionEvidence evidence = authorNameApprover.computeEvidence(researcher, r);
|
||||
if (evidence != null) {
|
||||
evidences.add(evidence);
|
||||
} else {
|
||||
skip = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!skip) {
|
||||
Suggestion suggestion = translateImportRecordToSuggestion(researcher, r);
|
||||
suggestion.getEvidences().addAll(evidences);
|
||||
results.add(suggestion);
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Save a List of ImportRecord into Solr.
|
||||
* ImportRecord will be translate into a SolrDocument by the method translateImportRecordToSolrDocument.
|
||||
*
|
||||
* @param context the DSpace Context
|
||||
* @param researcher a DSpace Item
|
||||
* @throws SolrServerException
|
||||
* @throws IOException
|
||||
*/
|
||||
public void importAuthorRecords(Context context, Item researcher)
|
||||
throws SolrServerException, IOException {
|
||||
int offset = 0;
|
||||
int limit = 10;
|
||||
int loaded = limit;
|
||||
List<String> searchValues = searchMetadataValues(researcher);
|
||||
while (loaded > 0) {
|
||||
List<ExternalDataObject> metadata = getImportRecords(searchValues, researcher, offset, limit);
|
||||
if (metadata.isEmpty()) {
|
||||
loaded = 0;
|
||||
continue;
|
||||
}
|
||||
offset += limit;
|
||||
loaded = metadata.size();
|
||||
List<Suggestion> records = reduceAndTransform(researcher, metadata);
|
||||
for (Suggestion record : records) {
|
||||
solrSuggestionStorageService.addSuggestion(record, false, false);
|
||||
}
|
||||
}
|
||||
solrSuggestionStorageService.commit();
|
||||
}
|
||||
|
||||
/**
|
||||
* Translate an ImportRecord into a Suggestion
|
||||
* @param item DSpace item
|
||||
* @param record ImportRecord
|
||||
* @return Suggestion
|
||||
*/
|
||||
private Suggestion translateImportRecordToSuggestion(Item item, ExternalDataObject record) {
|
||||
String openAireId = record.getId();
|
||||
Suggestion suggestion = new Suggestion(getSourceName(), item, openAireId);
|
||||
suggestion.setDisplay(getFirstEntryByMetadatum(record, "dc", "title", null));
|
||||
suggestion.getMetadata().add(
|
||||
new MetadataValueDTO("dc", "title", null, null, getFirstEntryByMetadatum(record, "dc", "title", null)));
|
||||
suggestion.getMetadata().add(new MetadataValueDTO("dc", "date", "issued", null,
|
||||
getFirstEntryByMetadatum(record, "dc", "date", "issued")));
|
||||
suggestion.getMetadata().add(new MetadataValueDTO("dc", "description", "abstract", null,
|
||||
getFirstEntryByMetadatum(record, "dc", "description", "abstract")));
|
||||
suggestion.setExternalSourceUri(configurationService.getProperty("dspace.server.url")
|
||||
+ "/api/integration/externalsources/" + primaryProvider.getSourceIdentifier() + "/entryValues/"
|
||||
+ openAireId);
|
||||
for (String o : getAllEntriesByMetadatum(record, "dc", "source", null)) {
|
||||
suggestion.getMetadata().add(new MetadataValueDTO("dc", "source", null, null, o));
|
||||
}
|
||||
for (String o : getAllEntriesByMetadatum(record, "dc", "contributor", "author")) {
|
||||
suggestion.getMetadata().add(new MetadataValueDTO("dc", "contributor", "author", null, o));
|
||||
}
|
||||
return suggestion;
|
||||
}
|
||||
|
||||
public List<String> getNames() {
|
||||
return names;
|
||||
}
|
||||
|
||||
public void setNames(List<String> names) {
|
||||
this.names = names;
|
||||
}
|
||||
|
||||
/**
|
||||
* Load metadata from OpenAIRE using the import service. The service use the value
|
||||
* get from metadata key defined in class level variable names as author to query OpenAIRE.
|
||||
*
|
||||
* @see org.dspace.importer.external.openaire.service.OpenAireImportMetadataSourceServiceImpl
|
||||
* @param searchValues query
|
||||
* @param researcher item to extract metadata from
|
||||
* @param limit for pagination purpose
|
||||
* @param offset for pagination purpose
|
||||
* @return list of ImportRecord
|
||||
*/
|
||||
private List<ExternalDataObject> getImportRecords(List<String> searchValues,
|
||||
Item researcher, int offset, int limit) {
|
||||
List<ExternalDataObject> matchingRecords = new ArrayList<>();
|
||||
for (String searchValue : searchValues) {
|
||||
matchingRecords.addAll(
|
||||
primaryProvider.searchExternalDataObjects(searchValue, offset, limit));
|
||||
}
|
||||
List<ExternalDataObject> toReturn = removeDuplicates(matchingRecords);
|
||||
return toReturn;
|
||||
}
|
||||
|
||||
/**
|
||||
* This method remove duplicates from importRecords list.
|
||||
* An element is a duplicate if in the list exist another element
|
||||
* with the same value of the metadatum 'dc.identifier.other'
|
||||
*
|
||||
* @param importRecords list of ImportRecord
|
||||
* @return list of ImportRecords without duplicates
|
||||
*/
|
||||
private List<ExternalDataObject> removeDuplicates(List<ExternalDataObject> importRecords) {
|
||||
List<ExternalDataObject> filteredRecords = new ArrayList<>();
|
||||
for (ExternalDataObject currentRecord : importRecords) {
|
||||
if (!isDuplicate(currentRecord, filteredRecords)) {
|
||||
filteredRecords.add(currentRecord);
|
||||
}
|
||||
}
|
||||
return filteredRecords;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Check if the ImportRecord is already present in the list.
|
||||
* The comparison is made on the value of metadatum with key 'dc.identifier.other'
|
||||
*
|
||||
* @param dto An importRecord instance
|
||||
* @param importRecords a list of importRecord
|
||||
* @return true if dto is already present in importRecords, false otherwise
|
||||
*/
|
||||
private boolean isDuplicate(ExternalDataObject dto, List<ExternalDataObject> importRecords) {
|
||||
String currentItemId = dto.getId();
|
||||
if (currentItemId == null) {
|
||||
return true;
|
||||
}
|
||||
for (ExternalDataObject importRecord : importRecords) {
|
||||
if (currentItemId.equals(importRecord.getId())) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Return list of Item metadata values starting from metadata keys defined in class level variable names.
|
||||
*
|
||||
* @param researcher DSpace item
|
||||
* @return list of metadata values
|
||||
*/
|
||||
private List<String> searchMetadataValues(Item researcher) {
|
||||
List<String> authors = new ArrayList<String>();
|
||||
for (String name : names) {
|
||||
String value = itemService.getMetadata(researcher, name);
|
||||
if (value != null) {
|
||||
authors.add(value);
|
||||
}
|
||||
}
|
||||
return authors;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean isExternalDataObjectPotentiallySuggested(Context context, ExternalDataObject externalDataObject) {
|
||||
if (StringUtils.equals(externalDataObject.getSource(), primaryProvider.getSourceIdentifier())) {
|
||||
return true;
|
||||
} else if (otherProviders != null) {
|
||||
return otherProviders.stream()
|
||||
.anyMatch(x -> StringUtils.equals(externalDataObject.getSource(), x.getSourceIdentifier()));
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,29 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.suggestion.openaire;
|
||||
|
||||
import org.apache.commons.cli.Options;
|
||||
|
||||
/**
|
||||
* Extension of {@link PublicationLoaderScriptConfiguration} for CLI.
|
||||
*
|
||||
* @author Alessandro Martelli (alessandro.martelli at 4science.it)
|
||||
*/
|
||||
public class PublicationLoaderCliScriptConfiguration<T extends PublicationLoaderRunnable>
|
||||
extends PublicationLoaderScriptConfiguration<T> {
|
||||
|
||||
@Override
|
||||
public Options getOptions() {
|
||||
Options options = super.getOptions();
|
||||
options.addOption("h", "help", false, "help");
|
||||
options.getOption("h").setType(boolean.class);
|
||||
super.options = options;
|
||||
return options;
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,115 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.suggestion.openaire;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.discovery.DiscoverQuery;
|
||||
import org.dspace.discovery.SearchService;
|
||||
import org.dspace.discovery.SearchServiceException;
|
||||
import org.dspace.discovery.SearchUtils;
|
||||
import org.dspace.discovery.utils.DiscoverQueryBuilder;
|
||||
import org.dspace.discovery.utils.parameter.QueryBuilderSearchFilter;
|
||||
import org.dspace.scripts.DSpaceRunnable;
|
||||
import org.dspace.sort.SortOption;
|
||||
import org.dspace.utils.DSpace;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* Runner responsible to import metadata about authors from OpenAIRE to Solr.
|
||||
* This runner works in two ways:
|
||||
* If -s parameter with a valid UUID is received, then the specific researcher
|
||||
* with this UUID will be used.
|
||||
* Invocation without any parameter results in massive import, processing all
|
||||
* authors registered in DSpace.
|
||||
*
|
||||
* @author Alessandro Martelli (alessandro.martelli at 4science.it)
|
||||
*/
|
||||
public class PublicationLoaderRunnable
|
||||
extends DSpaceRunnable<PublicationLoaderScriptConfiguration<PublicationLoaderRunnable>> {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(PublicationLoaderRunnable.class);
|
||||
|
||||
private PublicationLoader oairePublicationLoader = null;
|
||||
|
||||
protected Context context;
|
||||
|
||||
protected String profile;
|
||||
|
||||
@Override
|
||||
@SuppressWarnings({ "rawtypes", "unchecked" })
|
||||
public PublicationLoaderScriptConfiguration<PublicationLoaderRunnable> getScriptConfiguration() {
|
||||
PublicationLoaderScriptConfiguration configuration = new DSpace().getServiceManager()
|
||||
.getServiceByName("import-openaire-suggestions", PublicationLoaderScriptConfiguration.class);
|
||||
return configuration;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setup() throws ParseException {
|
||||
|
||||
oairePublicationLoader = new DSpace().getServiceManager().getServiceByName(
|
||||
"OpenairePublicationLoader", PublicationLoader.class);
|
||||
|
||||
profile = commandLine.getOptionValue("s");
|
||||
if (profile == null) {
|
||||
LOGGER.info("No argument for -s, process all profile");
|
||||
} else {
|
||||
LOGGER.info("Process eperson item with UUID " + profile);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void internalRun() throws Exception {
|
||||
|
||||
context = new Context();
|
||||
|
||||
Iterator<Item> researchers = getResearchers(profile);
|
||||
while (researchers.hasNext()) {
|
||||
Item researcher = researchers.next();
|
||||
oairePublicationLoader.importAuthorRecords(context, researcher);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the Item(s) which map a researcher from Solr. If the uuid is specified,
|
||||
* the researcher with this UUID will be chosen. If the uuid doesn't match any
|
||||
* researcher, the method returns an empty array list. If uuid is null, all
|
||||
* research will be return.
|
||||
*
|
||||
* @param profileUUID uuid of the researcher. If null, all researcher will be
|
||||
* returned.
|
||||
* @return the researcher with specified UUID or all researchers
|
||||
*/
|
||||
@SuppressWarnings("rawtypes")
|
||||
private Iterator<Item> getResearchers(String profileUUID) {
|
||||
SearchService searchService = new DSpace().getSingletonService(SearchService.class);
|
||||
DiscoverQueryBuilder queryBuilder = SearchUtils.getQueryBuilder();
|
||||
List<QueryBuilderSearchFilter> filters = new ArrayList<QueryBuilderSearchFilter>();
|
||||
String query = "*:*";
|
||||
if (profileUUID != null) {
|
||||
query = "search.resourceid:" + profileUUID.toString();
|
||||
}
|
||||
try {
|
||||
DiscoverQuery discoverQuery = queryBuilder.buildQuery(context, null,
|
||||
SearchUtils.getDiscoveryConfigurationByName("person"),
|
||||
query, filters,
|
||||
"Item", 10, Long.getLong("0"), null, SortOption.DESCENDING);
|
||||
return searchService.iteratorSearch(context, null, discoverQuery);
|
||||
} catch (SearchServiceException e) {
|
||||
LOGGER.error("Unable to read researcher on solr", e);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
@@ -0,0 +1,36 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.suggestion.openaire;
|
||||
|
||||
import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.dspace.utils.DSpace;
|
||||
|
||||
public class PublicationLoaderRunnableCli extends PublicationLoaderRunnable {
|
||||
|
||||
@Override
|
||||
@SuppressWarnings({ "rawtypes", "unchecked" })
|
||||
public PublicationLoaderCliScriptConfiguration getScriptConfiguration() {
|
||||
PublicationLoaderCliScriptConfiguration configuration = new DSpace().getServiceManager()
|
||||
.getServiceByName("import-openaire-suggestions", PublicationLoaderCliScriptConfiguration.class);
|
||||
return configuration;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setup() throws ParseException {
|
||||
super.setup();
|
||||
|
||||
// in case of CLI we show the help prompt
|
||||
if (commandLine.hasOption('h')) {
|
||||
HelpFormatter formatter = new HelpFormatter();
|
||||
formatter.printHelp("Import Researchers Suggestions", getScriptConfiguration().getOptions());
|
||||
System.exit(0);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,56 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.suggestion.openaire;
|
||||
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||
|
||||
public class PublicationLoaderScriptConfiguration<T extends PublicationLoaderRunnable>
|
||||
extends ScriptConfiguration<T> {
|
||||
|
||||
private Class<T> dspaceRunnableClass;
|
||||
|
||||
@Override
|
||||
public Class<T> getDspaceRunnableClass() {
|
||||
return dspaceRunnableClass;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generic setter for the dspaceRunnableClass
|
||||
* @param dspaceRunnableClass The dspaceRunnableClass to be set on this PublicationLoaderScriptConfiguration
|
||||
*/
|
||||
@Override
|
||||
public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
|
||||
this.dspaceRunnableClass = dspaceRunnableClass;
|
||||
}
|
||||
|
||||
/*
|
||||
@Override
|
||||
public boolean isAllowedToExecute(Context context) {
|
||||
try {
|
||||
return authorizeService.isAdmin(context);
|
||||
} catch (SQLException e) {
|
||||
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
@Override
|
||||
public Options getOptions() {
|
||||
if (options == null) {
|
||||
Options options = new Options();
|
||||
|
||||
options.addOption("s", "single-researcher", true, "Single researcher UUID");
|
||||
options.getOption("s").setType(String.class);
|
||||
|
||||
super.options = options;
|
||||
}
|
||||
return options;
|
||||
}
|
||||
|
||||
}
|
@@ -628,12 +628,23 @@ public class AuthorizeUtil {
|
||||
// actually expected to be returning true.
|
||||
// For example the LDAP canSelfRegister will return true due to auto-register, while that
|
||||
// does not imply a new user can register explicitly
|
||||
return AuthenticateServiceFactory.getInstance().getAuthenticationService()
|
||||
.allowSetPassword(context, request, null);
|
||||
return authorizePasswordChange(context, request);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* This method will return a boolean indicating whether the current user is allowed to reset the password
|
||||
* or not
|
||||
*
|
||||
* @return A boolean indicating whether the current user can reset its password or not
|
||||
* @throws SQLException If something goes wrong
|
||||
*/
|
||||
public static boolean authorizeForgotPassword() {
|
||||
return DSpaceServicesFactory.getInstance().getConfigurationService()
|
||||
.getBooleanProperty("user.forgot-password", true);
|
||||
}
|
||||
|
||||
/**
|
||||
* This method will return a boolean indicating whether it's allowed to update the password for the EPerson
|
||||
* with the given email and canLogin property
|
||||
@@ -647,8 +658,7 @@ public class AuthorizeUtil {
|
||||
if (eperson != null && eperson.canLogIn()) {
|
||||
HttpServletRequest request = new DSpace().getRequestService().getCurrentRequest()
|
||||
.getHttpServletRequest();
|
||||
return AuthenticateServiceFactory.getInstance().getAuthenticationService()
|
||||
.allowSetPassword(context, request, null);
|
||||
return authorizePasswordChange(context, request);
|
||||
}
|
||||
} catch (SQLException e) {
|
||||
log.error("Something went wrong trying to retrieve EPerson for email: " + email, e);
|
||||
@@ -656,6 +666,19 @@ public class AuthorizeUtil {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the current configuration has at least one password based authentication method
|
||||
*
|
||||
* @param context Dspace Context
|
||||
* @param request Current Request
|
||||
* @return True if the password change is enabled
|
||||
* @throws SQLException
|
||||
*/
|
||||
protected static boolean authorizePasswordChange(Context context, HttpServletRequest request) throws SQLException {
|
||||
return AuthenticateServiceFactory.getInstance().getAuthenticationService()
|
||||
.allowSetPassword(context, request, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* This method checks if the community Admin can manage accounts
|
||||
*
|
||||
|
@@ -14,7 +14,6 @@ import java.util.Iterator;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import javax.servlet.ServletException;
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.parsers.FactoryConfigurationError;
|
||||
@@ -24,6 +23,7 @@ import org.dspace.content.Collection;
|
||||
import org.dspace.content.MetadataSchemaEnum;
|
||||
import org.dspace.core.Utils;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.dspace.submit.factory.SubmissionServiceFactory;
|
||||
import org.w3c.dom.Document;
|
||||
import org.w3c.dom.NamedNodeMap;
|
||||
import org.w3c.dom.Node;
|
||||
@@ -149,16 +149,16 @@ public class DCInputsReader {
|
||||
* Returns the set of DC inputs used for a particular collection, or the
|
||||
* default set if no inputs defined for the collection
|
||||
*
|
||||
* @param collectionHandle collection's unique Handle
|
||||
* @param collection collection for which search the set of DC inputs
|
||||
* @return DC input set
|
||||
* @throws DCInputsReaderException if no default set defined
|
||||
* @throws ServletException
|
||||
*/
|
||||
public List<DCInputSet> getInputsByCollectionHandle(String collectionHandle)
|
||||
public List<DCInputSet> getInputsByCollection(Collection collection)
|
||||
throws DCInputsReaderException {
|
||||
SubmissionConfig config;
|
||||
try {
|
||||
config = new SubmissionConfigReader().getSubmissionConfigByCollection(collectionHandle);
|
||||
config = SubmissionServiceFactory.getInstance().getSubmissionConfigService()
|
||||
.getSubmissionConfigByCollection(collection);
|
||||
String formName = config.getSubmissionName();
|
||||
if (formName == null) {
|
||||
throw new DCInputsReaderException("No form designated as default");
|
||||
@@ -180,7 +180,8 @@ public class DCInputsReader {
|
||||
throws DCInputsReaderException {
|
||||
SubmissionConfig config;
|
||||
try {
|
||||
config = new SubmissionConfigReader().getSubmissionConfigByName(name);
|
||||
config = SubmissionServiceFactory.getInstance().getSubmissionConfigService()
|
||||
.getSubmissionConfigByName(name);
|
||||
String formName = config.getSubmissionName();
|
||||
if (formName == null) {
|
||||
throw new DCInputsReaderException("No form designated as default");
|
||||
@@ -688,7 +689,7 @@ public class DCInputsReader {
|
||||
|
||||
public String getInputFormNameByCollectionAndField(Collection collection, String field)
|
||||
throws DCInputsReaderException {
|
||||
List<DCInputSet> inputSets = getInputsByCollectionHandle(collection.getHandle());
|
||||
List<DCInputSet> inputSets = getInputsByCollection(collection);
|
||||
for (DCInputSet inputSet : inputSets) {
|
||||
String[] tokenized = Utils.tokenize(field);
|
||||
String schema = tokenized[0];
|
||||
|
@@ -11,6 +11,7 @@ import java.io.File;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
@@ -21,6 +22,7 @@ import javax.xml.parsers.FactoryConfigurationError;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Community;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.CollectionService;
|
||||
@@ -90,6 +92,13 @@ public class SubmissionConfigReader {
|
||||
*/
|
||||
private Map<String, String> collectionToSubmissionConfig = null;
|
||||
|
||||
/**
|
||||
* Hashmap which stores which submission process configuration is used by
|
||||
* which community, computed from the item submission config file
|
||||
* (specifically, the 'submission-map' tag)
|
||||
*/
|
||||
private Map<String, String> communityToSubmissionConfig = null;
|
||||
|
||||
/**
|
||||
* Reference to the global submission step definitions defined in the
|
||||
* "step-definitions" section
|
||||
@@ -127,6 +136,7 @@ public class SubmissionConfigReader {
|
||||
|
||||
public void reload() throws SubmissionConfigReaderException {
|
||||
collectionToSubmissionConfig = null;
|
||||
communityToSubmissionConfig = null;
|
||||
stepDefns = null;
|
||||
submitDefns = null;
|
||||
buildInputs(configDir + SUBMIT_DEF_FILE_PREFIX + SUBMIT_DEF_FILE_SUFFIX);
|
||||
@@ -145,7 +155,8 @@ public class SubmissionConfigReader {
|
||||
*/
|
||||
private void buildInputs(String fileName) throws SubmissionConfigReaderException {
|
||||
collectionToSubmissionConfig = new HashMap<String, String>();
|
||||
submitDefns = new HashMap<String, List<Map<String, String>>>();
|
||||
communityToSubmissionConfig = new HashMap<String, String>();
|
||||
submitDefns = new LinkedHashMap<String, List<Map<String, String>>>();
|
||||
|
||||
String uri = "file:" + new File(fileName).getAbsolutePath();
|
||||
|
||||
@@ -210,18 +221,41 @@ public class SubmissionConfigReader {
|
||||
* Returns the Item Submission process config used for a particular
|
||||
* collection, or the default if none is defined for the collection
|
||||
*
|
||||
* @param collectionHandle collection's unique Handle
|
||||
* @param col collection for which search Submission process config
|
||||
* @return the SubmissionConfig representing the item submission config
|
||||
* @throws SubmissionConfigReaderException if no default submission process configuration defined
|
||||
* @throws IllegalStateException if no default submission process configuration defined
|
||||
*/
|
||||
public SubmissionConfig getSubmissionConfigByCollection(String collectionHandle) {
|
||||
public SubmissionConfig getSubmissionConfigByCollection(Collection col) {
|
||||
|
||||
String submitName;
|
||||
|
||||
if (col != null) {
|
||||
|
||||
// get the name of the submission process config for this collection
|
||||
String submitName = collectionToSubmissionConfig
|
||||
.get(collectionHandle);
|
||||
if (submitName == null) {
|
||||
submitName = collectionToSubmissionConfig
|
||||
.get(DEFAULT_COLLECTION);
|
||||
.get(col.getHandle());
|
||||
if (submitName != null) {
|
||||
return getSubmissionConfigByName(submitName);
|
||||
}
|
||||
|
||||
if (!communityToSubmissionConfig.isEmpty()) {
|
||||
try {
|
||||
List<Community> communities = col.getCommunities();
|
||||
for (Community com : communities) {
|
||||
submitName = getSubmissionConfigByCommunity(com);
|
||||
if (submitName != null) {
|
||||
return getSubmissionConfigByName(submitName);
|
||||
}
|
||||
}
|
||||
} catch (SQLException sqle) {
|
||||
throw new IllegalStateException("Error occurred while getting item submission configured " +
|
||||
"by community", sqle);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
submitName = collectionToSubmissionConfig.get(DEFAULT_COLLECTION);
|
||||
|
||||
if (submitName == null) {
|
||||
throw new IllegalStateException(
|
||||
"No item submission process configuration designated as 'default' in 'submission-map' section of " +
|
||||
@@ -230,6 +264,30 @@ public class SubmissionConfigReader {
|
||||
return getSubmissionConfigByName(submitName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursive function to return the Item Submission process config
|
||||
* used for a community or the closest community parent, or null
|
||||
* if none is defined
|
||||
*
|
||||
* @param com community for which search Submission process config
|
||||
* @return the SubmissionConfig representing the item submission config
|
||||
*/
|
||||
private String getSubmissionConfigByCommunity(Community com) {
|
||||
String submitName = communityToSubmissionConfig
|
||||
.get(com.getHandle());
|
||||
if (submitName != null) {
|
||||
return submitName;
|
||||
}
|
||||
List<Community> communities = com.getParentCommunities();
|
||||
for (Community parentCom : communities) {
|
||||
submitName = getSubmissionConfigByCommunity(parentCom);
|
||||
if (submitName != null) {
|
||||
return submitName;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the Item Submission process config
|
||||
*
|
||||
@@ -357,13 +415,14 @@ public class SubmissionConfigReader {
|
||||
Node nd = nl.item(i);
|
||||
if (nd.getNodeName().equals("name-map")) {
|
||||
String id = getAttribute(nd, "collection-handle");
|
||||
String communityId = getAttribute(nd, "community-handle");
|
||||
String entityType = getAttribute(nd, "collection-entity-type");
|
||||
String value = getAttribute(nd, "submission-name");
|
||||
String content = getValue(nd);
|
||||
if (id == null && entityType == null) {
|
||||
if (id == null && communityId == null && entityType == null) {
|
||||
throw new SAXException(
|
||||
"name-map element is missing collection-handle or collection-entity-type attribute " +
|
||||
"in 'item-submission.xml'");
|
||||
"name-map element is missing collection-handle or community-handle or collection-entity-type " +
|
||||
"attribute in 'item-submission.xml'");
|
||||
}
|
||||
if (value == null) {
|
||||
throw new SAXException(
|
||||
@@ -375,7 +434,8 @@ public class SubmissionConfigReader {
|
||||
}
|
||||
if (id != null) {
|
||||
collectionToSubmissionConfig.put(id, value);
|
||||
|
||||
} else if (communityId != null) {
|
||||
communityToSubmissionConfig.put(communityId, value);
|
||||
} else {
|
||||
// get all collections for this entity-type
|
||||
List<Collection> collections = collectionService.findAllCollectionsByEntityType( context,
|
||||
|
@@ -405,21 +405,13 @@ public class Util {
|
||||
DCInput myInputs = null;
|
||||
boolean myInputsFound = false;
|
||||
String formFileName = I18nUtil.getInputFormsFileName(locale);
|
||||
String col_handle = "";
|
||||
|
||||
Collection collection = item.getOwningCollection();
|
||||
|
||||
if (collection == null) {
|
||||
// set an empty handle so to get the default input set
|
||||
col_handle = "";
|
||||
} else {
|
||||
col_handle = collection.getHandle();
|
||||
}
|
||||
|
||||
// Read the input form file for the specific collection
|
||||
DCInputsReader inputsReader = new DCInputsReader(formFileName);
|
||||
|
||||
List<DCInputSet> inputSets = inputsReader.getInputsByCollectionHandle(col_handle);
|
||||
List<DCInputSet> inputSets = inputsReader.getInputsByCollection(collection);
|
||||
|
||||
// Replace the values of Metadatum[] with the correct ones in case
|
||||
// of
|
||||
@@ -500,8 +492,8 @@ public class Util {
|
||||
public static List<String> differenceInSubmissionFields(Collection fromCollection, Collection toCollection)
|
||||
throws DCInputsReaderException {
|
||||
DCInputsReader reader = new DCInputsReader();
|
||||
List<DCInputSet> from = reader.getInputsByCollectionHandle(fromCollection.getHandle());
|
||||
List<DCInputSet> to = reader.getInputsByCollectionHandle(toCollection.getHandle());
|
||||
List<DCInputSet> from = reader.getInputsByCollection(fromCollection);
|
||||
List<DCInputSet> to = reader.getInputsByCollection(toCollection);
|
||||
|
||||
Set<String> fromFieldName = new HashSet<>();
|
||||
Set<String> toFieldName = new HashSet<>();
|
||||
|
@@ -153,6 +153,22 @@ public interface AuthenticationMethod {
|
||||
public List<Group> getSpecialGroups(Context context, HttpServletRequest request)
|
||||
throws SQLException;
|
||||
|
||||
/**
|
||||
* Returns true if the special groups returned by
|
||||
* {@link org.dspace.authenticate.AuthenticationMethod#getSpecialGroups(Context, HttpServletRequest)}
|
||||
* should be implicitly be added to the groups related to the current user. By
|
||||
* default this is true if the authentication method is the actual
|
||||
* authentication mechanism used by the user.
|
||||
* @param context A valid DSpace context.
|
||||
* @param request The request that started this operation, or null if not
|
||||
* applicable.
|
||||
* @return true is the special groups must be considered, false
|
||||
* otherwise
|
||||
*/
|
||||
public default boolean areSpecialGroupsApplicable(Context context, HttpServletRequest request) {
|
||||
return getName().equals(context.getAuthenticationMethod());
|
||||
}
|
||||
|
||||
/**
|
||||
* Authenticate the given or implicit credentials.
|
||||
* This is the heart of the authentication method: test the
|
||||
|
@@ -179,11 +179,16 @@ public class AuthenticationServiceImpl implements AuthenticationService {
|
||||
int totalLen = 0;
|
||||
|
||||
for (AuthenticationMethod method : getAuthenticationMethodStack()) {
|
||||
|
||||
if (method.areSpecialGroupsApplicable(context, request)) {
|
||||
|
||||
List<Group> gl = method.getSpecialGroups(context, request);
|
||||
if (gl.size() > 0) {
|
||||
result.addAll(gl);
|
||||
totalLen += gl.size();
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
|
@@ -252,6 +252,11 @@ public class IPAuthentication implements AuthenticationMethod {
|
||||
return groups;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean areSpecialGroupsApplicable(Context context, HttpServletRequest request) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int authenticate(Context context, String username, String password,
|
||||
String realm, HttpServletRequest request) throws SQLException {
|
||||
|
@@ -494,6 +494,8 @@ public class LDAPAuthentication
|
||||
try {
|
||||
SearchControls ctrls = new SearchControls();
|
||||
ctrls.setSearchScope(ldap_search_scope_value);
|
||||
// Fetch both user attributes '*' (eg. uid, cn) and operational attributes '+' (eg. memberOf)
|
||||
ctrls.setReturningAttributes(new String[] {"*", "+"});
|
||||
|
||||
String searchName;
|
||||
if (useTLS) {
|
||||
@@ -713,8 +715,8 @@ public class LDAPAuthentication
|
||||
private void assignGroups(String dn, ArrayList<String> group, Context context) {
|
||||
if (StringUtils.isNotBlank(dn)) {
|
||||
System.out.println("dn:" + dn);
|
||||
int i = 1;
|
||||
String groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + i);
|
||||
int groupmapIndex = 1;
|
||||
String groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + groupmapIndex);
|
||||
boolean cmp;
|
||||
|
||||
|
||||
@@ -725,6 +727,13 @@ public class LDAPAuthentication
|
||||
String ldapSearchString = t[0];
|
||||
String dspaceGroupName = t[1];
|
||||
|
||||
if (group == null) {
|
||||
cmp = StringUtils.containsIgnoreCase(dn, ldapSearchString + ",");
|
||||
|
||||
if (cmp) {
|
||||
assignGroup(context, groupmapIndex, dspaceGroupName);
|
||||
}
|
||||
} else {
|
||||
// list of strings with dn from LDAP groups
|
||||
// inner loop
|
||||
Iterator<String> groupIterator = group.iterator();
|
||||
@@ -741,7 +750,29 @@ public class LDAPAuthentication
|
||||
}
|
||||
|
||||
if (cmp) {
|
||||
// assign user to this group
|
||||
assignGroup(context, groupmapIndex, dspaceGroupName);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + ++groupmapIndex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add the current authenticated user to the specified group
|
||||
*
|
||||
* @param context
|
||||
* DSpace context
|
||||
*
|
||||
* @param groupmapIndex
|
||||
* authentication-ldap.login.groupmap.* key index defined in dspace.cfg
|
||||
*
|
||||
* @param dspaceGroupName
|
||||
* The DSpace group to add the user to
|
||||
*/
|
||||
private void assignGroup(Context context, int groupmapIndex, String dspaceGroupName) {
|
||||
try {
|
||||
Group ldapGroup = groupService.findByName(context, dspaceGroupName);
|
||||
if (ldapGroup != null) {
|
||||
@@ -751,7 +782,7 @@ public class LDAPAuthentication
|
||||
// The group does not exist
|
||||
log.warn(LogHelper.getHeader(context,
|
||||
"ldap_assignGroupsBasedOnLdapDn",
|
||||
"Group defined in authentication-ldap.login.groupmap." + i
|
||||
"Group defined in authentication-ldap.login.groupmap." + groupmapIndex
|
||||
+ " does not exist :: " + dspaceGroupName));
|
||||
}
|
||||
} catch (AuthorizeException ae) {
|
||||
@@ -764,12 +795,6 @@ public class LDAPAuthentication
|
||||
dspaceGroupName));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + ++i);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isUsed(final Context context, final HttpServletRequest request) {
|
||||
|
@@ -9,6 +9,10 @@ package org.dspace.authority;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.text.DateFormat;
|
||||
import java.time.DateTimeException;
|
||||
import java.time.ZoneId;
|
||||
import java.time.ZonedDateTime;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
@@ -16,6 +20,7 @@ import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.solr.common.SolrDocument;
|
||||
import org.apache.solr.common.SolrInputDocument;
|
||||
@@ -25,9 +30,6 @@ import org.dspace.content.MetadataValue;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.util.SolrUtils;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.format.DateTimeFormatter;
|
||||
import org.joda.time.format.ISODateTimeFormat;
|
||||
|
||||
/**
|
||||
* @author Antoine Snyers (antoine at atmire.com)
|
||||
@@ -192,7 +194,7 @@ public class AuthorityValue {
|
||||
}
|
||||
|
||||
/**
|
||||
* Information that can be used the choice ui
|
||||
* Information that can be used the choice ui.
|
||||
*
|
||||
* @return map
|
||||
*/
|
||||
@@ -200,42 +202,51 @@ public class AuthorityValue {
|
||||
return new HashMap<>();
|
||||
}
|
||||
|
||||
|
||||
public List<DateTimeFormatter> getDateFormatters() {
|
||||
List<DateTimeFormatter> list = new ArrayList<>();
|
||||
list.add(ISODateTimeFormat.dateTime());
|
||||
list.add(ISODateTimeFormat.dateTimeNoMillis());
|
||||
/**
|
||||
* Build a list of ISO date formatters to parse various forms.
|
||||
*
|
||||
* <p><strong>Note:</strong> any formatter which does not parse a zone or
|
||||
* offset must have a default zone set. See {@link stringToDate}.
|
||||
*
|
||||
* @return the formatters.
|
||||
*/
|
||||
static private List<DateTimeFormatter> getDateFormatters() {
|
||||
List<java.time.format.DateTimeFormatter> list = new ArrayList<>();
|
||||
list.add(java.time.format.DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[.SSS]X"));
|
||||
list.add(java.time.format.DateTimeFormatter.ISO_LOCAL_DATE_TIME
|
||||
.withZone(ZoneId.systemDefault().normalized()));
|
||||
return list;
|
||||
}
|
||||
|
||||
public Date stringToDate(String date) {
|
||||
/**
|
||||
* Convert a date string to internal form, trying several parsers.
|
||||
*
|
||||
* @param date serialized date to be converted.
|
||||
* @return converted date, or null if no parser accepted the input.
|
||||
*/
|
||||
static public Date stringToDate(String date) {
|
||||
Date result = null;
|
||||
if (StringUtils.isNotBlank(date)) {
|
||||
List<DateTimeFormatter> dateFormatters = getDateFormatters();
|
||||
boolean converted = false;
|
||||
int formatter = 0;
|
||||
while (!converted) {
|
||||
for (DateTimeFormatter formatter : getDateFormatters()) {
|
||||
try {
|
||||
DateTimeFormatter dateTimeFormatter = dateFormatters.get(formatter);
|
||||
DateTime dateTime = dateTimeFormatter.parseDateTime(date);
|
||||
result = dateTime.toDate();
|
||||
converted = true;
|
||||
} catch (IllegalArgumentException e) {
|
||||
formatter++;
|
||||
if (formatter > dateFormatters.size()) {
|
||||
converted = true;
|
||||
}
|
||||
log.error("Could not find a valid date format for: \"" + date + "\"", e);
|
||||
ZonedDateTime dateTime = ZonedDateTime.parse(date, formatter);
|
||||
result = Date.from(dateTime.toInstant());
|
||||
break;
|
||||
} catch (DateTimeException e) {
|
||||
log.debug("Input '{}' did not match {}", date, formatter);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (null == result) {
|
||||
log.error("Could not find a valid date format for: \"{}\"", date);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* log4j logger
|
||||
*/
|
||||
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthorityValue.class);
|
||||
private static Logger log = LogManager.getLogger();
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
@@ -272,6 +283,10 @@ public class AuthorityValue {
|
||||
return new AuthorityValue();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the type of authority which created this value.
|
||||
* @return type name.
|
||||
*/
|
||||
public String getAuthorityType() {
|
||||
return "internal";
|
||||
}
|
||||
|
@@ -80,7 +80,15 @@ public class DSpaceAuthorityIndexer implements AuthorityIndexerInterface, Initia
|
||||
throws SQLException, AuthorizeException {
|
||||
List<AuthorityValue> values = new ArrayList<>();
|
||||
for (String metadataField : metadataFields) {
|
||||
List<MetadataValue> metadataValues = itemService.getMetadataByMetadataString(item, metadataField);
|
||||
|
||||
String[] fieldParts = metadataField.split("\\.");
|
||||
String schema = (fieldParts.length > 0 ? fieldParts[0] : null);
|
||||
String element = (fieldParts.length > 1 ? fieldParts[1] : null);
|
||||
String qualifier = (fieldParts.length > 2 ? fieldParts[2] : null);
|
||||
|
||||
// Get metadata values without virtual metadata
|
||||
List<MetadataValue> metadataValues = itemService.getMetadata(item, schema, element, qualifier, Item.ANY,
|
||||
false);
|
||||
for (MetadataValue metadataValue : metadataValues) {
|
||||
String content = metadataValue.getValue();
|
||||
String authorityKey = metadataValue.getAuthority();
|
||||
|
@@ -451,7 +451,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
|
||||
if (e == null) {
|
||||
return false; // anonymous users can't be admins....
|
||||
} else {
|
||||
return groupService.isMember(c, e, Group.ADMIN);
|
||||
return groupService.isMember(c, e, c.getAdminGroup());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -895,7 +895,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
|
||||
return true;
|
||||
}
|
||||
} catch (SearchServiceException e) {
|
||||
log.error("Failed getting getting community/collection admin status for "
|
||||
log.error("Failed getting community/collection admin status for "
|
||||
+ context.getCurrentUser().getEmail() + " The search error is: " + e.getMessage()
|
||||
+ " The search resourceType filter was: " + query);
|
||||
}
|
||||
|
@@ -108,7 +108,7 @@ public class CrossLinks {
|
||||
} else {
|
||||
// Exact match, if the key field has no .* wildcard
|
||||
if (links.containsKey(metadata)) {
|
||||
return links.get(key);
|
||||
return links.get(metadata);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -0,0 +1,77 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.cli;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Properties;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
|
||||
/**
|
||||
* Extended version of the DefaultParser. This parser skip/ignore unknown arguments.
|
||||
*/
|
||||
public class DSpaceSkipUnknownArgumentsParser extends DefaultParser {
|
||||
|
||||
|
||||
@Override
|
||||
public CommandLine parse(Options options, String[] arguments) throws ParseException {
|
||||
return super.parse(options, getOnlyKnownArguments(options, arguments));
|
||||
}
|
||||
|
||||
@Override
|
||||
public CommandLine parse(Options options, String[] arguments, Properties properties) throws ParseException {
|
||||
return super.parse(options, getOnlyKnownArguments(options, arguments), properties);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the arguments according to the specified options and properties.
|
||||
* @param options the specified Options
|
||||
* @param arguments the command line arguments
|
||||
* @param stopAtNonOption can be ignored - an unrecognized argument is ignored, an unrecognized argument doesn't
|
||||
* stop the parsing and doesn't trigger a ParseException
|
||||
*
|
||||
* @return the list of atomic option and value tokens
|
||||
* @throws ParseException if there are any problems encountered while parsing the command line tokens.
|
||||
*/
|
||||
@Override
|
||||
public CommandLine parse(Options options, String[] arguments, boolean stopAtNonOption) throws ParseException {
|
||||
return super.parse(options, getOnlyKnownArguments(options, arguments), stopAtNonOption);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the arguments according to the specified options and properties.
|
||||
* @param options the specified Options
|
||||
* @param arguments the command line arguments
|
||||
* @param properties command line option name-value pairs
|
||||
* @param stopAtNonOption can be ignored - an unrecognized argument is ignored, an unrecognized argument doesn't
|
||||
* stop the parsing and doesn't trigger a ParseException
|
||||
*
|
||||
* @return the list of atomic option and value tokens
|
||||
* @throws ParseException if there are any problems encountered while parsing the command line tokens.
|
||||
*/
|
||||
@Override
|
||||
public CommandLine parse(Options options, String[] arguments, Properties properties, boolean stopAtNonOption)
|
||||
throws ParseException {
|
||||
return super.parse(options, getOnlyKnownArguments(options, arguments), properties, stopAtNonOption);
|
||||
}
|
||||
|
||||
|
||||
private String[] getOnlyKnownArguments(Options options, String[] arguments) {
|
||||
List<String> knownArguments = new ArrayList<>();
|
||||
for (String arg : arguments) {
|
||||
if (options.hasOption(arg)) {
|
||||
knownArguments.add(arg);
|
||||
}
|
||||
}
|
||||
return knownArguments.toArray(new String[0]);
|
||||
}
|
||||
}
|
@@ -307,10 +307,18 @@ public class Bitstream extends DSpaceObject implements DSpaceObjectLegacySupport
|
||||
return collection;
|
||||
}
|
||||
|
||||
public void setCollection(Collection collection) {
|
||||
this.collection = collection;
|
||||
}
|
||||
|
||||
public Community getCommunity() {
|
||||
return community;
|
||||
}
|
||||
|
||||
public void setCommunity(Community community) {
|
||||
this.community = community;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the asset store number where this bitstream is stored
|
||||
*
|
||||
|
@@ -276,6 +276,11 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
|
||||
//Remove our bitstream from all our bundles
|
||||
final List<Bundle> bundles = bitstream.getBundles();
|
||||
for (Bundle bundle : bundles) {
|
||||
authorizeService.authorizeAction(context, bundle, Constants.REMOVE);
|
||||
//We also need to remove the bitstream id when it's set as bundle's primary bitstream
|
||||
if (bitstream.equals(bundle.getPrimaryBitstream())) {
|
||||
bundle.unsetPrimaryBitstreamID();
|
||||
}
|
||||
bundle.removeBitstream(bitstream);
|
||||
}
|
||||
|
||||
@@ -403,7 +408,7 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
|
||||
|
||||
@Override
|
||||
public Bitstream getThumbnail(Context context, Bitstream bitstream) throws SQLException {
|
||||
Pattern pattern = Pattern.compile("^" + bitstream.getName() + ".([^.]+)$");
|
||||
Pattern pattern = getBitstreamNamePattern(bitstream);
|
||||
|
||||
for (Bundle bundle : bitstream.getBundles()) {
|
||||
for (Item item : bundle.getItems()) {
|
||||
@@ -420,6 +425,13 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
|
||||
return null;
|
||||
}
|
||||
|
||||
protected Pattern getBitstreamNamePattern(Bitstream bitstream) {
|
||||
if (bitstream.getName() != null) {
|
||||
return Pattern.compile("^" + Pattern.quote(bitstream.getName()) + ".([^.]+)$");
|
||||
}
|
||||
return Pattern.compile("^" + bitstream.getName() + ".([^.]+)$");
|
||||
}
|
||||
|
||||
@Override
|
||||
public BitstreamFormat getFormat(Context context, Bitstream bitstream) throws SQLException {
|
||||
if (bitstream.getBitstreamFormat() == null) {
|
||||
@@ -446,11 +458,16 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
|
||||
|
||||
@Override
|
||||
public Bitstream findByIdOrLegacyId(Context context, String id) throws SQLException {
|
||||
try {
|
||||
if (StringUtils.isNumeric(id)) {
|
||||
return findByLegacyId(context, Integer.parseInt(id));
|
||||
} else {
|
||||
return find(context, UUID.fromString(id));
|
||||
}
|
||||
} catch (IllegalArgumentException e) {
|
||||
// Not a valid legacy ID or valid UUID
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@@ -126,7 +126,7 @@ public class Bundle extends DSpaceObject implements DSpaceObjectLegacySupport {
|
||||
* Unset the primary bitstream ID of the bundle
|
||||
*/
|
||||
public void unsetPrimaryBitstreamID() {
|
||||
primaryBitstream = null;
|
||||
setPrimaryBitstreamID(null);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -194,7 +194,6 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
|
||||
List<Group> defaultBitstreamReadGroups =
|
||||
authorizeService.getAuthorizedGroups(context, owningCollection,
|
||||
Constants.DEFAULT_BITSTREAM_READ);
|
||||
log.info(defaultBitstreamReadGroups.size());
|
||||
// If this collection is configured with a DEFAULT_BITSTREAM_READ group, overwrite the READ policy
|
||||
// inherited from the bundle with this policy.
|
||||
if (!defaultBitstreamReadGroups.isEmpty()) {
|
||||
@@ -563,11 +562,16 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
|
||||
|
||||
@Override
|
||||
public Bundle findByIdOrLegacyId(Context context, String id) throws SQLException {
|
||||
try {
|
||||
if (StringUtils.isNumeric(id)) {
|
||||
return findByLegacyId(context, Integer.parseInt(id));
|
||||
} else {
|
||||
return find(context, UUID.fromString(id));
|
||||
}
|
||||
} catch (IllegalArgumentException e) {
|
||||
// Not a valid legacy ID or valid UUID
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@@ -135,6 +135,9 @@ public class Collection extends DSpaceObject implements DSpaceObjectLegacySuppor
|
||||
|
||||
protected void setLogo(Bitstream logo) {
|
||||
this.logo = logo;
|
||||
if (logo != null) {
|
||||
logo.setCollection(this);
|
||||
}
|
||||
setModified();
|
||||
}
|
||||
|
||||
|
@@ -895,11 +895,16 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
||||
|
||||
@Override
|
||||
public Collection findByIdOrLegacyId(Context context, String id) throws SQLException {
|
||||
try {
|
||||
if (StringUtils.isNumeric(id)) {
|
||||
return findByLegacyId(context, Integer.parseInt(id));
|
||||
} else {
|
||||
return find(context, UUID.fromString(id));
|
||||
}
|
||||
} catch (IllegalArgumentException e) {
|
||||
// Not a valid legacy ID or valid UUID
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -1021,6 +1026,61 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
||||
return resp;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection retrieveCollectionWithSubmitByEntityType(Context context, Item item,
|
||||
String entityType) throws SQLException {
|
||||
Collection ownCollection = item.getOwningCollection();
|
||||
return retrieveWithSubmitCollectionByEntityType(context, ownCollection.getCommunities(), entityType);
|
||||
}
|
||||
|
||||
private Collection retrieveWithSubmitCollectionByEntityType(Context context, List<Community> communities,
|
||||
String entityType) {
|
||||
|
||||
for (Community community : communities) {
|
||||
Collection collection = retrieveCollectionWithSubmitByCommunityAndEntityType(context, community,
|
||||
entityType);
|
||||
if (collection != null) {
|
||||
return collection;
|
||||
}
|
||||
}
|
||||
|
||||
for (Community community : communities) {
|
||||
List<Community> parentCommunities = community.getParentCommunities();
|
||||
Collection collection = retrieveWithSubmitCollectionByEntityType(context, parentCommunities, entityType);
|
||||
if (collection != null) {
|
||||
return collection;
|
||||
}
|
||||
}
|
||||
|
||||
return retrieveCollectionWithSubmitByCommunityAndEntityType(context, null, entityType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection retrieveCollectionWithSubmitByCommunityAndEntityType(Context context, Community community,
|
||||
String entityType) {
|
||||
context.turnOffAuthorisationSystem();
|
||||
List<Collection> collections;
|
||||
try {
|
||||
collections = findCollectionsWithSubmit(null, context, community, entityType, 0, 1);
|
||||
} catch (SQLException | SearchServiceException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
context.restoreAuthSystemState();
|
||||
if (collections != null && collections.size() > 0) {
|
||||
return collections.get(0);
|
||||
}
|
||||
if (community != null) {
|
||||
for (Community subCommunity : community.getSubcommunities()) {
|
||||
Collection collection = retrieveCollectionWithSubmitByCommunityAndEntityType(context,
|
||||
subCommunity, entityType);
|
||||
if (collection != null) {
|
||||
return collection;
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Collection> findCollectionsWithSubmit(String q, Context context, Community community, String entityType,
|
||||
int offset, int limit) throws SQLException, SearchServiceException {
|
||||
|
@@ -123,6 +123,9 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
|
||||
|
||||
void setLogo(Bitstream logo) {
|
||||
this.logo = logo;
|
||||
if (logo != null) {
|
||||
logo.setCommunity(this);
|
||||
}
|
||||
setModified();
|
||||
}
|
||||
|
||||
|
@@ -694,11 +694,16 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
|
||||
|
||||
@Override
|
||||
public Community findByIdOrLegacyId(Context context, String id) throws SQLException {
|
||||
try {
|
||||
if (StringUtils.isNumeric(id)) {
|
||||
return findByLegacyId(context, Integer.parseInt(id));
|
||||
} else {
|
||||
return find(context, UUID.fromString(id));
|
||||
}
|
||||
} catch (IllegalArgumentException e) {
|
||||
// Not a valid legacy ID or valid UUID
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@@ -93,7 +93,7 @@ public class InstallItemServiceImpl implements InstallItemService {
|
||||
// As this is a BRAND NEW item, as a final step we need to remove the
|
||||
// submitter item policies created during deposit and replace them with
|
||||
// the default policies from the collection.
|
||||
itemService.inheritCollectionDefaultPolicies(c, item, collection);
|
||||
itemService.inheritCollectionDefaultPolicies(c, item, collection, false);
|
||||
|
||||
return item;
|
||||
}
|
||||
@@ -150,7 +150,6 @@ public class InstallItemServiceImpl implements InstallItemService {
|
||||
return finishItem(c, item, is);
|
||||
}
|
||||
|
||||
|
||||
protected void populateMetadata(Context c, Item item)
|
||||
throws SQLException, AuthorizeException {
|
||||
// create accession date
|
||||
@@ -158,15 +157,6 @@ public class InstallItemServiceImpl implements InstallItemService {
|
||||
itemService.addMetadata(c, item, MetadataSchemaEnum.DC.getName(),
|
||||
"date", "accessioned", null, now.toString());
|
||||
|
||||
// add date available if not under embargo, otherwise it will
|
||||
// be set when the embargo is lifted.
|
||||
// this will flush out fatal embargo metadata
|
||||
// problems before we set inArchive.
|
||||
if (embargoService.getEmbargoTermsAsDate(c, item) == null) {
|
||||
itemService.addMetadata(c, item, MetadataSchemaEnum.DC.getName(),
|
||||
"date", "available", null, now.toString());
|
||||
}
|
||||
|
||||
// If issue date is set as "today" (literal string), then set it to current date
|
||||
// In the below loop, we temporarily clear all issued dates and re-add, one-by-one,
|
||||
// replacing "today" with today's date.
|
||||
@@ -271,4 +261,28 @@ public class InstallItemServiceImpl implements InstallItemService {
|
||||
|
||||
return myMessage.toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getSubmittedByProvenanceMessage(Context context, Item item) throws SQLException {
|
||||
// get date
|
||||
DCDate now = DCDate.getCurrent();
|
||||
|
||||
// Create provenance description
|
||||
StringBuffer provmessage = new StringBuffer();
|
||||
|
||||
if (item.getSubmitter() != null) {
|
||||
provmessage.append("Submitted by ").append(item.getSubmitter().getFullName())
|
||||
.append(" (").append(item.getSubmitter().getEmail()).append(") on ")
|
||||
.append(now.toString());
|
||||
} else {
|
||||
// else, null submitter
|
||||
provmessage.append("Submitted by unknown (probably automated) on")
|
||||
.append(now.toString());
|
||||
}
|
||||
provmessage.append("\n");
|
||||
|
||||
// add sizes and checksums of bitstreams
|
||||
provmessage.append(getBitstreamProvenanceMessage(context, item));
|
||||
return provmessage.toString();
|
||||
}
|
||||
}
|
||||
|
@@ -77,6 +77,7 @@ import org.dspace.orcid.service.OrcidQueueService;
|
||||
import org.dspace.orcid.service.OrcidSynchronizationService;
|
||||
import org.dspace.orcid.service.OrcidTokenService;
|
||||
import org.dspace.profile.service.ResearcherProfileService;
|
||||
import org.dspace.qaevent.dao.QAEventsDAO;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.versioning.service.VersioningService;
|
||||
import org.dspace.workflow.WorkflowItemService;
|
||||
@@ -170,6 +171,9 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
|
||||
@Autowired(required = true)
|
||||
protected SubscribeService subscribeService;
|
||||
|
||||
@Autowired
|
||||
private QAEventsDAO qaEventsDao;
|
||||
|
||||
protected ItemServiceImpl() {
|
||||
super();
|
||||
}
|
||||
@@ -819,6 +823,11 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
|
||||
orcidToken.setProfileItem(null);
|
||||
}
|
||||
|
||||
List<QAEventProcessed> qaEvents = qaEventsDao.findByItem(context, item);
|
||||
for (QAEventProcessed qaEvent : qaEvents) {
|
||||
qaEventsDao.delete(context, qaEvent);
|
||||
}
|
||||
|
||||
//Only clear collections after we have removed everything else from the item
|
||||
item.clearCollections();
|
||||
item.setOwningCollection(null);
|
||||
@@ -920,8 +929,16 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
|
||||
@Override
|
||||
public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection)
|
||||
throws SQLException, AuthorizeException {
|
||||
adjustItemPolicies(context, item, collection);
|
||||
adjustBundleBitstreamPolicies(context, item, collection);
|
||||
inheritCollectionDefaultPolicies(context, item, collection, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection,
|
||||
boolean replaceReadRPWithCollectionRP)
|
||||
throws SQLException, AuthorizeException {
|
||||
|
||||
adjustItemPolicies(context, item, collection, replaceReadRPWithCollectionRP);
|
||||
adjustBundleBitstreamPolicies(context, item, collection, replaceReadRPWithCollectionRP);
|
||||
|
||||
log.debug(LogHelper.getHeader(context, "item_inheritCollectionDefaultPolicies",
|
||||
"item_id=" + item.getID()));
|
||||
@@ -930,6 +947,13 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
|
||||
@Override
|
||||
public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection)
|
||||
throws SQLException, AuthorizeException {
|
||||
adjustBundleBitstreamPolicies(context, item, collection, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection,
|
||||
boolean replaceReadRPWithCollectionRP)
|
||||
throws SQLException, AuthorizeException {
|
||||
// Bundles should inherit from DEFAULT_ITEM_READ so that if the item is readable, the files
|
||||
// can be listed (even if they are themselves not readable as per DEFAULT_BITSTREAM_READ or other
|
||||
// policies or embargos applied
|
||||
@@ -948,10 +972,19 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
|
||||
}
|
||||
// TODO: should we also throw an exception if no DEFAULT_ITEM_READ?
|
||||
|
||||
boolean removeCurrentReadRPBitstream =
|
||||
replaceReadRPWithCollectionRP && defaultCollectionBitstreamPolicies.size() > 0;
|
||||
boolean removeCurrentReadRPBundle =
|
||||
replaceReadRPWithCollectionRP && defaultCollectionBundlePolicies.size() > 0;
|
||||
|
||||
// remove all policies from bundles, add new ones
|
||||
// Remove bundles
|
||||
List<Bundle> bunds = item.getBundles();
|
||||
for (Bundle mybundle : bunds) {
|
||||
// If collection has default READ policies, remove the bundle's READ policies.
|
||||
if (removeCurrentReadRPBundle) {
|
||||
authorizeService.removePoliciesActionFilter(context, mybundle, Constants.READ);
|
||||
}
|
||||
|
||||
// if come from InstallItem: remove all submission/workflow policies
|
||||
authorizeService.removeAllPoliciesByDSOAndType(context, mybundle, ResourcePolicy.TYPE_SUBMISSION);
|
||||
@@ -960,6 +993,11 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
|
||||
addDefaultPoliciesNotInPlace(context, mybundle, defaultCollectionBundlePolicies);
|
||||
|
||||
for (Bitstream bitstream : mybundle.getBitstreams()) {
|
||||
// If collection has default READ policies, remove the bundle's READ policies.
|
||||
if (removeCurrentReadRPBitstream) {
|
||||
authorizeService.removePoliciesActionFilter(context, bitstream, Constants.READ);
|
||||
}
|
||||
|
||||
// if come from InstallItem: remove all submission/workflow policies
|
||||
removeAllPoliciesAndAddDefault(context, bitstream, defaultItemPolicies,
|
||||
defaultCollectionBitstreamPolicies);
|
||||
@@ -970,6 +1008,13 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
|
||||
@Override
|
||||
public void adjustBitstreamPolicies(Context context, Item item, Collection collection, Bitstream bitstream)
|
||||
throws SQLException, AuthorizeException {
|
||||
adjustBitstreamPolicies(context, item, collection, bitstream, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void adjustBitstreamPolicies(Context context, Item item, Collection collection , Bitstream bitstream,
|
||||
boolean replaceReadRPWithCollectionRP)
|
||||
throws SQLException, AuthorizeException {
|
||||
List<ResourcePolicy> defaultCollectionPolicies = authorizeService
|
||||
.getPoliciesActionFilter(context, collection, Constants.DEFAULT_BITSTREAM_READ);
|
||||
|
||||
@@ -998,10 +1043,22 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
|
||||
@Override
|
||||
public void adjustItemPolicies(Context context, Item item, Collection collection)
|
||||
throws SQLException, AuthorizeException {
|
||||
adjustItemPolicies(context, item, collection, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void adjustItemPolicies(Context context, Item item, Collection collection,
|
||||
boolean replaceReadRPWithCollectionRP)
|
||||
throws SQLException, AuthorizeException {
|
||||
// read collection's default READ policies
|
||||
List<ResourcePolicy> defaultCollectionPolicies = authorizeService
|
||||
.getPoliciesActionFilter(context, collection, Constants.DEFAULT_ITEM_READ);
|
||||
|
||||
// If collection has defaultREAD policies, remove the item's READ policies.
|
||||
if (replaceReadRPWithCollectionRP && defaultCollectionPolicies.size() > 0) {
|
||||
authorizeService.removePoliciesActionFilter(context, item, Constants.READ);
|
||||
}
|
||||
|
||||
// MUST have default policies
|
||||
if (defaultCollectionPolicies.size() < 1) {
|
||||
throw new SQLException("Collection " + collection.getID()
|
||||
@@ -1378,16 +1435,6 @@ prevent the generation of resource policy entry values with null dspace_object a
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator<Item> findByMetadataQuery(Context context, List<List<MetadataField>> listFieldList,
|
||||
List<String> query_op, List<String> query_val, List<UUID> collectionUuids,
|
||||
String regexClause, int offset, int limit)
|
||||
throws SQLException, AuthorizeException, IOException {
|
||||
return itemDAO
|
||||
.findByMetadataQuery(context, listFieldList, query_op, query_val, collectionUuids, regexClause, offset,
|
||||
limit);
|
||||
}
|
||||
|
||||
@Override
|
||||
public DSpaceObject getAdminObject(Context context, Item item, int action) throws SQLException {
|
||||
DSpaceObject adminObject = null;
|
||||
@@ -1561,11 +1608,16 @@ prevent the generation of resource policy entry values with null dspace_object a
|
||||
|
||||
@Override
|
||||
public Item findByIdOrLegacyId(Context context, String id) throws SQLException {
|
||||
try {
|
||||
if (StringUtils.isNumeric(id)) {
|
||||
return findByLegacyId(context, Integer.parseInt(id));
|
||||
} else {
|
||||
return find(context, UUID.fromString(id));
|
||||
}
|
||||
} catch (IllegalArgumentException e) {
|
||||
// Not a valid legacy ID or valid UUID
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
213
dspace-api/src/main/java/org/dspace/content/QAEvent.java
Normal file
213
dspace-api/src/main/java/org/dspace/content/QAEvent.java
Normal file
@@ -0,0 +1,213 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.content;
|
||||
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.security.MessageDigest;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.util.Date;
|
||||
|
||||
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
|
||||
import org.dspace.qaevent.service.dto.OpenaireMessageDTO;
|
||||
import org.dspace.qaevent.service.dto.QAMessageDTO;
|
||||
import org.dspace.util.RawJsonDeserializer;
|
||||
|
||||
/**
|
||||
* This class represent the Quality Assurance broker data as loaded in our solr
|
||||
* qaevent core
|
||||
*
|
||||
*/
|
||||
public class QAEvent {
|
||||
public static final char[] HEX_DIGITS = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e',
|
||||
'f' };
|
||||
public static final String ACCEPTED = "accepted";
|
||||
public static final String REJECTED = "rejected";
|
||||
public static final String DISCARDED = "discarded";
|
||||
|
||||
public static final String OPENAIRE_SOURCE = "openaire";
|
||||
|
||||
private String source;
|
||||
|
||||
private String eventId;
|
||||
/**
|
||||
* contains the targeted dspace object,
|
||||
* ie: oai:www.openstarts.units.it:123456789/1120 contains the handle
|
||||
* of the DSpace pbject in its final part 123456789/1120
|
||||
* */
|
||||
private String originalId;
|
||||
|
||||
/**
|
||||
* evaluated with the targeted dspace object id
|
||||
*
|
||||
* */
|
||||
private String target;
|
||||
|
||||
private String related;
|
||||
|
||||
private String title;
|
||||
|
||||
private String topic;
|
||||
|
||||
private double trust;
|
||||
|
||||
@JsonDeserialize(using = RawJsonDeserializer.class)
|
||||
private String message;
|
||||
|
||||
private Date lastUpdate;
|
||||
|
||||
private String status = "PENDING";
|
||||
|
||||
public QAEvent() {
|
||||
}
|
||||
|
||||
public QAEvent(String source, String originalId, String target, String title,
|
||||
String topic, double trust, String message, Date lastUpdate) {
|
||||
super();
|
||||
this.source = source;
|
||||
this.originalId = originalId;
|
||||
this.target = target;
|
||||
this.title = title;
|
||||
this.topic = topic;
|
||||
this.trust = trust;
|
||||
this.message = message;
|
||||
this.lastUpdate = lastUpdate;
|
||||
try {
|
||||
computedEventId();
|
||||
} catch (NoSuchAlgorithmException | UnsupportedEncodingException e) {
|
||||
throw new IllegalStateException(e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public String getOriginalId() {
|
||||
return originalId;
|
||||
}
|
||||
|
||||
public void setOriginalId(String originalId) {
|
||||
this.originalId = originalId;
|
||||
}
|
||||
|
||||
public String getTitle() {
|
||||
return title;
|
||||
}
|
||||
|
||||
public void setTitle(String title) {
|
||||
this.title = title;
|
||||
}
|
||||
|
||||
public String getTopic() {
|
||||
return topic;
|
||||
}
|
||||
|
||||
public void setTopic(String topic) {
|
||||
this.topic = topic;
|
||||
}
|
||||
|
||||
public double getTrust() {
|
||||
return trust;
|
||||
}
|
||||
|
||||
public void setTrust(double trust) {
|
||||
this.trust = trust;
|
||||
}
|
||||
|
||||
public String getMessage() {
|
||||
return message;
|
||||
}
|
||||
|
||||
public void setMessage(String message) {
|
||||
this.message = message;
|
||||
}
|
||||
|
||||
public String getEventId() {
|
||||
if (eventId == null) {
|
||||
try {
|
||||
computedEventId();
|
||||
} catch (NoSuchAlgorithmException | UnsupportedEncodingException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
return eventId;
|
||||
}
|
||||
|
||||
public void setEventId(String eventId) {
|
||||
this.eventId = eventId;
|
||||
}
|
||||
|
||||
public String getTarget() {
|
||||
return target;
|
||||
}
|
||||
|
||||
public void setTarget(String target) {
|
||||
this.target = target;
|
||||
}
|
||||
|
||||
public Date getLastUpdate() {
|
||||
return lastUpdate;
|
||||
}
|
||||
|
||||
public void setLastUpdate(Date lastUpdate) {
|
||||
this.lastUpdate = lastUpdate;
|
||||
}
|
||||
|
||||
public void setRelated(String related) {
|
||||
this.related = related;
|
||||
}
|
||||
|
||||
public String getRelated() {
|
||||
return related;
|
||||
}
|
||||
|
||||
public void setStatus(String status) {
|
||||
this.status = status;
|
||||
}
|
||||
|
||||
public String getStatus() {
|
||||
return status;
|
||||
}
|
||||
|
||||
public String getSource() {
|
||||
return source != null ? source : OPENAIRE_SOURCE;
|
||||
}
|
||||
|
||||
public void setSource(String source) {
|
||||
this.source = source;
|
||||
}
|
||||
|
||||
/*
|
||||
* DTO constructed via Jackson use empty constructor. In this case, the eventId
|
||||
* must be compute on the get method. This method create a signature based on
|
||||
* the event fields and store it in the eventid attribute.
|
||||
*/
|
||||
private void computedEventId() throws NoSuchAlgorithmException, UnsupportedEncodingException {
|
||||
MessageDigest digester = MessageDigest.getInstance("MD5");
|
||||
String dataToString = "source=" + source + ",originalId=" + originalId + ", title=" + title + ", topic="
|
||||
+ topic + ", trust=" + trust + ", message=" + message;
|
||||
digester.update(dataToString.getBytes("UTF-8"));
|
||||
byte[] signature = digester.digest();
|
||||
char[] arr = new char[signature.length << 1];
|
||||
for (int i = 0; i < signature.length; i++) {
|
||||
int b = signature[i];
|
||||
int idx = i << 1;
|
||||
arr[idx] = HEX_DIGITS[(b >> 4) & 0xf];
|
||||
arr[idx + 1] = HEX_DIGITS[b & 0xf];
|
||||
}
|
||||
eventId = new String(arr);
|
||||
|
||||
}
|
||||
|
||||
public Class<? extends QAMessageDTO> getMessageDtoClass() {
|
||||
switch (getSource()) {
|
||||
case OPENAIRE_SOURCE:
|
||||
return OpenaireMessageDTO.class;
|
||||
default:
|
||||
throw new IllegalArgumentException("Unknown event's source: " + getSource());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,82 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.content;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Date;
|
||||
import javax.persistence.Column;
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.Id;
|
||||
import javax.persistence.JoinColumn;
|
||||
import javax.persistence.ManyToOne;
|
||||
import javax.persistence.Table;
|
||||
import javax.persistence.Temporal;
|
||||
import javax.persistence.TemporalType;
|
||||
|
||||
import org.dspace.eperson.EPerson;
|
||||
|
||||
/**
|
||||
* This class represent the stored information about processed notification
|
||||
* broker events
|
||||
*
|
||||
*/
|
||||
@Entity
|
||||
@Table(name = "qaevent_processed")
|
||||
public class QAEventProcessed implements Serializable {
|
||||
|
||||
private static final long serialVersionUID = 3427340199132007814L;
|
||||
|
||||
@Id
|
||||
@Column(name = "qaevent_id")
|
||||
private String eventId;
|
||||
|
||||
@Temporal(TemporalType.TIMESTAMP)
|
||||
@Column(name = "qaevent_timestamp")
|
||||
private Date eventTimestamp;
|
||||
|
||||
@JoinColumn(name = "eperson_uuid")
|
||||
@ManyToOne
|
||||
private EPerson eperson;
|
||||
|
||||
@JoinColumn(name = "item_uuid")
|
||||
@ManyToOne
|
||||
private Item item;
|
||||
|
||||
public String getEventId() {
|
||||
return eventId;
|
||||
}
|
||||
|
||||
public void setEventId(String eventId) {
|
||||
this.eventId = eventId;
|
||||
}
|
||||
|
||||
public Date getEventTimestamp() {
|
||||
return eventTimestamp;
|
||||
}
|
||||
|
||||
public void setEventTimestamp(Date eventTimestamp) {
|
||||
this.eventTimestamp = eventTimestamp;
|
||||
}
|
||||
|
||||
public EPerson getEperson() {
|
||||
return eperson;
|
||||
}
|
||||
|
||||
public void setEperson(EPerson eperson) {
|
||||
this.eperson = eperson;
|
||||
}
|
||||
|
||||
public Item getItem() {
|
||||
return item;
|
||||
}
|
||||
|
||||
public void setItem(Item item) {
|
||||
this.item = item;
|
||||
}
|
||||
|
||||
}
|
@@ -17,6 +17,7 @@ import java.util.Map.Entry;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.lang3.ArrayUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.util.DCInput;
|
||||
@@ -24,7 +25,6 @@ import org.dspace.app.util.DCInputSet;
|
||||
import org.dspace.app.util.DCInputsReader;
|
||||
import org.dspace.app.util.DCInputsReaderException;
|
||||
import org.dspace.app.util.SubmissionConfig;
|
||||
import org.dspace.app.util.SubmissionConfigReader;
|
||||
import org.dspace.app.util.SubmissionConfigReaderException;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.MetadataValue;
|
||||
@@ -34,6 +34,8 @@ import org.dspace.core.service.PluginService;
|
||||
import org.dspace.discovery.configuration.DiscoveryConfigurationService;
|
||||
import org.dspace.discovery.configuration.DiscoverySearchFilterFacet;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.submit.factory.SubmissionServiceFactory;
|
||||
import org.dspace.submit.service.SubmissionConfigService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
@@ -87,7 +89,7 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
|
||||
protected Map<String, DSpaceControlledVocabularyIndex> vocabularyIndexMap = new HashMap<>();
|
||||
|
||||
// the item submission reader
|
||||
private SubmissionConfigReader itemSubmissionConfigReader;
|
||||
private SubmissionConfigService submissionConfigService;
|
||||
|
||||
@Autowired(required = true)
|
||||
protected ConfigurationService configurationService;
|
||||
@@ -134,7 +136,7 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
|
||||
private synchronized void init() {
|
||||
if (!initialized) {
|
||||
try {
|
||||
itemSubmissionConfigReader = new SubmissionConfigReader();
|
||||
submissionConfigService = SubmissionServiceFactory.getInstance().getSubmissionConfigService();
|
||||
} catch (SubmissionConfigReaderException e) {
|
||||
// the system is in an illegal state as the submission definition is not valid
|
||||
throw new IllegalStateException("Error reading the item submission configuration: " + e.getMessage(),
|
||||
@@ -239,8 +241,8 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
|
||||
// there is an authority configured for the metadata valid for some collections,
|
||||
// check if it is the requested collection
|
||||
Map<String, ChoiceAuthority> controllerFormDef = controllerFormDefinitions.get(fieldKey);
|
||||
SubmissionConfig submissionConfig = itemSubmissionConfigReader
|
||||
.getSubmissionConfigByCollection(collection.getHandle());
|
||||
SubmissionConfig submissionConfig = submissionConfigService
|
||||
.getSubmissionConfigByCollection(collection);
|
||||
String submissionName = submissionConfig.getSubmissionName();
|
||||
// check if the requested collection has a submission definition that use an authority for the metadata
|
||||
if (controllerFormDef.containsKey(submissionName)) {
|
||||
@@ -261,14 +263,14 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clearCache() {
|
||||
public void clearCache() throws SubmissionConfigReaderException {
|
||||
controller.clear();
|
||||
authorities.clear();
|
||||
presentation.clear();
|
||||
closed.clear();
|
||||
controllerFormDefinitions.clear();
|
||||
authoritiesFormDefinitions.clear();
|
||||
itemSubmissionConfigReader = null;
|
||||
submissionConfigService.reload();
|
||||
initialized = false;
|
||||
}
|
||||
|
||||
@@ -318,7 +320,7 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
|
||||
*/
|
||||
private void autoRegisterChoiceAuthorityFromInputReader() {
|
||||
try {
|
||||
List<SubmissionConfig> submissionConfigs = itemSubmissionConfigReader
|
||||
List<SubmissionConfig> submissionConfigs = submissionConfigService
|
||||
.getAllSubmissionConfigs(Integer.MAX_VALUE, 0);
|
||||
DCInputsReader dcInputsReader = new DCInputsReader();
|
||||
|
||||
@@ -489,10 +491,11 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
|
||||
init();
|
||||
ChoiceAuthority ma = controller.get(fieldKey);
|
||||
if (ma == null && collection != null) {
|
||||
SubmissionConfigReader configReader;
|
||||
SubmissionConfigService configReaderService;
|
||||
try {
|
||||
configReader = new SubmissionConfigReader();
|
||||
SubmissionConfig submissionName = configReader.getSubmissionConfigByCollection(collection.getHandle());
|
||||
configReaderService = SubmissionServiceFactory.getInstance().getSubmissionConfigService();
|
||||
SubmissionConfig submissionName = configReaderService
|
||||
.getSubmissionConfigByCollection(collection);
|
||||
ma = controllerFormDefinitions.get(fieldKey).get(submissionName.getSubmissionName());
|
||||
} catch (SubmissionConfigReaderException e) {
|
||||
// the system is in an illegal state as the submission definition is not valid
|
||||
@@ -557,6 +560,15 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
|
||||
init();
|
||||
ChoiceAuthority source = this.getChoiceAuthorityByAuthorityName(nameVocab);
|
||||
if (source != null && source instanceof DSpaceControlledVocabulary) {
|
||||
|
||||
// First, check if this vocabulary index is disabled
|
||||
String[] vocabulariesDisabled = configurationService
|
||||
.getArrayProperty("webui.browse.vocabularies.disabled");
|
||||
if (vocabulariesDisabled != null && ArrayUtils.contains(vocabulariesDisabled, nameVocab)) {
|
||||
// Discard this vocabulary browse index
|
||||
return null;
|
||||
}
|
||||
|
||||
Set<String> metadataFields = new HashSet<>();
|
||||
Map<String, List<String>> formsToFields = this.authoritiesFormDefinitions.get(nameVocab);
|
||||
for (Map.Entry<String, List<String>> formToField : formsToFields.entrySet()) {
|
||||
@@ -585,6 +597,12 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// If there is no matching facet, return null to ignore this vocabulary index
|
||||
if (matchingFacet == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
DSpaceControlledVocabularyIndex vocabularyIndex =
|
||||
new DSpaceControlledVocabularyIndex((DSpaceControlledVocabulary) source, metadataFields,
|
||||
matchingFacet);
|
||||
|
@@ -156,7 +156,8 @@ public class DCInputAuthority extends SelfNamedPlugin implements ChoiceAuthority
|
||||
int found = 0;
|
||||
List<Choice> v = new ArrayList<Choice>();
|
||||
for (int i = 0; i < valuesLocale.length; ++i) {
|
||||
if (query == null || StringUtils.containsIgnoreCase(valuesLocale[i], query)) {
|
||||
// In a DCInputAuthority context, a user will want to query the labels, not the values
|
||||
if (query == null || StringUtils.containsIgnoreCase(labelsLocale[i], query)) {
|
||||
if (found >= start && v.size() < limit) {
|
||||
v.add(new Choice(null, valuesLocale[i], labelsLocale[i]));
|
||||
if (valuesLocale[i].equalsIgnoreCase(query)) {
|
||||
|
@@ -59,7 +59,37 @@ public class SHERPARoMEOJournalTitle implements ChoiceAuthority {
|
||||
|
||||
@Override
|
||||
public Choices getBestMatch(String text, String locale) {
|
||||
return getMatches(text, 0, 1, locale);
|
||||
// punt if there is no query text
|
||||
if (text == null || text.trim().length() == 0) {
|
||||
return new Choices(true);
|
||||
}
|
||||
int limit = 10;
|
||||
SHERPAService sherpaService = new DSpace().getSingletonService(SHERPAService.class);
|
||||
SHERPAResponse sherpaResponse = sherpaService.performRequest("publication", "title",
|
||||
"equals", text, 0, limit);
|
||||
Choices result;
|
||||
if (CollectionUtils.isNotEmpty(sherpaResponse.getJournals())) {
|
||||
List<Choice> list = sherpaResponse
|
||||
.getJournals().stream()
|
||||
.map(sherpaJournal -> new Choice(sherpaJournal.getIssns().get(0),
|
||||
sherpaJournal.getTitles().get(0), sherpaJournal.getTitles().get(0)))
|
||||
.collect(Collectors.toList());
|
||||
int total = sherpaResponse.getJournals().size();
|
||||
|
||||
int confidence;
|
||||
if (list.isEmpty()) {
|
||||
confidence = Choices.CF_NOTFOUND;
|
||||
} else if (list.size() == 1) {
|
||||
confidence = Choices.CF_UNCERTAIN;
|
||||
} else {
|
||||
confidence = Choices.CF_AMBIGUOUS;
|
||||
}
|
||||
result = new Choices(list.toArray(new Choice[list.size()]), 0, total, confidence,
|
||||
total > limit);
|
||||
} else {
|
||||
result = new Choices(false);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@@ -60,7 +60,38 @@ public class SHERPARoMEOPublisher implements ChoiceAuthority {
|
||||
|
||||
@Override
|
||||
public Choices getBestMatch(String text, String locale) {
|
||||
return getMatches(text, 0, 1, locale);
|
||||
// punt if there is no query text
|
||||
if (text == null || text.trim().length() == 0) {
|
||||
return new Choices(true);
|
||||
}
|
||||
int limit = 10;
|
||||
SHERPAService sherpaService = new DSpace().getSingletonService(SHERPAService.class);
|
||||
SHERPAPublisherResponse sherpaResponse = sherpaService.performPublisherRequest("publisher", "name",
|
||||
"equals", text, 0, limit);
|
||||
Choices result;
|
||||
if (CollectionUtils.isNotEmpty(sherpaResponse.getPublishers())) {
|
||||
List<Choice> list = sherpaResponse
|
||||
.getPublishers().stream()
|
||||
.map(sherpaPublisher ->
|
||||
new Choice(sherpaPublisher.getIdentifier(),
|
||||
sherpaPublisher.getName(), sherpaPublisher.getName()))
|
||||
.collect(Collectors.toList());
|
||||
int total = sherpaResponse.getPublishers().size();
|
||||
|
||||
int confidence;
|
||||
if (list.isEmpty()) {
|
||||
confidence = Choices.CF_NOTFOUND;
|
||||
} else if (list.size() == 1) {
|
||||
confidence = Choices.CF_UNCERTAIN;
|
||||
} else {
|
||||
confidence = Choices.CF_AMBIGUOUS;
|
||||
}
|
||||
result = new Choices(list.toArray(new Choice[list.size()]), 0, total, confidence,
|
||||
total > limit);
|
||||
} else {
|
||||
result = new Choices(false);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@@ -10,6 +10,7 @@ package org.dspace.content.authority.service;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.dspace.app.util.SubmissionConfigReaderException;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.MetadataValue;
|
||||
import org.dspace.content.authority.Choice;
|
||||
@@ -174,7 +175,7 @@ public interface ChoiceAuthorityService {
|
||||
/**
|
||||
* This method has been created to have a way of clearing the cache kept inside the service
|
||||
*/
|
||||
public void clearCache();
|
||||
public void clearCache() throws SubmissionConfigReaderException;
|
||||
|
||||
/**
|
||||
* Should we store the authority key (if any) for such field key and collection?
|
||||
|
@@ -11,7 +11,6 @@ import java.sql.SQLException;
|
||||
import java.util.Date;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Community;
|
||||
@@ -80,10 +79,6 @@ public interface ItemDAO extends DSpaceObjectLegacySupportDAO<Item> {
|
||||
public Iterator<Item> findByMetadataField(Context context, MetadataField metadataField, String value,
|
||||
boolean inArchive) throws SQLException;
|
||||
|
||||
public Iterator<Item> findByMetadataQuery(Context context, List<List<MetadataField>> listFieldList,
|
||||
List<String> query_op, List<String> query_val, List<UUID> collectionUuids,
|
||||
String regexClause, int offset, int limit) throws SQLException;
|
||||
|
||||
public Iterator<Item> findByAuthorityValue(Context context, MetadataField metadataField, String authority,
|
||||
boolean inArchive) throws SQLException;
|
||||
|
||||
|
@@ -68,9 +68,9 @@ public class BitstreamDAOImpl extends AbstractHibernateDSODAO<Bitstream> impleme
|
||||
|
||||
@Override
|
||||
public List<Bitstream> findBitstreamsWithNoRecentChecksum(Context context) throws SQLException {
|
||||
Query query = createQuery(context,
|
||||
"select b from Bitstream b where b not in (select c.bitstream from " +
|
||||
"MostRecentChecksum c)");
|
||||
Query query = createQuery(context, "SELECT b FROM MostRecentChecksum c RIGHT JOIN Bitstream b " +
|
||||
"ON c.bitstream = b WHERE c IS NULL" );
|
||||
|
||||
return query.getResultList();
|
||||
}
|
||||
|
||||
|
@@ -12,7 +12,6 @@ import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
import javax.persistence.Query;
|
||||
import javax.persistence.TemporalType;
|
||||
import javax.persistence.criteria.CriteriaBuilder;
|
||||
@@ -24,20 +23,10 @@ import org.dspace.content.Collection;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.Item_;
|
||||
import org.dspace.content.MetadataField;
|
||||
import org.dspace.content.MetadataValue;
|
||||
import org.dspace.content.dao.ItemDAO;
|
||||
import org.dspace.core.AbstractHibernateDSODAO;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.hibernate.Criteria;
|
||||
import org.hibernate.criterion.Criterion;
|
||||
import org.hibernate.criterion.DetachedCriteria;
|
||||
import org.hibernate.criterion.Order;
|
||||
import org.hibernate.criterion.Projections;
|
||||
import org.hibernate.criterion.Property;
|
||||
import org.hibernate.criterion.Restrictions;
|
||||
import org.hibernate.criterion.Subqueries;
|
||||
import org.hibernate.type.StandardBasicTypes;
|
||||
|
||||
/**
|
||||
* Hibernate implementation of the Database Access Object interface class for the Item object.
|
||||
@@ -174,120 +163,6 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
|
||||
return iterate(query);
|
||||
}
|
||||
|
||||
enum OP {
|
||||
equals {
|
||||
public Criterion buildPredicate(String val, String regexClause) {
|
||||
return Property.forName("mv.value").eq(val);
|
||||
}
|
||||
},
|
||||
not_equals {
|
||||
public Criterion buildPredicate(String val, String regexClause) {
|
||||
return OP.equals.buildPredicate(val, regexClause);
|
||||
}
|
||||
},
|
||||
like {
|
||||
public Criterion buildPredicate(String val, String regexClause) {
|
||||
return Property.forName("mv.value").like(val);
|
||||
}
|
||||
},
|
||||
not_like {
|
||||
public Criterion buildPredicate(String val, String regexClause) {
|
||||
return OP.like.buildPredicate(val, regexClause);
|
||||
}
|
||||
},
|
||||
contains {
|
||||
public Criterion buildPredicate(String val, String regexClause) {
|
||||
return Property.forName("mv.value").like("%" + val + "%");
|
||||
}
|
||||
},
|
||||
doesnt_contain {
|
||||
public Criterion buildPredicate(String val, String regexClause) {
|
||||
return OP.contains.buildPredicate(val, regexClause);
|
||||
}
|
||||
},
|
||||
exists {
|
||||
public Criterion buildPredicate(String val, String regexClause) {
|
||||
return Property.forName("mv.value").isNotNull();
|
||||
}
|
||||
},
|
||||
doesnt_exist {
|
||||
public Criterion buildPredicate(String val, String regexClause) {
|
||||
return OP.exists.buildPredicate(val, regexClause);
|
||||
}
|
||||
},
|
||||
matches {
|
||||
public Criterion buildPredicate(String val, String regexClause) {
|
||||
return Restrictions.sqlRestriction(regexClause, val, StandardBasicTypes.STRING);
|
||||
}
|
||||
},
|
||||
doesnt_match {
|
||||
public Criterion buildPredicate(String val, String regexClause) {
|
||||
return OP.matches.buildPredicate(val, regexClause);
|
||||
}
|
||||
|
||||
};
|
||||
public abstract Criterion buildPredicate(String val, String regexClause);
|
||||
}
|
||||
|
||||
@Override
|
||||
@Deprecated
|
||||
public Iterator<Item> findByMetadataQuery(Context context, List<List<MetadataField>> listFieldList,
|
||||
List<String> query_op, List<String> query_val, List<UUID> collectionUuids,
|
||||
String regexClause, int offset, int limit) throws SQLException {
|
||||
|
||||
Criteria criteria = getHibernateSession(context).createCriteria(Item.class, "item");
|
||||
criteria.setFirstResult(offset);
|
||||
criteria.setMaxResults(limit);
|
||||
|
||||
if (!collectionUuids.isEmpty()) {
|
||||
DetachedCriteria dcollCriteria = DetachedCriteria.forClass(Collection.class, "coll");
|
||||
dcollCriteria.setProjection(Projections.property("coll.id"));
|
||||
dcollCriteria.add(Restrictions.eqProperty("coll.id", "item.owningCollection"));
|
||||
dcollCriteria.add(Restrictions.in("coll.id", collectionUuids));
|
||||
criteria.add(Subqueries.exists(dcollCriteria));
|
||||
}
|
||||
|
||||
int index = Math.min(listFieldList.size(), Math.min(query_op.size(), query_val.size()));
|
||||
StringBuilder sb = new StringBuilder();
|
||||
|
||||
for (int i = 0; i < index; i++) {
|
||||
OP op = OP.valueOf(query_op.get(i));
|
||||
if (op == null) {
|
||||
log.warn("Skipping Invalid Operator: " + query_op.get(i));
|
||||
continue;
|
||||
}
|
||||
|
||||
if (op == OP.matches || op == OP.doesnt_match) {
|
||||
if (regexClause.isEmpty()) {
|
||||
log.warn("Skipping Unsupported Regex Operator: " + query_op.get(i));
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
DetachedCriteria subcriteria = DetachedCriteria.forClass(MetadataValue.class, "mv");
|
||||
subcriteria.add(Property.forName("mv.dSpaceObject").eqProperty("item.id"));
|
||||
subcriteria.setProjection(Projections.property("mv.dSpaceObject"));
|
||||
|
||||
if (!listFieldList.get(i).isEmpty()) {
|
||||
subcriteria.add(Restrictions.in("metadataField", listFieldList.get(i)));
|
||||
}
|
||||
|
||||
subcriteria.add(op.buildPredicate(query_val.get(i), regexClause));
|
||||
|
||||
if (op == OP.exists || op == OP.equals || op == OP.like || op == OP.contains || op == OP.matches) {
|
||||
criteria.add(Subqueries.exists(subcriteria));
|
||||
} else {
|
||||
criteria.add(Subqueries.notExists(subcriteria));
|
||||
}
|
||||
}
|
||||
criteria.addOrder(Order.asc("item.id"));
|
||||
|
||||
log.debug(String.format("Running custom query with %d filters", index));
|
||||
|
||||
return ((List<Item>) criteria.list()).iterator();
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator<Item> findByAuthorityValue(Context context, MetadataField metadataField, String authority,
|
||||
boolean inArchive) throws SQLException {
|
||||
|
@@ -417,6 +417,34 @@ public interface CollectionService
|
||||
public List<Collection> findCollectionsWithSubmit(String q, Context context, Community community,
|
||||
int offset, int limit) throws SQLException, SearchServiceException;
|
||||
|
||||
/**
|
||||
* Retrieve the first collection in the community or its descending that support
|
||||
* the provided entityType
|
||||
*
|
||||
* @param context the DSpace context
|
||||
* @param community the root from where the search start
|
||||
* @param entityType the requested entity type
|
||||
* @return the first collection in the community or its descending
|
||||
* that support the provided entityType
|
||||
*/
|
||||
public Collection retrieveCollectionWithSubmitByCommunityAndEntityType(Context context, Community community,
|
||||
String entityType);
|
||||
|
||||
/**
|
||||
* Retrieve the close collection to the item for which the current user has
|
||||
* 'submit' privileges that support the provided entityType. Close mean the
|
||||
* collection that can be reach with the minimum steps starting from the item
|
||||
* (owningCollection, brothers collections, etc)
|
||||
*
|
||||
* @param context the DSpace context
|
||||
* @param item the item from where the search start
|
||||
* @param entityType the requested entity type
|
||||
* @return the first collection in the community or its descending
|
||||
* that support the provided entityType
|
||||
*/
|
||||
public Collection retrieveCollectionWithSubmitByEntityType(Context context, Item item, String entityType)
|
||||
throws SQLException;
|
||||
|
||||
/**
|
||||
* Counts the number of Collection for which the current user has 'submit' privileges.
|
||||
* NOTE: for better performance, this method retrieves its results from an index (cache)
|
||||
|
@@ -83,4 +83,15 @@ public interface InstallItemService {
|
||||
public String getBitstreamProvenanceMessage(Context context, Item myitem)
|
||||
throws SQLException;
|
||||
|
||||
/**
|
||||
* Generate provenance description of direct item submission (not through workflow).
|
||||
*
|
||||
* @param context context
|
||||
* @param item the item to generate description for
|
||||
* @return provenance description
|
||||
* @throws SQLException if database error
|
||||
*/
|
||||
public String getSubmittedByProvenanceMessage(Context context, Item item)
|
||||
throws SQLException;;
|
||||
|
||||
}
|
||||
|
@@ -23,7 +23,6 @@ import org.dspace.content.Collection;
|
||||
import org.dspace.content.Community;
|
||||
import org.dspace.content.EntityType;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.MetadataField;
|
||||
import org.dspace.content.MetadataValue;
|
||||
import org.dspace.content.Thumbnail;
|
||||
import org.dspace.content.WorkspaceItem;
|
||||
@@ -473,7 +472,7 @@ public interface ItemService
|
||||
public void removeGroupPolicies(Context context, Item item, Group group) throws SQLException, AuthorizeException;
|
||||
|
||||
/**
|
||||
* remove all policies on an item and its contents, and replace them with
|
||||
* Remove all policies on an item and its contents, and replace them with
|
||||
* the DEFAULT_ITEM_READ and DEFAULT_BITSTREAM_READ policies belonging to
|
||||
* the collection.
|
||||
*
|
||||
@@ -488,6 +487,26 @@ public interface ItemService
|
||||
public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection)
|
||||
throws java.sql.SQLException, AuthorizeException;
|
||||
|
||||
/**
|
||||
* Remove all submission and workflow policies on an item and its contents, and add
|
||||
* default collection policies which are not yet already in place.
|
||||
* If overrideItemReadPolicies is true, then all read policies on the item are replaced (but only if the
|
||||
* collection has a default read policy).
|
||||
*
|
||||
* @param context DSpace context object
|
||||
* @param item item to reset policies on
|
||||
* @param collection Collection
|
||||
* @param overrideItemReadPolicies if true, all read policies on the item are replaced (but only if the
|
||||
* collection has a default read policy)
|
||||
* @throws SQLException if database error
|
||||
* if an SQL error or if no default policies found. It's a bit
|
||||
* draconian, but default policies must be enforced.
|
||||
* @throws AuthorizeException if authorization error
|
||||
*/
|
||||
public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection,
|
||||
boolean overrideItemReadPolicies)
|
||||
throws java.sql.SQLException, AuthorizeException;
|
||||
|
||||
/**
|
||||
* Adjust the Bundle and Bitstream policies to reflect what have been defined
|
||||
* during the submission/workflow. The temporary SUBMISSION and WORKFLOW
|
||||
@@ -507,6 +526,28 @@ public interface ItemService
|
||||
public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection)
|
||||
throws SQLException, AuthorizeException;
|
||||
|
||||
/**
|
||||
* Adjust the Bundle and Bitstream policies to reflect what have been defined
|
||||
* during the submission/workflow. The temporary SUBMISSION and WORKFLOW
|
||||
* policies are removed and the policies defined at the item and collection
|
||||
* level are copied and inherited as appropriate. Custom selected Item policies
|
||||
* are copied to the bundle/bitstream only if no explicit custom policies were
|
||||
* already applied to the bundle/bitstream. Collection's policies are inherited
|
||||
* if there are no other policies defined or if the append mode is defined by
|
||||
* the configuration via the core.authorization.installitem.inheritance-read.append-mode property
|
||||
*
|
||||
* @param context DSpace context object
|
||||
* @param item Item to adjust policies on
|
||||
* @param collection Collection
|
||||
* @param replaceReadRPWithCollectionRP if true, all read policies on the item are replaced (but only if the
|
||||
* collection has a default read policy)
|
||||
* @throws SQLException If database error
|
||||
* @throws AuthorizeException If authorization error
|
||||
*/
|
||||
public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection,
|
||||
boolean replaceReadRPWithCollectionRP)
|
||||
throws SQLException, AuthorizeException;
|
||||
|
||||
/**
|
||||
* Adjust the Bitstream policies to reflect what have been defined
|
||||
* during the submission/workflow. The temporary SUBMISSION and WORKFLOW
|
||||
@@ -527,6 +568,29 @@ public interface ItemService
|
||||
public void adjustBitstreamPolicies(Context context, Item item, Collection collection, Bitstream bitstream)
|
||||
throws SQLException, AuthorizeException;
|
||||
|
||||
/**
|
||||
* Adjust the Bitstream policies to reflect what have been defined
|
||||
* during the submission/workflow. The temporary SUBMISSION and WORKFLOW
|
||||
* policies are removed and the policies defined at the item and collection
|
||||
* level are copied and inherited as appropriate. Custom selected Item policies
|
||||
* are copied to the bitstream only if no explicit custom policies were
|
||||
* already applied to the bitstream. Collection's policies are inherited
|
||||
* if there are no other policies defined or if the append mode is defined by
|
||||
* the configuration via the core.authorization.installitem.inheritance-read.append-mode property
|
||||
*
|
||||
* @param context DSpace context object
|
||||
* @param item Item to adjust policies on
|
||||
* @param collection Collection
|
||||
* @param bitstream Bitstream to adjust policies on
|
||||
* @param replaceReadRPWithCollectionRP If true, all read policies on the bitstream are replaced (but only if the
|
||||
* collection has a default read policy)
|
||||
* @throws SQLException If database error
|
||||
* @throws AuthorizeException If authorization error
|
||||
*/
|
||||
public void adjustBitstreamPolicies(Context context, Item item, Collection collection, Bitstream bitstream,
|
||||
boolean replaceReadRPWithCollectionRP)
|
||||
throws SQLException, AuthorizeException;
|
||||
|
||||
|
||||
/**
|
||||
* Adjust the Item's policies to reflect what have been defined during the
|
||||
@@ -545,6 +609,26 @@ public interface ItemService
|
||||
public void adjustItemPolicies(Context context, Item item, Collection collection)
|
||||
throws SQLException, AuthorizeException;
|
||||
|
||||
/**
|
||||
* Adjust the Item's policies to reflect what have been defined during the
|
||||
* submission/workflow. The temporary SUBMISSION and WORKFLOW policies are
|
||||
* removed and the default policies defined at the collection level are
|
||||
* inherited as appropriate. Collection's policies are inherited if there are no
|
||||
* other policies defined or if the append mode is defined by the configuration
|
||||
* via the core.authorization.installitem.inheritance-read.append-mode property
|
||||
*
|
||||
* @param context DSpace context object
|
||||
* @param item Item to adjust policies on
|
||||
* @param collection Collection
|
||||
* @param replaceReadRPWithCollectionRP If true, all read policies on the item are replaced (but only if the
|
||||
* collection has a default read policy)
|
||||
* @throws SQLException If database error
|
||||
* @throws AuthorizeException If authorization error
|
||||
*/
|
||||
public void adjustItemPolicies(Context context, Item item, Collection collection,
|
||||
boolean replaceReadRPWithCollectionRP)
|
||||
throws SQLException, AuthorizeException;
|
||||
|
||||
/**
|
||||
* Moves the item from one collection to another one
|
||||
*
|
||||
@@ -664,11 +748,6 @@ public interface ItemService
|
||||
String schema, String element, String qualifier, String value)
|
||||
throws SQLException, AuthorizeException, IOException;
|
||||
|
||||
public Iterator<Item> findByMetadataQuery(Context context, List<List<MetadataField>> listFieldList,
|
||||
List<String> query_op, List<String> query_val, List<UUID> collectionUuids,
|
||||
String regexClause, int offset, int limit)
|
||||
throws SQLException, AuthorizeException, IOException;
|
||||
|
||||
/**
|
||||
* Find all the items in the archive with a given authority key value
|
||||
* in the indicated metadata field.
|
||||
|
@@ -83,13 +83,14 @@ public abstract class AbstractHibernateDSODAO<T extends DSpaceObject> extends Ab
|
||||
if (CollectionUtils.isNotEmpty(metadataFields) || StringUtils.isNotBlank(additionalWhere)) {
|
||||
//Add the where query on metadata
|
||||
query.append(" WHERE ");
|
||||
// Group the 'OR' clauses below in outer parentheses, e.g. "WHERE (clause1 OR clause2 OR clause3)".
|
||||
// Grouping these 'OR' clauses allows for later code to append 'AND' clauses without unexpected behaviors
|
||||
query.append("(");
|
||||
for (int i = 0; i < metadataFields.size(); i++) {
|
||||
MetadataField metadataField = metadataFields.get(i);
|
||||
if (StringUtils.isNotBlank(operator)) {
|
||||
query.append(" (");
|
||||
query.append("lower(STR(" + metadataField.toString()).append(".value)) ").append(operator)
|
||||
.append(" lower(:queryParam)");
|
||||
query.append(")");
|
||||
if (i < metadataFields.size() - 1) {
|
||||
query.append(" OR ");
|
||||
}
|
||||
@@ -102,6 +103,7 @@ public abstract class AbstractHibernateDSODAO<T extends DSpaceObject> extends Ab
|
||||
}
|
||||
query.append(additionalWhere);
|
||||
}
|
||||
query.append(")");
|
||||
|
||||
}
|
||||
}
|
||||
|
@@ -128,6 +128,11 @@ public class Context implements AutoCloseable {
|
||||
|
||||
private DBConnection dbConnection;
|
||||
|
||||
/**
|
||||
* The default administrator group
|
||||
*/
|
||||
private Group adminGroup;
|
||||
|
||||
public enum Mode {
|
||||
READ_ONLY,
|
||||
READ_WRITE,
|
||||
@@ -810,6 +815,15 @@ public class Context implements AutoCloseable {
|
||||
readOnlyCache.clear();
|
||||
}
|
||||
|
||||
// When going to READ_ONLY, flush database changes to ensure that the current data is retrieved
|
||||
if (newMode == Mode.READ_ONLY && mode != Mode.READ_ONLY) {
|
||||
try {
|
||||
dbConnection.flushSession();
|
||||
} catch (SQLException ex) {
|
||||
log.warn("Unable to flush database changes after switching to READ_ONLY mode", ex);
|
||||
}
|
||||
}
|
||||
|
||||
//save the new mode
|
||||
mode = newMode;
|
||||
}
|
||||
@@ -951,4 +965,15 @@ public class Context implements AutoCloseable {
|
||||
public boolean isContextUserSwitched() {
|
||||
return currentUserPreviousState != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the default "Administrator" group for DSpace administrators.
|
||||
* The result is cached in the 'adminGroup' field, so it is only looked up once.
|
||||
* This is done to improve performance, as this method is called quite often.
|
||||
*/
|
||||
public Group getAdminGroup() throws SQLException {
|
||||
return (adminGroup == null) ? EPersonServiceFactory.getInstance()
|
||||
.getGroupService()
|
||||
.findByName(this, Group.ADMIN) : adminGroup;
|
||||
}
|
||||
}
|
||||
|
@@ -148,4 +148,12 @@ public interface DBConnection<T> {
|
||||
* @throws java.sql.SQLException passed through.
|
||||
*/
|
||||
public <E extends ReloadableEntity> void uncacheEntity(E entity) throws SQLException;
|
||||
|
||||
/**
|
||||
* Do a manual flush. This synchronizes the in-memory state of the Session
|
||||
* with the database (write changes to the database)
|
||||
*
|
||||
* @throws SQLException passed through.
|
||||
*/
|
||||
public void flushSession() throws SQLException;
|
||||
}
|
||||
|
@@ -21,7 +21,6 @@ import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.Enumeration;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Properties;
|
||||
import javax.activation.DataHandler;
|
||||
@@ -41,7 +40,6 @@ import javax.mail.internet.MimeMessage;
|
||||
import javax.mail.internet.MimeMultipart;
|
||||
import javax.mail.internet.ParseException;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.velocity.Template;
|
||||
@@ -57,26 +55,40 @@ import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/**
|
||||
* Class representing an e-mail message, also used to send e-mails.
|
||||
* Class representing an e-mail message. The {@link send} method causes the
|
||||
* assembled message to be formatted and sent.
|
||||
* <p>
|
||||
* Typical use:
|
||||
* </p>
|
||||
* <pre>
|
||||
* <code>Email email = Email.getEmail(path);</code>
|
||||
* <code>email.addRecipient("foo@bar.com");</code>
|
||||
* <code>email.addArgument("John");</code>
|
||||
* <code>email.addArgument("On the Testing of DSpace");</code>
|
||||
* <code>email.send();</code>
|
||||
* </pre>
|
||||
* {@code path} is the filesystem path of an email template, typically in
|
||||
* {@code ${dspace.dir}/config/emails/} and can include the subject -- see
|
||||
* below. Templates are processed by <a href='https://velocity.apache.org/'>
|
||||
* Apache Velocity</a>. They may contain VTL directives and property
|
||||
* placeholders.
|
||||
* <p>
|
||||
* <code>Email email = new Email();</code><br>
|
||||
* <code>email.addRecipient("foo@bar.com");</code><br>
|
||||
* <code>email.addArgument("John");</code><br>
|
||||
* <code>email.addArgument("On the Testing of DSpace");</code><br>
|
||||
* <code>email.send();</code><br>
|
||||
* </p>
|
||||
* {@link addArgument(string)} adds a property to the {@code params} array
|
||||
* in the Velocity context, which can be used to replace placeholder tokens
|
||||
* in the message. These arguments are indexed by number in the order they were
|
||||
* added to the message.
|
||||
* <p>
|
||||
* <code>name</code> is the name of an email template in
|
||||
* <code>dspace-dir/config/emails/</code> (which also includes the subject.)
|
||||
* <code>arg0</code> and <code>arg1</code> are arguments to fill out the
|
||||
* message with.
|
||||
* <P>
|
||||
* Emails are formatted using Apache Velocity. Headers such as Subject may be
|
||||
* supplied by the template, by defining them using #set(). Example:
|
||||
* </p>
|
||||
* The DSpace configuration properties are also available to templates as the
|
||||
* array {@code config}, indexed by name. Example: {@code ${config.get('dspace.name')}}
|
||||
* <p>
|
||||
* Recipients and attachments may be added as needed. See {@link addRecipient},
|
||||
* {@link addAttachment(File, String)}, and
|
||||
* {@link addAttachment(InputStream, String, String)}.
|
||||
* <p>
|
||||
* Headers such as Subject may be supplied by the template, by defining them
|
||||
* using the VTL directive {@code #set()}. Only headers named in the DSpace
|
||||
* configuration array property {@code mail.message.headers} will be added.
|
||||
* <p>
|
||||
* Example:
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
@@ -91,12 +103,14 @@ import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
*
|
||||
* Thank you for sending us your submission "${params[1]}".
|
||||
*
|
||||
* --
|
||||
* The ${config.get('dspace.name')} Team
|
||||
*
|
||||
* </pre>
|
||||
*
|
||||
* <p>
|
||||
* If the example code above was used to send this mail, the resulting mail
|
||||
* would have the subject <code>Example e-mail</code> and the body would be:
|
||||
* </p>
|
||||
*
|
||||
* <pre>
|
||||
*
|
||||
@@ -105,7 +119,16 @@ import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
*
|
||||
* Thank you for sending us your submission "On the Testing of DSpace".
|
||||
*
|
||||
* --
|
||||
* The DSpace Team
|
||||
*
|
||||
* </pre>
|
||||
* <p>
|
||||
* There are two ways to load a message body. One can create an instance of
|
||||
* {@link Email} and call {@link setContent} on it, passing the body as a String. Or
|
||||
* one can use the static factory method {@link getEmail} to load a file by its
|
||||
* complete filesystem path. In either case the text will be loaded into a
|
||||
* Velocity template.
|
||||
*
|
||||
* @author Robert Tansley
|
||||
* @author Jim Downing - added attachment handling code
|
||||
@@ -115,7 +138,6 @@ public class Email {
|
||||
/**
|
||||
* The content of the message
|
||||
*/
|
||||
private String content;
|
||||
private String contentName;
|
||||
|
||||
/**
|
||||
@@ -176,13 +198,12 @@ public class Email {
|
||||
moreAttachments = new ArrayList<>(10);
|
||||
subject = "";
|
||||
template = null;
|
||||
content = "";
|
||||
replyTo = null;
|
||||
charset = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a recipient
|
||||
* Add a recipient.
|
||||
*
|
||||
* @param email the recipient's email address
|
||||
*/
|
||||
@@ -196,16 +217,24 @@ public class Email {
|
||||
* "Subject:" line must be stripped.
|
||||
*
|
||||
* @param name a name for this message body
|
||||
* @param cnt the content of the message
|
||||
* @param content the content of the message
|
||||
*/
|
||||
public void setContent(String name, String cnt) {
|
||||
content = cnt;
|
||||
public void setContent(String name, String content) {
|
||||
contentName = name;
|
||||
arguments.clear();
|
||||
|
||||
VelocityEngine templateEngine = new VelocityEngine();
|
||||
templateEngine.init(VELOCITY_PROPERTIES);
|
||||
|
||||
StringResourceRepository repo = (StringResourceRepository)
|
||||
templateEngine.getApplicationAttribute(RESOURCE_REPOSITORY_NAME);
|
||||
repo.putStringResource(contentName, content);
|
||||
// Turn content into a template.
|
||||
template = templateEngine.getTemplate(contentName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the subject of the message
|
||||
* Set the subject of the message.
|
||||
*
|
||||
* @param s the subject of the message
|
||||
*/
|
||||
@@ -214,7 +243,7 @@ public class Email {
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the reply-to email address
|
||||
* Set the reply-to email address.
|
||||
*
|
||||
* @param email the reply-to email address
|
||||
*/
|
||||
@@ -223,7 +252,7 @@ public class Email {
|
||||
}
|
||||
|
||||
/**
|
||||
* Fill out the next argument in the template
|
||||
* Fill out the next argument in the template.
|
||||
*
|
||||
* @param arg the value for the next argument
|
||||
*/
|
||||
@@ -231,6 +260,13 @@ public class Email {
|
||||
arguments.add(arg);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add an attachment bodypart to the message from an external file.
|
||||
*
|
||||
* @param f reference to a file to be attached.
|
||||
* @param name a name for the resulting bodypart in the message's MIME
|
||||
* structure.
|
||||
*/
|
||||
public void addAttachment(File f, String name) {
|
||||
attachments.add(new FileAttachment(f, name));
|
||||
}
|
||||
@@ -238,6 +274,17 @@ public class Email {
|
||||
/** When given a bad MIME type for an attachment, use this instead. */
|
||||
private static final String DEFAULT_ATTACHMENT_TYPE = "application/octet-stream";
|
||||
|
||||
/**
|
||||
* Add an attachment bodypart to the message from a byte stream.
|
||||
*
|
||||
* @param is the content of this stream will become the content of the
|
||||
* bodypart.
|
||||
* @param name a name for the resulting bodypart in the message's MIME
|
||||
* structure.
|
||||
* @param mimetype the MIME type of the resulting bodypart, such as
|
||||
* "text/pdf". If {@code null} it will default to
|
||||
* "application/octet-stream", which is MIME for "unknown format".
|
||||
*/
|
||||
public void addAttachment(InputStream is, String name, String mimetype) {
|
||||
if (null == mimetype) {
|
||||
LOG.error("Null MIME type replaced with '" + DEFAULT_ATTACHMENT_TYPE
|
||||
@@ -257,6 +304,11 @@ public class Email {
|
||||
moreAttachments.add(new InputStreamAttachment(is, name, mimetype));
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the character set of the message.
|
||||
*
|
||||
* @param cs the name of a character set, such as "UTF-8" or "EUC-JP".
|
||||
*/
|
||||
public void setCharset(String cs) {
|
||||
charset = cs;
|
||||
}
|
||||
@@ -280,15 +332,20 @@ public class Email {
|
||||
* {@code mail.message.headers} then that name and its value will be added
|
||||
* to the message's headers.
|
||||
*
|
||||
* <p>"subject" is treated specially: if {@link setSubject()} has not been called,
|
||||
* the value of any "subject" property will be used as if setSubject had
|
||||
* been called with that value. Thus a template may define its subject, but
|
||||
* the caller may override it.
|
||||
* <p>"subject" is treated specially: if {@link setSubject()} has not been
|
||||
* called, the value of any "subject" property will be used as if setSubject
|
||||
* had been called with that value. Thus a template may define its subject,
|
||||
* but the caller may override it.
|
||||
*
|
||||
* @throws MessagingException if there was a problem sending the mail.
|
||||
* @throws IOException if IO error
|
||||
*/
|
||||
public void send() throws MessagingException, IOException {
|
||||
if (null == template) {
|
||||
// No template -- no content -- PANIC!!!
|
||||
throw new MessagingException("Email has no body");
|
||||
}
|
||||
|
||||
ConfigurationService config
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
|
||||
@@ -308,37 +365,18 @@ public class Email {
|
||||
MimeMessage message = new MimeMessage(session);
|
||||
|
||||
// Set the recipients of the message
|
||||
Iterator<String> i = recipients.iterator();
|
||||
|
||||
while (i.hasNext()) {
|
||||
message.addRecipient(Message.RecipientType.TO, new InternetAddress(
|
||||
i.next()));
|
||||
for (String recipient : recipients) {
|
||||
message.addRecipient(Message.RecipientType.TO,
|
||||
new InternetAddress(recipient));
|
||||
}
|
||||
// Get headers defined by the template.
|
||||
String[] templateHeaders = config.getArrayProperty("mail.message.headers");
|
||||
|
||||
// Format the mail message body
|
||||
VelocityEngine templateEngine = new VelocityEngine();
|
||||
templateEngine.init(VELOCITY_PROPERTIES);
|
||||
|
||||
VelocityContext vctx = new VelocityContext();
|
||||
vctx.put("config", new UnmodifiableConfigurationService(config));
|
||||
vctx.put("params", Collections.unmodifiableList(arguments));
|
||||
|
||||
if (null == template) {
|
||||
if (StringUtils.isBlank(content)) {
|
||||
// No template and no content -- PANIC!!!
|
||||
throw new MessagingException("Email has no body");
|
||||
}
|
||||
// No template, so use a String of content.
|
||||
StringResourceRepository repo = (StringResourceRepository)
|
||||
templateEngine.getApplicationAttribute(RESOURCE_REPOSITORY_NAME);
|
||||
repo.putStringResource(contentName, content);
|
||||
// Turn content into a template.
|
||||
template = templateEngine.getTemplate(contentName);
|
||||
templateHeaders = new String[] {};
|
||||
}
|
||||
|
||||
StringWriter writer = new StringWriter();
|
||||
try {
|
||||
template.merge(vctx, writer);
|
||||
@@ -405,7 +443,8 @@ public class Email {
|
||||
// add the stream
|
||||
messageBodyPart = new MimeBodyPart();
|
||||
messageBodyPart.setDataHandler(new DataHandler(
|
||||
new InputStreamDataSource(attachment.name,attachment.mimetype,attachment.is)));
|
||||
new InputStreamDataSource(attachment.name,
|
||||
attachment.mimetype, attachment.is)));
|
||||
messageBodyPart.setFileName(attachment.name);
|
||||
multipart.addBodyPart(messageBodyPart);
|
||||
}
|
||||
@@ -447,6 +486,9 @@ public class Email {
|
||||
/**
|
||||
* Get the VTL template for an email message. The message is suitable
|
||||
* for inserting values using Apache Velocity.
|
||||
* <p>
|
||||
* Note that everything is stored here, so that only send() throws a
|
||||
* MessagingException.
|
||||
*
|
||||
* @param emailFile
|
||||
* full name for the email template, for example "/dspace/config/emails/register".
|
||||
@@ -484,15 +526,6 @@ public class Email {
|
||||
}
|
||||
return email;
|
||||
}
|
||||
/*
|
||||
* Implementation note: It might be necessary to add a quick utility method
|
||||
* like "send(to, subject, message)". We'll see how far we get without it -
|
||||
* having all emails as templates in the config allows customisation and
|
||||
* internationalisation.
|
||||
*
|
||||
* Note that everything is stored and the run in send() so that only send()
|
||||
* throws a MessagingException.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Test method to send an email to check email server settings
|
||||
@@ -547,7 +580,7 @@ public class Email {
|
||||
}
|
||||
|
||||
/**
|
||||
* Utility struct class for handling file attachments.
|
||||
* Utility record class for handling file attachments.
|
||||
*
|
||||
* @author ojd20
|
||||
*/
|
||||
@@ -563,7 +596,7 @@ public class Email {
|
||||
}
|
||||
|
||||
/**
|
||||
* Utility struct class for handling file attachments.
|
||||
* Utility record class for handling file attachments.
|
||||
*
|
||||
* @author Adán Román Ruiz at arvo.es
|
||||
*/
|
||||
@@ -580,6 +613,8 @@ public class Email {
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrap an {@link InputStream} in a {@link DataSource}.
|
||||
*
|
||||
* @author arnaldo
|
||||
*/
|
||||
public static class InputStreamDataSource implements DataSource {
|
||||
@@ -587,6 +622,14 @@ public class Email {
|
||||
private final String contentType;
|
||||
private final ByteArrayOutputStream baos;
|
||||
|
||||
/**
|
||||
* Consume the content of an InputStream and store it in a local buffer.
|
||||
*
|
||||
* @param name give the DataSource a name.
|
||||
* @param contentType the DataSource contains this type of data.
|
||||
* @param inputStream content to be buffered in the DataSource.
|
||||
* @throws IOException if the stream cannot be read.
|
||||
*/
|
||||
InputStreamDataSource(String name, String contentType, InputStream inputStream) throws IOException {
|
||||
this.name = name;
|
||||
this.contentType = contentType;
|
||||
|
@@ -337,4 +337,17 @@ public class HibernateDBConnection implements DBConnection<Session> {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Do a manual flush. This synchronizes the in-memory state of the Session
|
||||
* with the database (write changes to the database)
|
||||
*
|
||||
* @throws SQLException passed through.
|
||||
*/
|
||||
@Override
|
||||
public void flushSession() throws SQLException {
|
||||
if (getSession().isDirty()) {
|
||||
getSession().flush();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -17,9 +17,12 @@ import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.OutputStreamWriter;
|
||||
import java.io.PrintWriter;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
|
||||
import org.dspace.core.service.LicenseService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.dspace.services.model.Request;
|
||||
import org.dspace.web.ContextUtil;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@@ -101,13 +104,14 @@ public class LicenseServiceImpl implements LicenseService {
|
||||
/**
|
||||
* Get the site-wide default license that submitters need to grant
|
||||
*
|
||||
* Localized license requires: default_{{locale}}.license file.
|
||||
* Locale also must be listed in webui.supported.locales setting.
|
||||
*
|
||||
* @return the default license
|
||||
*/
|
||||
@Override
|
||||
public String getDefaultSubmissionLicense() {
|
||||
if (null == license) {
|
||||
init();
|
||||
}
|
||||
return license;
|
||||
}
|
||||
|
||||
@@ -115,9 +119,8 @@ public class LicenseServiceImpl implements LicenseService {
|
||||
* Load in the default license.
|
||||
*/
|
||||
protected void init() {
|
||||
File licenseFile = new File(
|
||||
DSpaceServicesFactory.getInstance().getConfigurationService().getProperty("dspace.dir")
|
||||
+ File.separator + "config" + File.separator + "default.license");
|
||||
Context context = obtainContext();
|
||||
File licenseFile = new File(I18nUtil.getDefaultLicense(context));
|
||||
|
||||
FileInputStream fir = null;
|
||||
InputStreamReader ir = null;
|
||||
@@ -169,4 +172,24 @@ public class LicenseServiceImpl implements LicenseService {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtaining current request context.
|
||||
* Return new context if getting one from current request failed.
|
||||
*
|
||||
* @return DSpace context object
|
||||
*/
|
||||
private Context obtainContext() {
|
||||
try {
|
||||
Request currentRequest = DSpaceServicesFactory.getInstance().getRequestService().getCurrentRequest();
|
||||
if (currentRequest != null) {
|
||||
HttpServletRequest request = currentRequest.getHttpServletRequest();
|
||||
return ContextUtil.obtainContext(request);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.error("Can't load current request context.");
|
||||
}
|
||||
|
||||
return new Context();
|
||||
}
|
||||
}
|
||||
|
@@ -17,6 +17,7 @@ import org.dspace.app.util.DCInput;
|
||||
import org.dspace.app.util.DCInputSet;
|
||||
import org.dspace.app.util.DCInputsReader;
|
||||
import org.dspace.app.util.DCInputsReaderException;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.MetadataValue;
|
||||
@@ -69,7 +70,7 @@ public class RequiredMetadata extends AbstractCurationTask {
|
||||
handle = "in workflow";
|
||||
}
|
||||
sb.append("Item: ").append(handle);
|
||||
for (String req : getReqList(item.getOwningCollection().getHandle())) {
|
||||
for (String req : getReqList(item.getOwningCollection())) {
|
||||
List<MetadataValue> vals = itemService.getMetadataByMetadataString(item, req);
|
||||
if (vals.size() == 0) {
|
||||
sb.append(" missing required field: ").append(req);
|
||||
@@ -91,14 +92,14 @@ public class RequiredMetadata extends AbstractCurationTask {
|
||||
}
|
||||
}
|
||||
|
||||
protected List<String> getReqList(String handle) throws DCInputsReaderException {
|
||||
List<String> reqList = reqMap.get(handle);
|
||||
protected List<String> getReqList(Collection collection) throws DCInputsReaderException {
|
||||
List<String> reqList = reqMap.get(collection.getHandle());
|
||||
if (reqList == null) {
|
||||
reqList = reqMap.get("default");
|
||||
}
|
||||
if (reqList == null) {
|
||||
reqList = new ArrayList<String>();
|
||||
List<DCInputSet> inputSet = reader.getInputsByCollectionHandle(handle);
|
||||
List<DCInputSet> inputSet = reader.getInputsByCollection(collection);
|
||||
for (DCInputSet inputs : inputSet) {
|
||||
for (DCInput[] row : inputs.getFields()) {
|
||||
for (DCInput input : row) {
|
||||
|
@@ -152,17 +152,10 @@ public class Curation extends DSpaceRunnable<CurationScriptConfiguration> {
|
||||
super.handler.logInfo("Curating id: " + entry.getObjectId());
|
||||
}
|
||||
curator.clear();
|
||||
// does entry relate to a DSO or workflow object?
|
||||
if (entry.getObjectId().indexOf('/') > 0) {
|
||||
for (String taskName : entry.getTaskNames()) {
|
||||
curator.addTask(taskName);
|
||||
}
|
||||
curator.curate(context, entry.getObjectId());
|
||||
} else {
|
||||
// TODO: Remove this exception once curation tasks are supported by configurable workflow
|
||||
// e.g. see https://github.com/DSpace/DSpace/pull/3157
|
||||
throw new IllegalArgumentException("curation for workflow items is no longer supported");
|
||||
}
|
||||
}
|
||||
queue.release(this.queue, ticket, true);
|
||||
return ticket;
|
||||
|
@@ -13,6 +13,8 @@ import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Collection;
|
||||
@@ -30,6 +32,7 @@ import org.dspace.workflow.CurationTaskConfig;
|
||||
import org.dspace.workflow.FlowStep;
|
||||
import org.dspace.workflow.Task;
|
||||
import org.dspace.workflow.TaskSet;
|
||||
import org.dspace.xmlworkflow.Role;
|
||||
import org.dspace.xmlworkflow.RoleMembers;
|
||||
import org.dspace.xmlworkflow.WorkflowConfigurationException;
|
||||
import org.dspace.xmlworkflow.factory.XmlWorkflowFactory;
|
||||
@@ -47,14 +50,17 @@ import org.springframework.stereotype.Service;
|
||||
* Manage interactions between curation and workflow. A curation task can be
|
||||
* attached to a workflow step, to be executed during the step.
|
||||
*
|
||||
* <p>
|
||||
* <strong>NOTE:</strong> when run in workflow, curation tasks <em>run with
|
||||
* authorization disabled</em>.
|
||||
*
|
||||
* @see CurationTaskConfig
|
||||
* @author mwood
|
||||
*/
|
||||
@Service
|
||||
public class XmlWorkflowCuratorServiceImpl
|
||||
implements XmlWorkflowCuratorService {
|
||||
private static final Logger LOG
|
||||
= org.apache.logging.log4j.LogManager.getLogger();
|
||||
private static final Logger LOG = LogManager.getLogger();
|
||||
|
||||
@Autowired(required = true)
|
||||
protected XmlWorkflowFactory workflowFactory;
|
||||
@@ -97,7 +103,18 @@ public class XmlWorkflowCuratorServiceImpl
|
||||
throws AuthorizeException, IOException, SQLException {
|
||||
Curator curator = new Curator();
|
||||
curator.setReporter(reporter);
|
||||
return curate(curator, c, wfi);
|
||||
c.turnOffAuthorisationSystem();
|
||||
boolean wasAnonymous = false;
|
||||
if (null == c.getCurrentUser()) { // We need someone to email
|
||||
wasAnonymous = true;
|
||||
c.setCurrentUser(ePersonService.getSystemEPerson(c));
|
||||
}
|
||||
boolean failedP = curate(curator, c, wfi);
|
||||
if (wasAnonymous) {
|
||||
c.setCurrentUser(null);
|
||||
}
|
||||
c.restoreAuthSystemState();
|
||||
return failedP;
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -123,7 +140,13 @@ public class XmlWorkflowCuratorServiceImpl
|
||||
item.setOwningCollection(wfi.getCollection());
|
||||
for (Task task : step.tasks) {
|
||||
curator.addTask(task.name);
|
||||
curator.curate(item);
|
||||
// Check whether the task is configured to be queued rather than automatically run
|
||||
if (StringUtils.isNotEmpty(step.queue)) {
|
||||
// queue attribute has been set in the FlowStep configuration: add task to configured queue
|
||||
curator.queue(c, item.getID().toString(), step.queue);
|
||||
} else {
|
||||
// Task is configured to be run automatically
|
||||
curator.curate(c, item);
|
||||
int status = curator.getStatus(task.name);
|
||||
String result = curator.getResult(task.name);
|
||||
String action = "none";
|
||||
@@ -158,6 +181,7 @@ public class XmlWorkflowCuratorServiceImpl
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
curator.clear();
|
||||
}
|
||||
|
||||
@@ -223,8 +247,12 @@ public class XmlWorkflowCuratorServiceImpl
|
||||
String status, String action, String message)
|
||||
throws AuthorizeException, IOException, SQLException {
|
||||
List<EPerson> epa = resolveContacts(c, task.getContacts(status), wfi);
|
||||
if (epa.size() > 0) {
|
||||
if (!epa.isEmpty()) {
|
||||
workflowService.notifyOfCuration(c, wfi, epa, task.name, action, message);
|
||||
} else {
|
||||
LOG.warn("No contacts were found for workflow item {}: "
|
||||
+ "task {} returned action {} with message {}",
|
||||
wfi.getID(), task.name, action, message);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -247,8 +275,7 @@ public class XmlWorkflowCuratorServiceImpl
|
||||
// decode contacts
|
||||
if ("$flowgroup".equals(contact)) {
|
||||
// special literal for current flowgoup
|
||||
ClaimedTask claimedTask = claimedTaskService.findByWorkflowIdAndEPerson(c, wfi, c.getCurrentUser());
|
||||
String stepID = claimedTask.getStepID();
|
||||
String stepID = getFlowStep(c, wfi).step;
|
||||
Step step;
|
||||
try {
|
||||
Workflow workflow = workflowFactory.getWorkflow(wfi.getCollection());
|
||||
@@ -258,19 +285,26 @@ public class XmlWorkflowCuratorServiceImpl
|
||||
String.valueOf(wfi.getID()), e);
|
||||
return epList;
|
||||
}
|
||||
RoleMembers roleMembers = step.getRole().getMembers(c, wfi);
|
||||
Role role = step.getRole();
|
||||
if (null != role) {
|
||||
RoleMembers roleMembers = role.getMembers(c, wfi);
|
||||
for (EPerson ep : roleMembers.getEPersons()) {
|
||||
epList.add(ep);
|
||||
}
|
||||
for (Group group : roleMembers.getGroups()) {
|
||||
epList.addAll(group.getMembers());
|
||||
}
|
||||
} else {
|
||||
epList.add(ePersonService.getSystemEPerson(c));
|
||||
}
|
||||
} else if ("$colladmin".equals(contact)) {
|
||||
// special literal for collection administrators
|
||||
Group adGroup = wfi.getCollection().getAdministrators();
|
||||
if (adGroup != null) {
|
||||
epList.addAll(groupService.allMembers(c, adGroup));
|
||||
}
|
||||
} else if ("$siteadmin".equals(contact)) {
|
||||
// special literal for site administrator
|
||||
EPerson siteEp = ePersonService.findByEmail(c,
|
||||
configurationService.getProperty("mail.admin"));
|
||||
if (siteEp != null) {
|
||||
|
@@ -42,9 +42,9 @@ public interface XmlWorkflowCuratorService {
|
||||
*
|
||||
* @param c the context
|
||||
* @param wfi the workflow item
|
||||
* @return true if curation was completed or not required,
|
||||
* @return true if curation was completed or not required;
|
||||
* false if tasks were queued for later completion,
|
||||
* or item was rejected
|
||||
* or item was rejected.
|
||||
* @throws AuthorizeException if authorization error
|
||||
* @throws IOException if IO error
|
||||
* @throws SQLException if database error
|
||||
@@ -58,7 +58,9 @@ public interface XmlWorkflowCuratorService {
|
||||
* @param curator the curation context
|
||||
* @param c the user context
|
||||
* @param wfId the workflow item's ID
|
||||
* @return true if curation failed.
|
||||
* @return true if curation curation was completed or not required;
|
||||
* false if tasks were queued for later completion,
|
||||
* or item was rejected.
|
||||
* @throws AuthorizeException if authorization error
|
||||
* @throws IOException if IO error
|
||||
* @throws SQLException if database error
|
||||
@@ -72,7 +74,9 @@ public interface XmlWorkflowCuratorService {
|
||||
* @param curator the curation context
|
||||
* @param c the user context
|
||||
* @param wfi the workflow item
|
||||
* @return true if curation failed.
|
||||
* @return true if workflow curation was completed or not required;
|
||||
* false if tasks were queued for later completion,
|
||||
* or item was rejected.
|
||||
* @throws AuthorizeException if authorization error
|
||||
* @throws IOException if IO error
|
||||
* @throws SQLException if database error
|
||||
|
@@ -76,14 +76,19 @@ public class FullTextContentStreams extends ContentStreamBase {
|
||||
if (StringUtils.equals(FULLTEXT_BUNDLE, myBundle.getName())) {
|
||||
// a-ha! grab the text out of the bitstreams
|
||||
List<Bitstream> bitstreams = myBundle.getBitstreams();
|
||||
log.debug("Processing full-text bitstreams. Item handle: " + sourceInfo);
|
||||
|
||||
for (Bitstream fulltextBitstream : emptyIfNull(bitstreams)) {
|
||||
fullTextStreams.add(new FullTextBitstream(sourceInfo, fulltextBitstream));
|
||||
|
||||
if (fulltextBitstream != null) {
|
||||
log.debug("Added BitStream: "
|
||||
+ fulltextBitstream.getStoreNumber() + " "
|
||||
+ fulltextBitstream.getSequenceID() + " "
|
||||
+ fulltextBitstream.getName());
|
||||
} else {
|
||||
log.error("Found a NULL bitstream when processing full-text files: item handle:" + sourceInfo);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -158,16 +163,16 @@ public class FullTextContentStreams extends ContentStreamBase {
|
||||
}
|
||||
|
||||
public String getContentType(final Context context) throws SQLException {
|
||||
BitstreamFormat format = bitstream.getFormat(context);
|
||||
BitstreamFormat format = bitstream != null ? bitstream.getFormat(context) : null;
|
||||
return format == null ? null : StringUtils.trimToEmpty(format.getMIMEType());
|
||||
}
|
||||
|
||||
public String getFileName() {
|
||||
return StringUtils.trimToEmpty(bitstream.getName());
|
||||
return bitstream != null ? StringUtils.trimToEmpty(bitstream.getName()) : null;
|
||||
}
|
||||
|
||||
public long getSize() {
|
||||
return bitstream.getSizeBytes();
|
||||
return bitstream != null ? bitstream.getSizeBytes() : -1;
|
||||
}
|
||||
|
||||
public InputStream getInputStream() throws SQLException, IOException, AuthorizeException {
|
||||
|
@@ -7,14 +7,20 @@
|
||||
*/
|
||||
package org.dspace.discovery;
|
||||
|
||||
import static org.dspace.discovery.IndexClientOptions.TYPE_OPTION;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Community;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
@@ -51,6 +57,17 @@ public class IndexClient extends DSpaceRunnable<IndexDiscoveryScriptConfiguratio
|
||||
return;
|
||||
}
|
||||
|
||||
String type = null;
|
||||
if (commandLine.hasOption(TYPE_OPTION)) {
|
||||
List<String> indexableObjectTypes = IndexObjectFactoryFactory.getInstance().getIndexFactories().stream()
|
||||
.map((indexFactory -> indexFactory.getType())).collect(Collectors.toList());
|
||||
type = commandLine.getOptionValue(TYPE_OPTION);
|
||||
if (!indexableObjectTypes.contains(type)) {
|
||||
handler.handleException(String.format("%s is not a valid indexable object type, options: %s",
|
||||
type, Arrays.toString(indexableObjectTypes.toArray())));
|
||||
}
|
||||
}
|
||||
|
||||
/** Acquire from dspace-services in future */
|
||||
/**
|
||||
* new DSpace.getServiceManager().getServiceByName("org.dspace.discovery.SolrIndexer");
|
||||
@@ -113,6 +130,10 @@ public class IndexClient extends DSpaceRunnable<IndexDiscoveryScriptConfiguratio
|
||||
} else if (indexClientOptions == IndexClientOptions.BUILD ||
|
||||
indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) {
|
||||
handler.logInfo("(Re)building index from scratch.");
|
||||
if (StringUtils.isNotBlank(type)) {
|
||||
handler.logWarning(String.format("Type option, %s, not applicable for entire index rebuild option, b" +
|
||||
", type will be ignored", TYPE_OPTION));
|
||||
}
|
||||
indexer.deleteIndex();
|
||||
indexer.createIndex(context);
|
||||
if (indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) {
|
||||
@@ -133,14 +154,14 @@ public class IndexClient extends DSpaceRunnable<IndexDiscoveryScriptConfiguratio
|
||||
} else if (indexClientOptions == IndexClientOptions.UPDATE ||
|
||||
indexClientOptions == IndexClientOptions.UPDATEANDSPELLCHECK) {
|
||||
handler.logInfo("Updating Index");
|
||||
indexer.updateIndex(context, false);
|
||||
indexer.updateIndex(context, false, type);
|
||||
if (indexClientOptions == IndexClientOptions.UPDATEANDSPELLCHECK) {
|
||||
checkRebuildSpellCheck(commandLine, indexer);
|
||||
}
|
||||
} else if (indexClientOptions == IndexClientOptions.FORCEUPDATE ||
|
||||
indexClientOptions == IndexClientOptions.FORCEUPDATEANDSPELLCHECK) {
|
||||
handler.logInfo("Updating Index");
|
||||
indexer.updateIndex(context, true);
|
||||
indexer.updateIndex(context, true, type);
|
||||
if (indexClientOptions == IndexClientOptions.FORCEUPDATEANDSPELLCHECK) {
|
||||
checkRebuildSpellCheck(commandLine, indexer);
|
||||
}
|
||||
|
@@ -8,8 +8,13 @@
|
||||
|
||||
package org.dspace.discovery;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory;
|
||||
|
||||
/**
|
||||
* This Enum holds all the possible options and combinations for the Index discovery script
|
||||
@@ -29,6 +34,8 @@ public enum IndexClientOptions {
|
||||
FORCEUPDATEANDSPELLCHECK,
|
||||
HELP;
|
||||
|
||||
public static final String TYPE_OPTION = "t";
|
||||
|
||||
/**
|
||||
* This method resolves the CommandLine parameters to figure out which action the index-discovery script should
|
||||
* perform
|
||||
@@ -71,11 +78,15 @@ public enum IndexClientOptions {
|
||||
|
||||
protected static Options constructOptions() {
|
||||
Options options = new Options();
|
||||
List<String> indexableObjectTypes = IndexObjectFactoryFactory.getInstance().getIndexFactories().stream()
|
||||
.map((indexFactory -> indexFactory.getType())).collect(Collectors.toList());
|
||||
|
||||
options
|
||||
.addOption("r", "remove", true, "remove an Item, Collection or Community from index based on its handle");
|
||||
options.addOption("i", "index", true,
|
||||
"add or update an Item, Collection or Community based on its handle or uuid");
|
||||
options.addOption(TYPE_OPTION, "type", true, "reindex only specific type of " +
|
||||
"(re)indexable objects; options: " + Arrays.toString(indexableObjectTypes.toArray()));
|
||||
options.addOption("c", "clean", false,
|
||||
"clean existing index removing any documents that no longer exist in the db");
|
||||
options.addOption("d", "delete", false,
|
||||
|
@@ -154,7 +154,11 @@ public class IndexEventConsumer implements Consumer {
|
||||
|
||||
case Event.REMOVE:
|
||||
case Event.ADD:
|
||||
if (object == null) {
|
||||
// At this time, ADD and REMOVE actions are ignored on SITE object. They are only triggered for
|
||||
// top-level communities. No action is necessary as Community itself is indexed (or deleted) separately.
|
||||
if (event.getSubjectType() == Constants.SITE) {
|
||||
log.debug(event.getEventTypeAsString() + " event triggered for Site object. Skipping it.");
|
||||
} else if (object == null) {
|
||||
log.warn(event.getEventTypeAsString() + " event, could not get object for "
|
||||
+ event.getObjectTypeAsString() + " id="
|
||||
+ event.getObjectID()
|
||||
@@ -201,6 +205,10 @@ public class IndexEventConsumer implements Consumer {
|
||||
@Override
|
||||
public void end(Context ctx) throws Exception {
|
||||
|
||||
// Change the mode to readonly to improve performance
|
||||
Context.Mode originalMode = ctx.getCurrentMode();
|
||||
ctx.setMode(Context.Mode.READ_ONLY);
|
||||
|
||||
try {
|
||||
for (String uid : uniqueIdsToDelete) {
|
||||
try {
|
||||
@@ -230,6 +238,8 @@ public class IndexEventConsumer implements Consumer {
|
||||
uniqueIdsToDelete.clear();
|
||||
createdItemsToUpdate.clear();
|
||||
}
|
||||
|
||||
ctx.setMode(originalMode);
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -1031,9 +1031,8 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
// Add information about our search fields
|
||||
for (String field : searchFields) {
|
||||
List<String> valuesAsString = new ArrayList<>();
|
||||
for (Object o : doc.getFieldValues(field)) {
|
||||
valuesAsString.add(String.valueOf(o));
|
||||
}
|
||||
Optional.ofNullable(doc.getFieldValues(field))
|
||||
.ifPresent(l -> l.forEach(o -> valuesAsString.add(String.valueOf(o))));
|
||||
resultDoc.addSearchField(field, valuesAsString.toArray(new String[valuesAsString.size()]));
|
||||
}
|
||||
result.addSearchDocument(indexableObject, resultDoc);
|
||||
|
@@ -64,7 +64,14 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
|
||||
|
||||
//Do any additional indexing, depends on the plugins
|
||||
for (SolrServiceIndexPlugin solrServiceIndexPlugin : ListUtils.emptyIfNull(solrServiceIndexPlugins)) {
|
||||
try {
|
||||
solrServiceIndexPlugin.additionalIndex(context, indexableObject, doc);
|
||||
} catch (Exception e) {
|
||||
log.error("An error occurred while indexing additional fields. " +
|
||||
"Could not fully index item with UUID: {}. Plugin: {}",
|
||||
indexableObject.getUniqueIndexID(), solrServiceIndexPlugin.getClass().getSimpleName());
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
return doc;
|
||||
@@ -113,6 +120,18 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
|
||||
// Use Apache Tika to parse the full text stream(s)
|
||||
try (InputStream fullTextStreams = streams.getStream()) {
|
||||
tikaParser.parse(fullTextStreams, tikaHandler, tikaMetadata, tikaContext);
|
||||
|
||||
// Write Tika metadata to "tika_meta_*" fields.
|
||||
// This metadata is not very useful right now,
|
||||
// but we'll keep it just in case it becomes more useful.
|
||||
for (String name : tikaMetadata.names()) {
|
||||
for (String value : tikaMetadata.getValues(name)) {
|
||||
doc.addField("tika_meta_" + name, value);
|
||||
}
|
||||
}
|
||||
|
||||
// Save (parsed) full text to "fulltext" field
|
||||
doc.addField("fulltext", tikaHandler.toString());
|
||||
} catch (SAXException saxe) {
|
||||
// Check if this SAXException is just a notice that this file was longer than the character limit.
|
||||
// Unfortunately there is not a unique, public exception type to catch here. This error is thrown
|
||||
@@ -126,26 +145,19 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
|
||||
log.error("Tika parsing error. Could not index full text.", saxe);
|
||||
throw new IOException("Tika parsing error. Could not index full text.", saxe);
|
||||
}
|
||||
} catch (TikaException ex) {
|
||||
} catch (TikaException | IOException ex) {
|
||||
log.error("Tika parsing error. Could not index full text.", ex);
|
||||
throw new IOException("Tika parsing error. Could not index full text.", ex);
|
||||
}
|
||||
|
||||
// Write Tika metadata to "tika_meta_*" fields.
|
||||
// This metadata is not very useful right now, but we'll keep it just in case it becomes more useful.
|
||||
for (String name : tikaMetadata.names()) {
|
||||
for (String value : tikaMetadata.getValues(name)) {
|
||||
doc.addField("tika_meta_" + name, value);
|
||||
}
|
||||
}
|
||||
|
||||
// Save (parsed) full text to "fulltext" field
|
||||
doc.addField("fulltext", tikaHandler.toString());
|
||||
}
|
||||
|
||||
} finally {
|
||||
// Add document to index
|
||||
solr.add(doc);
|
||||
}
|
||||
return;
|
||||
}
|
||||
// Add document to index
|
||||
solr.add(doc);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
@@ -172,13 +172,6 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
|
||||
addNamedResourceTypeIndex(doc, acvalue);
|
||||
}
|
||||
|
||||
// write the index and close the inputstreamreaders
|
||||
try {
|
||||
log.info("Wrote Item: " + item.getID() + " to Index");
|
||||
} catch (RuntimeException e) {
|
||||
log.error("Error while writing item to discovery index: " + item.getID() + " message:"
|
||||
+ e.getMessage(), e);
|
||||
}
|
||||
return doc;
|
||||
}
|
||||
|
||||
@@ -845,7 +838,7 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
|
||||
private void saveFacetPrefixParts(SolrInputDocument doc, DiscoverySearchFilter searchFilter, String value,
|
||||
String separator, String authority, String preferedLabel) {
|
||||
value = StringUtils.normalizeSpace(value);
|
||||
Pattern pattern = Pattern.compile("\\b\\w+\\b", Pattern.CASE_INSENSITIVE);
|
||||
Pattern pattern = Pattern.compile("\\b\\w+\\b", Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CHARACTER_CLASS);
|
||||
Matcher matcher = pattern.matcher(value);
|
||||
while (matcher.find()) {
|
||||
int index = matcher.start();
|
||||
|
@@ -33,6 +33,7 @@ import org.dspace.content.DSpaceObjectServiceImpl;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.MetadataField;
|
||||
import org.dspace.content.MetadataValue;
|
||||
import org.dspace.content.QAEventProcessed;
|
||||
import org.dspace.content.WorkspaceItem;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.ItemService;
|
||||
@@ -47,6 +48,8 @@ import org.dspace.eperson.service.GroupService;
|
||||
import org.dspace.eperson.service.SubscribeService;
|
||||
import org.dspace.event.Event;
|
||||
import org.dspace.orcid.service.OrcidTokenService;
|
||||
import org.dspace.qaevent.dao.QAEventsDAO;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.util.UUIDUtils;
|
||||
import org.dspace.versioning.Version;
|
||||
import org.dspace.versioning.VersionHistory;
|
||||
@@ -101,8 +104,12 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
|
||||
protected VersionDAO versionDAO;
|
||||
@Autowired(required = true)
|
||||
protected ClaimedTaskService claimedTaskService;
|
||||
@Autowired(required = true)
|
||||
protected ConfigurationService configurationService;
|
||||
@Autowired
|
||||
protected OrcidTokenService orcidTokenService;
|
||||
@Autowired
|
||||
protected QAEventsDAO qaEventsDao;
|
||||
|
||||
protected EPersonServiceImpl() {
|
||||
super();
|
||||
@@ -113,13 +120,42 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
|
||||
return ePersonDAO.findByID(context, EPerson.class, id);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a fake EPerson which can receive email. Its address will be the
|
||||
* value of "mail.admin", or "postmaster" if all else fails.
|
||||
* @param c
|
||||
* @return
|
||||
* @throws SQLException
|
||||
*/
|
||||
@Override
|
||||
public EPerson getSystemEPerson(Context c)
|
||||
throws SQLException {
|
||||
String adminEmail = configurationService.getProperty("mail.admin");
|
||||
if (null == adminEmail) {
|
||||
adminEmail = "postmaster"; // Last-ditch attempt to send *somewhere*
|
||||
}
|
||||
EPerson systemEPerson = findByEmail(c, adminEmail);
|
||||
|
||||
if (null == systemEPerson) {
|
||||
systemEPerson = new EPerson();
|
||||
systemEPerson.setEmail(adminEmail);
|
||||
}
|
||||
|
||||
return systemEPerson;
|
||||
}
|
||||
|
||||
@Override
|
||||
public EPerson findByIdOrLegacyId(Context context, String id) throws SQLException {
|
||||
try {
|
||||
if (StringUtils.isNumeric(id)) {
|
||||
return findByLegacyId(context, Integer.parseInt(id));
|
||||
} else {
|
||||
return find(context, UUID.fromString(id));
|
||||
}
|
||||
} catch (IllegalArgumentException e) {
|
||||
// Not a valid legacy ID or valid UUID
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -157,32 +193,98 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
|
||||
|
||||
@Override
|
||||
public List<EPerson> search(Context context, String query, int offset, int limit) throws SQLException {
|
||||
try {
|
||||
List<EPerson> ePerson = new ArrayList<>();
|
||||
EPerson person = find(context, UUID.fromString(query));
|
||||
if (person != null) {
|
||||
ePerson.add(person);
|
||||
}
|
||||
return ePerson;
|
||||
} catch (IllegalArgumentException e) {
|
||||
List<EPerson> ePersons = new ArrayList<>();
|
||||
UUID uuid = UUIDUtils.fromString(query);
|
||||
if (uuid == null) {
|
||||
// Search by firstname & lastname (NOTE: email will also be included automatically)
|
||||
MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null);
|
||||
MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null);
|
||||
if (StringUtils.isBlank(query)) {
|
||||
query = null;
|
||||
}
|
||||
return ePersonDAO.search(context, query, Arrays.asList(firstNameField, lastNameField),
|
||||
ePersons = ePersonDAO.search(context, query, Arrays.asList(firstNameField, lastNameField),
|
||||
Arrays.asList(firstNameField, lastNameField), offset, limit);
|
||||
} else {
|
||||
// Search by UUID
|
||||
EPerson person = find(context, uuid);
|
||||
if (person != null) {
|
||||
ePersons.add(person);
|
||||
}
|
||||
}
|
||||
return ePersons;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int searchResultCount(Context context, String query) throws SQLException {
|
||||
int result = 0;
|
||||
UUID uuid = UUIDUtils.fromString(query);
|
||||
if (uuid == null) {
|
||||
// Count results found by firstname & lastname (email is also included automatically)
|
||||
MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null);
|
||||
MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null);
|
||||
if (StringUtils.isBlank(query)) {
|
||||
query = null;
|
||||
}
|
||||
return ePersonDAO.searchResultCount(context, query, Arrays.asList(firstNameField, lastNameField));
|
||||
result = ePersonDAO.searchResultCount(context, query, Arrays.asList(firstNameField, lastNameField));
|
||||
} else {
|
||||
// Search by UUID
|
||||
EPerson person = find(context, uuid);
|
||||
if (person != null) {
|
||||
result = 1;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<EPerson> searchNonMembers(Context context, String query, Group excludeGroup, int offset, int limit)
|
||||
throws SQLException {
|
||||
List<EPerson> ePersons = new ArrayList<>();
|
||||
UUID uuid = UUIDUtils.fromString(query);
|
||||
if (uuid == null) {
|
||||
// Search by firstname & lastname (NOTE: email will also be included automatically)
|
||||
MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null);
|
||||
MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null);
|
||||
if (StringUtils.isBlank(query)) {
|
||||
query = null;
|
||||
}
|
||||
ePersons = ePersonDAO.searchNotMember(context, query, Arrays.asList(firstNameField, lastNameField),
|
||||
excludeGroup, Arrays.asList(firstNameField, lastNameField),
|
||||
offset, limit);
|
||||
} else {
|
||||
// Search by UUID
|
||||
EPerson person = find(context, uuid);
|
||||
// Verify EPerson is NOT a member of the given excludeGroup before adding
|
||||
if (person != null && !groupService.isDirectMember(excludeGroup, person)) {
|
||||
ePersons.add(person);
|
||||
}
|
||||
}
|
||||
|
||||
return ePersons;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int searchNonMembersCount(Context context, String query, Group excludeGroup) throws SQLException {
|
||||
int result = 0;
|
||||
UUID uuid = UUIDUtils.fromString(query);
|
||||
if (uuid == null) {
|
||||
// Count results found by firstname & lastname (email is also included automatically)
|
||||
MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null);
|
||||
MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null);
|
||||
if (StringUtils.isBlank(query)) {
|
||||
query = null;
|
||||
}
|
||||
result = ePersonDAO.searchNotMemberCount(context, query, Arrays.asList(firstNameField, lastNameField),
|
||||
excludeGroup);
|
||||
} else {
|
||||
// Search by UUID
|
||||
EPerson person = find(context, uuid);
|
||||
// Verify EPerson is NOT a member of the given excludeGroup before counting
|
||||
if (person != null && !groupService.isDirectMember(excludeGroup, person)) {
|
||||
result = 1;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -278,10 +380,13 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
|
||||
throw new AuthorizeException(
|
||||
"You must be an admin to delete an EPerson");
|
||||
}
|
||||
// Get all workflow-related groups that the current EPerson belongs to
|
||||
Set<Group> workFlowGroups = getAllWorkFlowGroups(context, ePerson);
|
||||
for (Group group: workFlowGroups) {
|
||||
List<EPerson> ePeople = groupService.allMembers(context, group);
|
||||
if (ePeople.size() == 1 && ePeople.contains(ePerson)) {
|
||||
// Get total number of unique EPerson objs who are a member of this group (or subgroup)
|
||||
int totalMembers = groupService.countAllMembers(context, group);
|
||||
// If only one EPerson is a member, then we cannot delete the last member of this group.
|
||||
if (totalMembers == 1) {
|
||||
throw new EmptyWorkflowGroupException(ePerson.getID(), group.getID());
|
||||
}
|
||||
}
|
||||
@@ -391,6 +496,11 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
|
||||
// Remove any subscriptions
|
||||
subscribeService.deleteByEPerson(context, ePerson);
|
||||
|
||||
List<QAEventProcessed> qaEvents = qaEventsDao.findByEPerson(context, ePerson);
|
||||
for (QAEventProcessed qaEvent : qaEvents) {
|
||||
qaEventsDao.delete(context, qaEvent);
|
||||
}
|
||||
|
||||
// Remove ourself
|
||||
ePersonDAO.delete(context, ePerson);
|
||||
|
||||
@@ -540,14 +650,29 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
|
||||
|
||||
@Override
|
||||
public List<EPerson> findByGroups(Context c, Set<Group> groups) throws SQLException {
|
||||
return findByGroups(c, groups, -1, -1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<EPerson> findByGroups(Context c, Set<Group> groups, int pageSize, int offset) throws SQLException {
|
||||
//Make sure we at least have one group, if not don't even bother searching.
|
||||
if (CollectionUtils.isNotEmpty(groups)) {
|
||||
return ePersonDAO.findByGroups(c, groups);
|
||||
return ePersonDAO.findByGroups(c, groups, pageSize, offset);
|
||||
} else {
|
||||
return new ArrayList<>();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int countByGroups(Context c, Set<Group> groups) throws SQLException {
|
||||
//Make sure we at least have one group, if not don't even bother counting.
|
||||
if (CollectionUtils.isNotEmpty(groups)) {
|
||||
return ePersonDAO.countByGroups(c, groups);
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<EPerson> findEPeopleWithSubscription(Context context) throws SQLException {
|
||||
return ePersonDAO.findAllSubscribers(context);
|
||||
|
@@ -141,15 +141,6 @@ public class Groomer {
|
||||
System.out.println();
|
||||
|
||||
if (delete) {
|
||||
List<String> whyNot = ePersonService.getDeleteConstraints(myContext, account);
|
||||
if (!whyNot.isEmpty()) {
|
||||
System.out.print("\tCannot be deleted; referenced in");
|
||||
for (String table : whyNot) {
|
||||
System.out.print(' ');
|
||||
System.out.print(table);
|
||||
}
|
||||
System.out.println();
|
||||
} else {
|
||||
try {
|
||||
ePersonService.delete(myContext, account);
|
||||
} catch (AuthorizeException | IOException ex) {
|
||||
@@ -157,7 +148,6 @@ public class Groomer {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
myContext.restoreAuthSystemState();
|
||||
myContext.complete();
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user