Merge remote-tracking branch 'upstream/main' into simplify-process-of-adding-sidear-facets_contribute-7.6

# Conflicts:
#	dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java
This commit is contained in:
Alexandre Vryghem
2023-11-01 12:35:48 +01:00
124 changed files with 2451 additions and 772 deletions

View File

@@ -1,26 +0,0 @@
# This workflow runs whenever a new pull request is created
# TEMPORARILY DISABLED. Unfortunately this doesn't work for PRs created from forked repositories (which is how we tend to create PRs).
# There is no known workaround yet. See https://github.community/t/how-to-use-github-token-for-prs-from-forks/16818
name: Pull Request opened
# Only run for newly opened PRs against the "main" branch
on:
pull_request:
types: [opened]
branches:
- main
jobs:
automation:
runs-on: ubuntu-latest
steps:
# Assign the PR to whomever created it. This is useful for visualizing assignments on project boards
# See https://github.com/marketplace/actions/pull-request-assigner
- name: Assign PR to creator
uses: thomaseizinger/assign-pr-creator-action@v1.0.0
# Note, this authentication token is created automatically
# See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
# Ignore errors. It is possible the PR was created by someone who cannot be assigned
continue-on-error: true

View File

@@ -5,12 +5,16 @@
# because CodeQL requires a fresh build with all tests *disabled*. # because CodeQL requires a fresh build with all tests *disabled*.
name: "Code Scanning" name: "Code Scanning"
# Run this code scan for all pushes / PRs to main branch. Also run once a week. # Run this code scan for all pushes / PRs to main or maintenance branches. Also run once a week.
on: on:
push: push:
branches: [ main ] branches:
- main
- 'dspace-**'
pull_request: pull_request:
branches: [ main ] branches:
- main
- 'dspace-**'
# Don't run if PR is only updating static documentation # Don't run if PR is only updating static documentation
paths-ignore: paths-ignore:
- '**/*.md' - '**/*.md'

View File

@@ -15,23 +15,19 @@ on:
permissions: permissions:
contents: read # to fetch code (actions/checkout) contents: read # to fetch code (actions/checkout)
jobs: # Define shared environment variables for all jobs below
docker: env:
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
if: github.repository == 'dspace/dspace'
runs-on: ubuntu-latest
env:
# Define tags to use for Docker images based on Git tags/branches (for docker/metadata-action) # Define tags to use for Docker images based on Git tags/branches (for docker/metadata-action)
# For a new commit on default branch (main), use the literal tag 'dspace-7_x' on Docker image. # For a new commit on default branch (main), use the literal tag 'latest' on Docker image.
# For a new commit on other branches, use the branch name as the tag for Docker image. # For a new commit on other branches, use the branch name as the tag for Docker image.
# For a new tag, copy that tag name as the tag for Docker image. # For a new tag, copy that tag name as the tag for Docker image.
IMAGE_TAGS: | IMAGE_TAGS: |
type=raw,value=dspace-7_x,enable=${{ endsWith(github.ref, github.event.repository.default_branch) }} type=raw,value=latest,enable=${{ endsWith(github.ref, github.event.repository.default_branch) }}
type=ref,event=branch,enable=${{ !endsWith(github.ref, github.event.repository.default_branch) }} type=ref,event=branch,enable=${{ !endsWith(github.ref, github.event.repository.default_branch) }}
type=ref,event=tag type=ref,event=tag
# Define default tag "flavor" for docker/metadata-action per # Define default tag "flavor" for docker/metadata-action per
# https://github.com/docker/metadata-action#flavor-input # https://github.com/docker/metadata-action#flavor-input
# We turn off 'latest' tag by default. # We manage the 'latest' tag ourselves to the 'main' branch (see settings above)
TAGS_FLAVOR: | TAGS_FLAVOR: |
latest=false latest=false
# Architectures / Platforms for which we will build Docker images # Architectures / Platforms for which we will build Docker images
@@ -40,6 +36,16 @@ jobs:
# longer (around 45mins or so) which is why we only run it when pushing a new Docker image. # longer (around 45mins or so) which is why we only run it when pushing a new Docker image.
PLATFORMS: linux/amd64${{ github.event_name != 'pull_request' && ', linux/arm64' || '' }} PLATFORMS: linux/amd64${{ github.event_name != 'pull_request' && ', linux/arm64' || '' }}
jobs:
####################################################
# Build/Push the 'dspace/dspace-dependencies' image.
# This image is used by all other jobs.
####################################################
dspace-dependencies:
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
if: github.repository == 'dspace/dspace'
runs-on: ubuntu-latest
steps: steps:
# https://github.com/actions/checkout # https://github.com/actions/checkout
- name: Checkout codebase - name: Checkout codebase
@@ -62,9 +68,6 @@ jobs:
username: ${{ secrets.DOCKER_USERNAME }} username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_ACCESS_TOKEN }} password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
####################################################
# Build/Push the 'dspace/dspace-dependencies' image
####################################################
# https://github.com/docker/metadata-action # https://github.com/docker/metadata-action
# Get Metadata for docker_build_deps step below # Get Metadata for docker_build_deps step below
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-dependencies' image - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-dependencies' image
@@ -78,7 +81,7 @@ jobs:
# https://github.com/docker/build-push-action # https://github.com/docker/build-push-action
- name: Build and push 'dspace-dependencies' image - name: Build and push 'dspace-dependencies' image
id: docker_build_deps id: docker_build_deps
uses: docker/build-push-action@v3 uses: docker/build-push-action@v4
with: with:
context: . context: .
file: ./Dockerfile.dependencies file: ./Dockerfile.dependencies
@@ -93,6 +96,35 @@ jobs:
####################################### #######################################
# Build/Push the 'dspace/dspace' image # Build/Push the 'dspace/dspace' image
####################################### #######################################
dspace:
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
if: github.repository == 'dspace/dspace'
# Must run after 'dspace-dependencies' job above
needs: dspace-dependencies
runs-on: ubuntu-latest
steps:
# https://github.com/actions/checkout
- name: Checkout codebase
uses: actions/checkout@v3
# https://github.com/docker/setup-buildx-action
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
# https://github.com/docker/setup-qemu-action
- name: Set up QEMU emulation to build for multiple architectures
uses: docker/setup-qemu-action@v2
# https://github.com/docker/login-action
- name: Login to DockerHub
# Only login if not a PR, as PRs only trigger a Docker build and not a push
if: github.event_name != 'pull_request'
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
# Get Metadata for docker_build step below # Get Metadata for docker_build step below
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace' image - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace' image
id: meta_build id: meta_build
@@ -104,7 +136,7 @@ jobs:
- name: Build and push 'dspace' image - name: Build and push 'dspace' image
id: docker_build id: docker_build
uses: docker/build-push-action@v3 uses: docker/build-push-action@v4
with: with:
context: . context: .
file: ./Dockerfile file: ./Dockerfile
@@ -116,9 +148,38 @@ jobs:
tags: ${{ steps.meta_build.outputs.tags }} tags: ${{ steps.meta_build.outputs.tags }}
labels: ${{ steps.meta_build.outputs.labels }} labels: ${{ steps.meta_build.outputs.labels }}
##################################################### #############################################################
# Build/Push the 'dspace/dspace' image ('-test' tag) # Build/Push the 'dspace/dspace' image ('-test' tag)
##################################################### #############################################################
dspace-test:
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
if: github.repository == 'dspace/dspace'
# Must run after 'dspace-dependencies' job above
needs: dspace-dependencies
runs-on: ubuntu-latest
steps:
# https://github.com/actions/checkout
- name: Checkout codebase
uses: actions/checkout@v3
# https://github.com/docker/setup-buildx-action
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
# https://github.com/docker/setup-qemu-action
- name: Set up QEMU emulation to build for multiple architectures
uses: docker/setup-qemu-action@v2
# https://github.com/docker/login-action
- name: Login to DockerHub
# Only login if not a PR, as PRs only trigger a Docker build and not a push
if: github.event_name != 'pull_request'
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
# Get Metadata for docker_build_test step below # Get Metadata for docker_build_test step below
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-test' image - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-test' image
id: meta_build_test id: meta_build_test
@@ -133,7 +194,7 @@ jobs:
- name: Build and push 'dspace-test' image - name: Build and push 'dspace-test' image
id: docker_build_test id: docker_build_test
uses: docker/build-push-action@v3 uses: docker/build-push-action@v4
with: with:
context: . context: .
file: ./Dockerfile.test file: ./Dockerfile.test
@@ -148,6 +209,35 @@ jobs:
########################################### ###########################################
# Build/Push the 'dspace/dspace-cli' image # Build/Push the 'dspace/dspace-cli' image
########################################### ###########################################
dspace-cli:
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
if: github.repository == 'dspace/dspace'
# Must run after 'dspace-dependencies' job above
needs: dspace-dependencies
runs-on: ubuntu-latest
steps:
# https://github.com/actions/checkout
- name: Checkout codebase
uses: actions/checkout@v3
# https://github.com/docker/setup-buildx-action
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
# https://github.com/docker/setup-qemu-action
- name: Set up QEMU emulation to build for multiple architectures
uses: docker/setup-qemu-action@v2
# https://github.com/docker/login-action
- name: Login to DockerHub
# Only login if not a PR, as PRs only trigger a Docker build and not a push
if: github.event_name != 'pull_request'
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
# Get Metadata for docker_build_test step below # Get Metadata for docker_build_test step below
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-cli' image - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-cli' image
id: meta_build_cli id: meta_build_cli
@@ -159,7 +249,7 @@ jobs:
- name: Build and push 'dspace-cli' image - name: Build and push 'dspace-cli' image
id: docker_build_cli id: docker_build_cli
uses: docker/build-push-action@v3 uses: docker/build-push-action@v4
with: with:
context: . context: .
file: ./Dockerfile.cli file: ./Dockerfile.cli
@@ -174,6 +264,33 @@ jobs:
########################################### ###########################################
# Build/Push the 'dspace/dspace-solr' image # Build/Push the 'dspace/dspace-solr' image
########################################### ###########################################
dspace-solr:
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
if: github.repository == 'dspace/dspace'
runs-on: ubuntu-latest
steps:
# https://github.com/actions/checkout
- name: Checkout codebase
uses: actions/checkout@v3
# https://github.com/docker/setup-buildx-action
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
# https://github.com/docker/setup-qemu-action
- name: Set up QEMU emulation to build for multiple architectures
uses: docker/setup-qemu-action@v2
# https://github.com/docker/login-action
- name: Login to DockerHub
# Only login if not a PR, as PRs only trigger a Docker build and not a push
if: github.event_name != 'pull_request'
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
# Get Metadata for docker_build_solr step below # Get Metadata for docker_build_solr step below
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-solr' image - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-solr' image
id: meta_build_solr id: meta_build_solr
@@ -185,7 +302,7 @@ jobs:
- name: Build and push 'dspace-solr' image - name: Build and push 'dspace-solr' image
id: docker_build_solr id: docker_build_solr
uses: docker/build-push-action@v3 uses: docker/build-push-action@v4
with: with:
context: . context: .
file: ./dspace/src/main/docker/dspace-solr/Dockerfile file: ./dspace/src/main/docker/dspace-solr/Dockerfile
@@ -200,6 +317,33 @@ jobs:
########################################################### ###########################################################
# Build/Push the 'dspace/dspace-postgres-pgcrypto' image # Build/Push the 'dspace/dspace-postgres-pgcrypto' image
########################################################### ###########################################################
dspace-postgres-pgcrypto:
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
if: github.repository == 'dspace/dspace'
runs-on: ubuntu-latest
steps:
# https://github.com/actions/checkout
- name: Checkout codebase
uses: actions/checkout@v3
# https://github.com/docker/setup-buildx-action
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
# https://github.com/docker/setup-qemu-action
- name: Set up QEMU emulation to build for multiple architectures
uses: docker/setup-qemu-action@v2
# https://github.com/docker/login-action
- name: Login to DockerHub
# Only login if not a PR, as PRs only trigger a Docker build and not a push
if: github.event_name != 'pull_request'
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
# Get Metadata for docker_build_postgres step below # Get Metadata for docker_build_postgres step below
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-postgres-pgcrypto' image - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-postgres-pgcrypto' image
id: meta_build_postgres id: meta_build_postgres
@@ -211,7 +355,7 @@ jobs:
- name: Build and push 'dspace-postgres-pgcrypto' image - name: Build and push 'dspace-postgres-pgcrypto' image
id: docker_build_postgres id: docker_build_postgres
uses: docker/build-push-action@v3 uses: docker/build-push-action@v4
with: with:
# Must build out of subdirectory to have access to install script for pgcrypto # Must build out of subdirectory to have access to install script for pgcrypto
context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/ context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/
@@ -224,9 +368,36 @@ jobs:
tags: ${{ steps.meta_build_postgres.outputs.tags }} tags: ${{ steps.meta_build_postgres.outputs.tags }}
labels: ${{ steps.meta_build_postgres.outputs.labels }} labels: ${{ steps.meta_build_postgres.outputs.labels }}
########################################################### ########################################################################
# Build/Push the 'dspace/dspace-postgres-pgcrypto' image ('-loadsql' tag) # Build/Push the 'dspace/dspace-postgres-pgcrypto' image (-loadsql tag)
########################################################### ########################################################################
dspace-postgres-pgcrypto-loadsql:
# Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
if: github.repository == 'dspace/dspace'
runs-on: ubuntu-latest
steps:
# https://github.com/actions/checkout
- name: Checkout codebase
uses: actions/checkout@v3
# https://github.com/docker/setup-buildx-action
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
# https://github.com/docker/setup-qemu-action
- name: Set up QEMU emulation to build for multiple architectures
uses: docker/setup-qemu-action@v2
# https://github.com/docker/login-action
- name: Login to DockerHub
# Only login if not a PR, as PRs only trigger a Docker build and not a push
if: github.event_name != 'pull_request'
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
# Get Metadata for docker_build_postgres_loadsql step below # Get Metadata for docker_build_postgres_loadsql step below
- name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-postgres-pgcrypto-loadsql' image - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-postgres-pgcrypto-loadsql' image
id: meta_build_postgres_loadsql id: meta_build_postgres_loadsql
@@ -241,7 +412,7 @@ jobs:
- name: Build and push 'dspace-postgres-pgcrypto-loadsql' image - name: Build and push 'dspace-postgres-pgcrypto-loadsql' image
id: docker_build_postgres_loadsql id: docker_build_postgres_loadsql
uses: docker/build-push-action@v3 uses: docker/build-push-action@v4
with: with:
# Must build out of subdirectory to have access to install script for pgcrypto # Must build out of subdirectory to have access to install script for pgcrypto
context: ./dspace/src/main/docker/dspace-postgres-pgcrypto-curl/ context: ./dspace/src/main/docker/dspace-postgres-pgcrypto-curl/

View File

@@ -1,11 +1,12 @@
# This workflow checks open PRs for merge conflicts and labels them when conflicts are found # This workflow checks open PRs for merge conflicts and labels them when conflicts are found
name: Check for merge conflicts name: Check for merge conflicts
# Run whenever the "main" branch is updated # Run this for all pushes (i.e. merges) to 'main' or maintenance branches
# NOTE: This means merge conflicts are only checked for when a PR is merged to main.
on: on:
push: push:
branches: [ main ] branches:
- main
- 'dspace-**'
# So that the `conflict_label_name` is removed if conflicts are resolved, # So that the `conflict_label_name` is removed if conflicts are resolved,
# we allow this to run for `pull_request_target` so that github secrets are available. # we allow this to run for `pull_request_target` so that github secrets are available.
pull_request_target: pull_request_target:
@@ -24,6 +25,8 @@ jobs:
# See: https://github.com/prince-chrismc/label-merge-conflicts-action # See: https://github.com/prince-chrismc/label-merge-conflicts-action
- name: Auto-label PRs with merge conflicts - name: Auto-label PRs with merge conflicts
uses: prince-chrismc/label-merge-conflicts-action@v3 uses: prince-chrismc/label-merge-conflicts-action@v3
# Ignore any failures -- may occur (randomly?) for older, outdated PRs.
continue-on-error: true
# Add "merge conflict" label if a merge conflict is detected. Remove it when resolved. # Add "merge conflict" label if a merge conflict is detected. Remove it when resolved.
# Note, the authentication token is created automatically # Note, the authentication token is created automatically
# See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token

View File

@@ -0,0 +1,46 @@
# This workflow will attempt to port a merged pull request to
# the branch specified in a "port to" label (if exists)
name: Port merged Pull Request
# Only run for merged PRs against the "main" or maintenance branches
# We allow this to run for `pull_request_target` so that github secrets are available
# (This is required when the PR comes from a forked repo)
on:
pull_request_target:
types: [ closed ]
branches:
- main
- 'dspace-**'
permissions:
contents: write # so action can add comments
pull-requests: write # so action can create pull requests
jobs:
port_pr:
runs-on: ubuntu-latest
# Don't run on closed *unmerged* pull requests
if: github.event.pull_request.merged
steps:
# Checkout code
- uses: actions/checkout@v3
# Port PR to other branch (ONLY if labeled with "port to")
# See https://github.com/korthout/backport-action
- name: Create backport pull requests
uses: korthout/backport-action@v1
with:
# Trigger based on a "port to [branch]" label on PR
# (This label must specify the branch name to port to)
label_pattern: '^port to ([^ ]+)$'
# Title to add to the (newly created) port PR
pull_title: '[Port ${target_branch}] ${pull_title}'
# Description to add to the (newly created) port PR
pull_description: 'Port of #${pull_number} by @${pull_author} to `${target_branch}`.'
# Copy all labels from original PR to (newly created) port PR
# NOTE: The labels matching 'label_pattern' are automatically excluded
copy_labels_pattern: '.*'
# Skip any merge commits in the ported PR. This means only non-merge commits are cherry-picked to the new PR
merge_commits: 'skip'
# Use a personal access token (PAT) to create PR as 'dspace-bot' user.
# A PAT is required in order for the new PR to trigger its own actions (for CI checks)
github_token: ${{ secrets.PR_PORT_TOKEN }}

View File

@@ -0,0 +1,24 @@
# This workflow runs whenever a new pull request is created
name: Pull Request opened
# Only run for newly opened PRs against the "main" or maintenance branches
# We allow this to run for `pull_request_target` so that github secrets are available
# (This is required to assign a PR back to the creator when the PR comes from a forked repo)
on:
pull_request_target:
types: [ opened ]
branches:
- main
- 'dspace-**'
permissions:
pull-requests: write
jobs:
automation:
runs-on: ubuntu-latest
steps:
# Assign the PR to whomever created it. This is useful for visualizing assignments on project boards
# See https://github.com/toshimaru/auto-author-assign
- name: Assign PR to creator
uses: toshimaru/auto-author-assign@v1.6.2

View File

@@ -1,14 +1,15 @@
# This image will be published as dspace/dspace # This image will be published as dspace/dspace
# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details # See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
# #
# - note: default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x # - note: default tag for branch: dspace/dspace: dspace/dspace:latest
# This Dockerfile uses JDK11 by default, but has also been tested with JDK17. # This Dockerfile uses JDK11 by default, but has also been tested with JDK17.
# To build with JDK17, use "--build-arg JDK_VERSION=17" # To build with JDK17, use "--build-arg JDK_VERSION=17"
ARG JDK_VERSION=11 ARG JDK_VERSION=11
ARG DSPACE_VERSION=latest
# Step 1 - Run Maven Build # Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-7_x as build FROM dspace/dspace-dependencies:${DSPACE_VERSION} as build
ARG TARGET_DIR=dspace-installer ARG TARGET_DIR=dspace-installer
WORKDIR /app WORKDIR /app
# The dspace-installer directory will be written to /install # The dspace-installer directory will be written to /install
@@ -50,7 +51,7 @@ RUN ant init_installation update_configs update_code update_webapps
FROM tomcat:9-jdk${JDK_VERSION} FROM tomcat:9-jdk${JDK_VERSION}
# NOTE: DSPACE_INSTALL must align with the "dspace.dir" default configuration. # NOTE: DSPACE_INSTALL must align with the "dspace.dir" default configuration.
ENV DSPACE_INSTALL=/dspace ENV DSPACE_INSTALL=/dspace
# Copy the /dspace directory from 'ant_build' containger to /dspace in this container # Copy the /dspace directory from 'ant_build' container to /dspace in this container
COPY --from=ant_build /dspace $DSPACE_INSTALL COPY --from=ant_build /dspace $DSPACE_INSTALL
# Expose Tomcat port and AJP port # Expose Tomcat port and AJP port
EXPOSE 8080 8009 EXPOSE 8080 8009

View File

@@ -1,14 +1,15 @@
# This image will be published as dspace/dspace-cli # This image will be published as dspace/dspace-cli
# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details # See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
# #
# - note: default tag for branch: dspace/dspace-cli: dspace/dspace-cli:dspace-7_x # - note: default tag for branch: dspace/dspace-cli: dspace/dspace-cli:latest
# This Dockerfile uses JDK11 by default, but has also been tested with JDK17. # This Dockerfile uses JDK11 by default, but has also been tested with JDK17.
# To build with JDK17, use "--build-arg JDK_VERSION=17" # To build with JDK17, use "--build-arg JDK_VERSION=17"
ARG JDK_VERSION=11 ARG JDK_VERSION=11
ARG DSPACE_VERSION=latest
# Step 1 - Run Maven Build # Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-7_x as build FROM dspace/dspace-dependencies:${DSPACE_VERSION} as build
ARG TARGET_DIR=dspace-installer ARG TARGET_DIR=dspace-installer
WORKDIR /app WORKDIR /app
# The dspace-installer directory will be written to /install # The dspace-installer directory will be written to /install

View File

@@ -1,16 +1,17 @@
# This image will be published as dspace/dspace # This image will be published as dspace/dspace
# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details # See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
# #
# - note: default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x-test # - note: default tag for branch: dspace/dspace: dspace/dspace:latest-test
# #
# This image is meant for TESTING/DEVELOPMENT ONLY as it deploys the old v6 REST API under HTTP (not HTTPS) # This image is meant for TESTING/DEVELOPMENT ONLY as it deploys the old v6 REST API under HTTP (not HTTPS)
# This Dockerfile uses JDK11 by default, but has also been tested with JDK17. # This Dockerfile uses JDK11 by default, but has also been tested with JDK17.
# To build with JDK17, use "--build-arg JDK_VERSION=17" # To build with JDK17, use "--build-arg JDK_VERSION=17"
ARG JDK_VERSION=11 ARG JDK_VERSION=11
ARG DSPACE_VERSION=latest
# Step 1 - Run Maven Build # Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-7_x as build FROM dspace/dspace-dependencies:${DSPACE_VERSION} as build
ARG TARGET_DIR=dspace-installer ARG TARGET_DIR=dspace-installer
WORKDIR /app WORKDIR /app
# The dspace-installer directory will be written to /install # The dspace-installer directory will be written to /install

View File

@@ -2,7 +2,7 @@ version: "3.7"
services: services:
dspace-cli: dspace-cli:
image: "${DOCKER_OWNER:-dspace}/dspace-cli:${DSPACE_VER:-dspace-7_x}" image: "${DOCKER_OWNER:-dspace}/dspace-cli:${DSPACE_VER:-latest}"
container_name: dspace-cli container_name: dspace-cli
build: build:
context: . context: .

View File

@@ -28,7 +28,8 @@ services:
# proxies.trusted.ipranges: This setting is required for a REST API running in Docker to trust requests # proxies.trusted.ipranges: This setting is required for a REST API running in Docker to trust requests
# from the host machine. This IP range MUST correspond to the 'dspacenet' subnet defined above. # from the host machine. This IP range MUST correspond to the 'dspacenet' subnet defined above.
proxies__P__trusted__P__ipranges: '172.23.0' proxies__P__trusted__P__ipranges: '172.23.0'
image: "${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-dspace-7_x-test}" LOGGING_CONFIG: /dspace/config/log4j2-container.xml
image: "${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-latest-test}"
build: build:
context: . context: .
dockerfile: Dockerfile.test dockerfile: Dockerfile.test
@@ -66,7 +67,7 @@ services:
dspacedb: dspacedb:
container_name: dspacedb container_name: dspacedb
# Uses a custom Postgres image with pgcrypto installed # Uses a custom Postgres image with pgcrypto installed
image: "${DOCKER_OWNER:-dspace}/dspace-postgres-pgcrypto:${DSPACE_VER:-dspace-7_x}" image: "${DOCKER_OWNER:-dspace}/dspace-postgres-pgcrypto:${DSPACE_VER:-latest}"
build: build:
# Must build out of subdirectory to have access to install script for pgcrypto # Must build out of subdirectory to have access to install script for pgcrypto
context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/ context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/
@@ -86,7 +87,7 @@ services:
# DSpace Solr container # DSpace Solr container
dspacesolr: dspacesolr:
container_name: dspacesolr container_name: dspacesolr
image: "${DOCKER_OWNER:-dspace}/dspace-solr:${DSPACE_VER:-dspace-7_x}" image: "${DOCKER_OWNER:-dspace}/dspace-solr:${DSPACE_VER:-latest}"
build: build:
context: . context: .
dockerfile: ./dspace/src/main/docker/dspace-solr/Dockerfile dockerfile: ./dspace/src/main/docker/dspace-solr/Dockerfile

View File

@@ -12,7 +12,7 @@
<parent> <parent>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId> <artifactId>dspace-parent</artifactId>
<version>7.6</version> <version>8.0-SNAPSHOT</version>
<relativePath>..</relativePath> <relativePath>..</relativePath>
</parent> </parent>
@@ -102,7 +102,7 @@
<plugin> <plugin>
<groupId>org.codehaus.mojo</groupId> <groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId> <artifactId>build-helper-maven-plugin</artifactId>
<version>3.0.0</version> <version>3.4.0</version>
<executions> <executions>
<execution> <execution>
<phase>validate</phase> <phase>validate</phase>
@@ -116,7 +116,10 @@
<plugin> <plugin>
<groupId>org.codehaus.mojo</groupId> <groupId>org.codehaus.mojo</groupId>
<artifactId>buildnumber-maven-plugin</artifactId> <artifactId>buildnumber-maven-plugin</artifactId>
<version>1.4</version> <version>3.2.0</version>
<configuration>
<revisionOnScmFailure>UNKNOWN_REVISION</revisionOnScmFailure>
</configuration>
<executions> <executions>
<execution> <execution>
<phase>validate</phase> <phase>validate</phase>
@@ -492,12 +495,6 @@
<dependency> <dependency>
<groupId>jaxen</groupId> <groupId>jaxen</groupId>
<artifactId>jaxen</artifactId> <artifactId>jaxen</artifactId>
<exclusions>
<exclusion>
<artifactId>xom</artifactId>
<groupId>xom</groupId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.jdom</groupId> <groupId>org.jdom</groupId>
@@ -632,7 +629,7 @@
<dependency> <dependency>
<groupId>dnsjava</groupId> <groupId>dnsjava</groupId>
<artifactId>dnsjava</artifactId> <artifactId>dnsjava</artifactId>
<version>2.1.7</version> <version>2.1.9</version>
</dependency> </dependency>
<dependency> <dependency>
@@ -668,7 +665,7 @@
<dependency> <dependency>
<groupId>org.flywaydb</groupId> <groupId>org.flywaydb</groupId>
<artifactId>flyway-core</artifactId> <artifactId>flyway-core</artifactId>
<version>8.4.4</version> <version>8.5.13</version>
</dependency> </dependency>
<!-- Google Analytics --> <!-- Google Analytics -->
@@ -703,10 +700,6 @@
<artifactId>annotations</artifactId> <artifactId>annotations</artifactId>
</dependency> </dependency>
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
</dependency>
<dependency> <dependency>
<groupId>javax.inject</groupId> <groupId>javax.inject</groupId>
<artifactId>javax.inject</artifactId> <artifactId>javax.inject</artifactId>
@@ -776,7 +769,7 @@
<dependency> <dependency>
<groupId>org.json</groupId> <groupId>org.json</groupId>
<artifactId>json</artifactId> <artifactId>json</artifactId>
<version>20230227</version> <version>20231013</version>
</dependency> </dependency>
<!-- Useful for testing command-line tools --> <!-- Useful for testing command-line tools -->
@@ -791,7 +784,7 @@
<dependency> <dependency>
<groupId>com.opencsv</groupId> <groupId>com.opencsv</groupId>
<artifactId>opencsv</artifactId> <artifactId>opencsv</artifactId>
<version>5.6</version> <version>5.7.1</version>
</dependency> </dependency>
<!-- Email templating --> <!-- Email templating -->
@@ -809,7 +802,7 @@
<dependency> <dependency>
<groupId>org.apache.bcel</groupId> <groupId>org.apache.bcel</groupId>
<artifactId>bcel</artifactId> <artifactId>bcel</artifactId>
<version>6.6.0</version> <version>6.7.0</version>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
@@ -818,6 +811,13 @@
<groupId>eu.openaire</groupId> <groupId>eu.openaire</groupId>
<artifactId>funders-model</artifactId> <artifactId>funders-model</artifactId>
<version>2.0.0</version> <version>2.0.0</version>
<exclusions>
<!-- Newer version pulled in via Jersey below -->
<exclusion>
<groupId>org.javassist</groupId>
<artifactId>javassist</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
@@ -856,32 +856,37 @@
<dependencyManagement> <dependencyManagement>
<dependencies> <dependencies>
<!-- for mockserver --> <!-- for mockserver -->
<!-- Solve dependency convergence issues related to <!-- Solve dependency convergence issues related to Solr and
'mockserver-junit-rule' by selecting the versions we want to use. --> 'mockserver-junit-rule' by selecting the versions we want to use. -->
<dependency> <dependency>
<groupId>io.netty</groupId> <groupId>io.netty</groupId>
<artifactId>netty-buffer</artifactId> <artifactId>netty-buffer</artifactId>
<version>4.1.68.Final</version> <version>4.1.94.Final</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>io.netty</groupId> <groupId>io.netty</groupId>
<artifactId>netty-transport</artifactId> <artifactId>netty-transport</artifactId>
<version>4.1.68.Final</version> <version>4.1.94.Final</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-transport-native-unix-common</artifactId>
<version>4.1.94.Final</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>io.netty</groupId> <groupId>io.netty</groupId>
<artifactId>netty-common</artifactId> <artifactId>netty-common</artifactId>
<version>4.1.68.Final</version> <version>4.1.94.Final</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>io.netty</groupId> <groupId>io.netty</groupId>
<artifactId>netty-handler</artifactId> <artifactId>netty-handler</artifactId>
<version>4.1.68.Final</version> <version>4.1.94.Final</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>io.netty</groupId> <groupId>io.netty</groupId>
<artifactId>netty-codec</artifactId> <artifactId>netty-codec</artifactId>
<version>4.1.68.Final</version> <version>4.1.94.Final</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.velocity</groupId> <groupId>org.apache.velocity</groupId>
@@ -917,7 +922,7 @@
<dependency> <dependency>
<groupId>org.scala-lang</groupId> <groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId> <artifactId>scala-library</artifactId>
<version>2.13.9</version> <version>2.13.11</version>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
</dependencies> </dependencies>

View File

@@ -22,9 +22,21 @@ public interface AccessStatusHelper {
* *
* @param context the DSpace context * @param context the DSpace context
* @param item the item * @param item the item
* @param threshold the embargo threshold date
* @return an access status value * @return an access status value
* @throws SQLException An exception that provides information on a database access error or other errors. * @throws SQLException An exception that provides information on a database access error or other errors.
*/ */
public String getAccessStatusFromItem(Context context, Item item, Date threshold) public String getAccessStatusFromItem(Context context, Item item, Date threshold)
throws SQLException; throws SQLException;
/**
* Retrieve embargo information for the item
*
* @param context the DSpace context
* @param item the item to check for embargo information
* @param threshold the embargo threshold date
* @return an embargo date
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
public String getEmbargoFromItem(Context context, Item item, Date threshold) throws SQLException;
} }

View File

@@ -8,6 +8,8 @@
package org.dspace.access.status; package org.dspace.access.status;
import java.sql.SQLException; import java.sql.SQLException;
import java.time.LocalDate;
import java.time.ZoneId;
import java.util.Date; import java.util.Date;
import org.dspace.access.status.service.AccessStatusService; import org.dspace.access.status.service.AccessStatusService;
@@ -15,7 +17,6 @@ import org.dspace.content.Item;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.service.PluginService; import org.dspace.core.service.PluginService;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.joda.time.LocalDate;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
/** /**
@@ -55,7 +56,10 @@ public class AccessStatusServiceImpl implements AccessStatusService {
int month = configurationService.getIntProperty("access.status.embargo.forever.month"); int month = configurationService.getIntProperty("access.status.embargo.forever.month");
int day = configurationService.getIntProperty("access.status.embargo.forever.day"); int day = configurationService.getIntProperty("access.status.embargo.forever.day");
forever_date = new LocalDate(year, month, day).toDate(); forever_date = Date.from(LocalDate.of(year, month, day)
.atStartOfDay()
.atZone(ZoneId.systemDefault())
.toInstant());
} }
} }
@@ -63,4 +67,9 @@ public class AccessStatusServiceImpl implements AccessStatusService {
public String getAccessStatus(Context context, Item item) throws SQLException { public String getAccessStatus(Context context, Item item) throws SQLException {
return helper.getAccessStatusFromItem(context, item, forever_date); return helper.getAccessStatusFromItem(context, item, forever_date);
} }
@Override
public String getEmbargoFromItem(Context context, Item item) throws SQLException {
return helper.getEmbargoFromItem(context, item, forever_date);
}
} }

View File

@@ -26,6 +26,7 @@ import org.dspace.content.service.ItemService;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.eperson.Group; import org.dspace.eperson.Group;
import org.joda.time.LocalDate;
/** /**
* Default plugin implementation of the access status helper. * Default plugin implementation of the access status helper.
@@ -33,6 +34,11 @@ import org.dspace.eperson.Group;
* calculate the access status of an item based on the policies of * calculate the access status of an item based on the policies of
* the primary or the first bitstream in the original bundle. * the primary or the first bitstream in the original bundle.
* Users can override this method for enhanced functionality. * Users can override this method for enhanced functionality.
*
* The getEmbargoInformationFromItem method provides a simple logic to
* * retrieve embargo information of bitstreams from an item based on the policies of
* * the primary or the first bitstream in the original bundle.
* * Users can override this method for enhanced functionality.
*/ */
public class DefaultAccessStatusHelper implements AccessStatusHelper { public class DefaultAccessStatusHelper implements AccessStatusHelper {
public static final String EMBARGO = "embargo"; public static final String EMBARGO = "embargo";
@@ -54,12 +60,12 @@ public class DefaultAccessStatusHelper implements AccessStatusHelper {
/** /**
* Look at the item's policies to determine an access status value. * Look at the item's policies to determine an access status value.
* It is also considering a date threshold for embargos and restrictions. * It is also considering a date threshold for embargoes and restrictions.
* *
* If the item is null, simply returns the "unknown" value. * If the item is null, simply returns the "unknown" value.
* *
* @param context the DSpace context * @param context the DSpace context
* @param item the item to embargo * @param item the item to check for embargoes
* @param threshold the embargo threshold date * @param threshold the embargo threshold date
* @return an access status value * @return an access status value
*/ */
@@ -86,7 +92,7 @@ public class DefaultAccessStatusHelper implements AccessStatusHelper {
.findFirst() .findFirst()
.orElse(null); .orElse(null);
} }
return caculateAccessStatusForDso(context, bitstream, threshold); return calculateAccessStatusForDso(context, bitstream, threshold);
} }
/** /**
@@ -104,7 +110,7 @@ public class DefaultAccessStatusHelper implements AccessStatusHelper {
* @param threshold the embargo threshold date * @param threshold the embargo threshold date
* @return an access status value * @return an access status value
*/ */
private String caculateAccessStatusForDso(Context context, DSpaceObject dso, Date threshold) private String calculateAccessStatusForDso(Context context, DSpaceObject dso, Date threshold)
throws SQLException { throws SQLException {
if (dso == null) { if (dso == null) {
return METADATA_ONLY; return METADATA_ONLY;
@@ -156,4 +162,87 @@ public class DefaultAccessStatusHelper implements AccessStatusHelper {
} }
return RESTRICTED; return RESTRICTED;
} }
/**
* Look at the policies of the primary (or first) bitstream of the item to retrieve its embargo.
*
* If the item is null, simply returns an empty map with no embargo information.
*
* @param context the DSpace context
* @param item the item to embargo
* @return an access status value
*/
@Override
public String getEmbargoFromItem(Context context, Item item, Date threshold)
throws SQLException {
Date embargoDate;
// If Item status is not "embargo" then return a null embargo date.
String accessStatus = getAccessStatusFromItem(context, item, threshold);
if (item == null || !accessStatus.equals(EMBARGO)) {
return null;
}
// Consider only the original bundles.
List<Bundle> bundles = item.getBundles(Constants.DEFAULT_BUNDLE_NAME);
// Check for primary bitstreams first.
Bitstream bitstream = bundles.stream()
.map(bundle -> bundle.getPrimaryBitstream())
.filter(Objects::nonNull)
.findFirst()
.orElse(null);
if (bitstream == null) {
// If there is no primary bitstream,
// take the first bitstream in the bundles.
bitstream = bundles.stream()
.map(bundle -> bundle.getBitstreams())
.flatMap(List::stream)
.findFirst()
.orElse(null);
}
if (bitstream == null) {
return null;
}
embargoDate = this.retrieveShortestEmbargo(context, bitstream);
return embargoDate != null ? embargoDate.toString() : null;
}
/**
*
*/
private Date retrieveShortestEmbargo(Context context, Bitstream bitstream) throws SQLException {
Date embargoDate = null;
// Only consider read policies.
List<ResourcePolicy> policies = authorizeService
.getPoliciesActionFilter(context, bitstream, Constants.READ);
// Looks at all read policies.
for (ResourcePolicy policy : policies) {
boolean isValid = resourcePolicyService.isDateValid(policy);
Group group = policy.getGroup();
if (group != null && StringUtils.equals(group.getName(), Group.ANONYMOUS)) {
// Only calculate the status for the anonymous group.
if (!isValid) {
// If the policy is not valid there is an active embargo
Date startDate = policy.getStartDate();
if (startDate != null && !startDate.before(LocalDate.now().toDate())) {
// There is an active embargo: aim to take the shortest embargo (account for rare cases where
// more than one resource policy exists)
if (embargoDate == null) {
embargoDate = startDate;
} else {
embargoDate = startDate.before(embargoDate) ? startDate : embargoDate;
}
}
}
}
}
return embargoDate;
}
} }

View File

@@ -40,7 +40,18 @@ public interface AccessStatusService {
* *
* @param context the DSpace context * @param context the DSpace context
* @param item the item * @param item the item
* @return an access status value
* @throws SQLException An exception that provides information on a database access error or other errors. * @throws SQLException An exception that provides information on a database access error or other errors.
*/ */
public String getAccessStatus(Context context, Item item) throws SQLException; public String getAccessStatus(Context context, Item item) throws SQLException;
/**
* Retrieve embargo information for the item
*
* @param context the DSpace context
* @param item the item to check for embargo information
* @return an embargo date
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
public String getEmbargoFromItem(Context context, Item item) throws SQLException;
} }

View File

@@ -464,7 +464,7 @@ public class BulkAccessControl extends DSpaceRunnable<BulkAccessControlScriptCon
.forEach(accessCondition -> createResourcePolicy(item, accessCondition, .forEach(accessCondition -> createResourcePolicy(item, accessCondition,
itemAccessConditions.get(accessCondition.getName()))); itemAccessConditions.get(accessCondition.getName())));
itemService.adjustItemPolicies(context, item, item.getOwningCollection()); itemService.adjustItemPolicies(context, item, item.getOwningCollection(), false);
} }
/** /**

View File

@@ -578,6 +578,10 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
wfItem = workflowService.startWithoutNotify(c, wsItem); wfItem = workflowService.startWithoutNotify(c, wsItem);
} }
} else { } else {
// Add provenance info
String provenance = installItemService.getSubmittedByProvenanceMessage(c, wsItem.getItem());
itemService.addMetadata(c, item, MetadataSchemaEnum.DC.getName(),
"description", "provenance", "en", provenance);
// Install the item // Install the item
installItemService.installItem(c, wsItem); installItemService.installItem(c, wsItem);
} }
@@ -1363,7 +1367,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
* is the field is defined as authority controlled * is the field is defined as authority controlled
*/ */
private static boolean isAuthorityControlledField(String md) { private static boolean isAuthorityControlledField(String md) {
String mdf = StringUtils.substringAfter(md, ":"); String mdf = md.contains(":") ? StringUtils.substringAfter(md, ":") : md;
mdf = StringUtils.substringBefore(mdf, "["); mdf = StringUtils.substringBefore(mdf, "[");
return authorityControlled.contains(mdf); return authorityControlled.contains(mdf);
} }

View File

@@ -774,6 +774,10 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
// put item in system // put item in system
if (!isTest) { if (!isTest) {
try { try {
// Add provenance info
String provenance = installItemService.getSubmittedByProvenanceMessage(c, wi.getItem());
itemService.addMetadata(c, wi.getItem(), MetadataSchemaEnum.DC.getName(),
"description", "provenance", "en", provenance);
installItemService.installItem(c, wi, myhandle); installItemService.installItem(c, wi, myhandle);
} catch (Exception e) { } catch (Exception e) {
workspaceItemService.deleteAll(c, wi); workspaceItemService.deleteAll(c, wi);

View File

@@ -21,6 +21,7 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.scripts.DSpaceRunnable; import org.dspace.scripts.DSpaceRunnable;
import org.dspace.scripts.DSpaceRunnable.StepResult;
import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.configuration.ScriptConfiguration;
import org.dspace.scripts.factory.ScriptServiceFactory; import org.dspace.scripts.factory.ScriptServiceFactory;
import org.dspace.scripts.handler.DSpaceRunnableHandler; import org.dspace.scripts.handler.DSpaceRunnableHandler;
@@ -145,8 +146,13 @@ public class ScriptLauncher {
private static int executeScript(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler, private static int executeScript(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler,
DSpaceRunnable script) { DSpaceRunnable script) {
try { try {
script.initialize(args, dSpaceRunnableHandler, null); StepResult result = script.initialize(args, dSpaceRunnableHandler, null);
// check the StepResult, only run the script if the result is Continue;
// otherwise - for example the script is started with the help as argument, nothing is to do
if (StepResult.Continue.equals(result)) {
// runs the script, the normal initialization is successful
script.run(); script.run();
}
return 0; return 0;
} catch (ParseException e) { } catch (ParseException e) {
script.printHelp(); script.printHelp();

View File

@@ -10,6 +10,7 @@ package org.dspace.app.mediafilter;
import java.io.InputStream; import java.io.InputStream;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
@@ -40,6 +41,7 @@ import org.dspace.eperson.Group;
import org.dspace.eperson.service.GroupService; import org.dspace.eperson.service.GroupService;
import org.dspace.scripts.handler.DSpaceRunnableHandler; import org.dspace.scripts.handler.DSpaceRunnableHandler;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.util.ThrowableUtils;
import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
@@ -225,23 +227,9 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
filtered = true; filtered = true;
} }
} catch (Exception e) { } catch (Exception e) {
String handle = myItem.getHandle();
List<Bundle> bundles = myBitstream.getBundles();
long size = myBitstream.getSizeBytes();
String checksum = myBitstream.getChecksum() + " (" + myBitstream.getChecksumAlgorithm() + ")";
int assetstore = myBitstream.getStoreNumber();
// Printout helpful information to find the errored bitstream. // Printout helpful information to find the errored bitstream.
StringBuilder sb = new StringBuilder("ERROR filtering, skipping bitstream:\n"); logError(formatBitstreamDetails(myItem.getHandle(), myBitstream));
sb.append("\tItem Handle: ").append(handle); logError(ThrowableUtils.formatCauseChain(e));
for (Bundle bundle : bundles) {
sb.append("\tBundle Name: ").append(bundle.getName());
}
sb.append("\tFile Size: ").append(size);
sb.append("\tChecksum: ").append(checksum);
sb.append("\tAsset Store: ").append(assetstore);
logError(sb.toString());
logError(e.getMessage(), e);
} }
} else if (filterClass instanceof SelfRegisterInputFormats) { } else if (filterClass instanceof SelfRegisterInputFormats) {
// Filter implements self registration, so check to see if it should be applied // Filter implements self registration, so check to see if it should be applied
@@ -319,10 +307,10 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
// check if destination bitstream exists // check if destination bitstream exists
Bundle existingBundle = null; Bundle existingBundle = null;
List<Bitstream> existingBitstreams = new ArrayList<Bitstream>(); List<Bitstream> existingBitstreams = new ArrayList<>();
List<Bundle> bundles = itemService.getBundles(item, formatFilter.getBundleName()); List<Bundle> bundles = itemService.getBundles(item, formatFilter.getBundleName());
if (bundles.size() > 0) { if (!bundles.isEmpty()) {
// only finds the last matching bundle and all matching bitstreams in the proper bundle(s) // only finds the last matching bundle and all matching bitstreams in the proper bundle(s)
for (Bundle bundle : bundles) { for (Bundle bundle : bundles) {
List<Bitstream> bitstreams = bundle.getBitstreams(); List<Bitstream> bitstreams = bundle.getBitstreams();
@@ -337,7 +325,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
} }
// if exists and overwrite = false, exit // if exists and overwrite = false, exit
if (!overWrite && (existingBitstreams.size() > 0)) { if (!overWrite && (!existingBitstreams.isEmpty())) {
if (!isQuiet) { if (!isQuiet) {
logInfo("SKIPPED: bitstream " + source.getID() logInfo("SKIPPED: bitstream " + source.getID()
+ " (item: " + item.getHandle() + ") because '" + newName + "' already exists"); + " (item: " + item.getHandle() + ") because '" + newName + "' already exists");
@@ -370,7 +358,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
} }
Bundle targetBundle; // bundle we're modifying Bundle targetBundle; // bundle we're modifying
if (bundles.size() < 1) { if (bundles.isEmpty()) {
// create new bundle if needed // create new bundle if needed
targetBundle = bundleService.create(context, item, formatFilter.getBundleName()); targetBundle = bundleService.create(context, item, formatFilter.getBundleName());
} else { } else {
@@ -399,6 +387,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
} catch (OutOfMemoryError oome) { } catch (OutOfMemoryError oome) {
logError("!!! OutOfMemoryError !!!"); logError("!!! OutOfMemoryError !!!");
logError(formatBitstreamDetails(item.getHandle(), source));
} }
// we are overwriting, so remove old bitstream // we are overwriting, so remove old bitstream
@@ -496,6 +485,37 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
} }
} }
/**
* Describe a Bitstream in detail. Format a single line of text with
* information such as Bitstore index, backing file ID, size, checksum,
* enclosing Item and Bundles.
*
* @param itemHandle Handle of the Item by which we found the Bitstream.
* @param bitstream the Bitstream to be described.
* @return Bitstream details.
*/
private String formatBitstreamDetails(String itemHandle,
Bitstream bitstream) {
List<Bundle> bundles;
try {
bundles = bitstream.getBundles();
} catch (SQLException ex) {
logError("Unexpected error fetching Bundles", ex);
bundles = Collections.EMPTY_LIST;
}
StringBuilder sb = new StringBuilder("ERROR filtering, skipping bitstream:\n");
sb.append("\tItem Handle: ").append(itemHandle);
for (Bundle bundle : bundles) {
sb.append("\tBundle Name: ").append(bundle.getName());
}
sb.append("\tFile Size: ").append(bitstream.getSizeBytes());
sb.append("\tChecksum: ").append(bitstream.getChecksum())
.append(" (").append(bitstream.getChecksumAlgorithm()).append(')');
sb.append("\tAsset Store: ").append(bitstream.getStoreNumber());
sb.append("\tInternal ID: ").append(bitstream.getInternalId());
return sb.toString();
}
private void logInfo(String message) { private void logInfo(String message) {
if (handler != null) { if (handler != null) {
handler.logInfo(message); handler.logInfo(message);

View File

@@ -189,7 +189,10 @@ public class GenerateSitemaps {
*/ */
public static void generateSitemaps(boolean makeHTMLMap, boolean makeSitemapOrg) throws SQLException, IOException { public static void generateSitemaps(boolean makeHTMLMap, boolean makeSitemapOrg) throws SQLException, IOException {
String uiURLStem = configurationService.getProperty("dspace.ui.url"); String uiURLStem = configurationService.getProperty("dspace.ui.url");
String sitemapStem = uiURLStem + "/sitemap"; if (!uiURLStem.endsWith("/")) {
uiURLStem = uiURLStem + '/';
}
String sitemapStem = uiURLStem + "sitemap";
File outputDir = new File(configurationService.getProperty("sitemap.dir")); File outputDir = new File(configurationService.getProperty("sitemap.dir"));
if (!outputDir.exists() && !outputDir.mkdir()) { if (!outputDir.exists() && !outputDir.mkdir()) {
@@ -212,7 +215,7 @@ public class GenerateSitemaps {
List<Community> comms = communityService.findAll(c); List<Community> comms = communityService.findAll(c);
for (Community comm : comms) { for (Community comm : comms) {
String url = uiURLStem + "/communities/" + comm.getID(); String url = uiURLStem + "communities/" + comm.getID();
if (makeHTMLMap) { if (makeHTMLMap) {
html.addURL(url, null); html.addURL(url, null);
@@ -227,7 +230,7 @@ public class GenerateSitemaps {
List<Collection> colls = collectionService.findAll(c); List<Collection> colls = collectionService.findAll(c);
for (Collection coll : colls) { for (Collection coll : colls) {
String url = uiURLStem + "/collections/" + coll.getID(); String url = uiURLStem + "collections/" + coll.getID();
if (makeHTMLMap) { if (makeHTMLMap) {
html.addURL(url, null); html.addURL(url, null);
@@ -259,11 +262,11 @@ public class GenerateSitemaps {
&& StringUtils.isNotBlank(discoverResult.getSearchDocument( && StringUtils.isNotBlank(discoverResult.getSearchDocument(
discoverResult.getIndexableObjects().get(0)).get(0).getSearchFieldValues("entityType").get(0)) discoverResult.getIndexableObjects().get(0)).get(0).getSearchFieldValues("entityType").get(0))
) { ) {
url = uiURLStem + "/entities/" + StringUtils.lowerCase(discoverResult.getSearchDocument( url = uiURLStem + "entities/" + StringUtils.lowerCase(discoverResult.getSearchDocument(
discoverResult.getIndexableObjects().get(0)) discoverResult.getIndexableObjects().get(0))
.get(0).getSearchFieldValues("entityType").get(0)) + "/" + i.getID(); .get(0).getSearchFieldValues("entityType").get(0)) + "/" + i.getID();
} else { } else {
url = uiURLStem + "/items/" + i.getID(); url = uiURLStem + "items/" + i.getID();
} }
Date lastMod = i.getLastModified(); Date lastMod = i.getLastModified();

View File

@@ -713,8 +713,8 @@ public class LDAPAuthentication
private void assignGroups(String dn, ArrayList<String> group, Context context) { private void assignGroups(String dn, ArrayList<String> group, Context context) {
if (StringUtils.isNotBlank(dn)) { if (StringUtils.isNotBlank(dn)) {
System.out.println("dn:" + dn); System.out.println("dn:" + dn);
int i = 1; int groupmapIndex = 1;
String groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + i); String groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + groupmapIndex);
boolean cmp; boolean cmp;
@@ -725,6 +725,13 @@ public class LDAPAuthentication
String ldapSearchString = t[0]; String ldapSearchString = t[0];
String dspaceGroupName = t[1]; String dspaceGroupName = t[1];
if (group == null) {
cmp = StringUtils.containsIgnoreCase(dn, ldapSearchString + ",");
if (cmp) {
assignGroup(context, groupmapIndex, dspaceGroupName);
}
} else {
// list of strings with dn from LDAP groups // list of strings with dn from LDAP groups
// inner loop // inner loop
Iterator<String> groupIterator = group.iterator(); Iterator<String> groupIterator = group.iterator();
@@ -741,7 +748,29 @@ public class LDAPAuthentication
} }
if (cmp) { if (cmp) {
// assign user to this group assignGroup(context, groupmapIndex, dspaceGroupName);
}
}
}
groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + ++groupmapIndex);
}
}
}
/**
* Add the current authenticated user to the specified group
*
* @param context
* DSpace context
*
* @param groupmapIndex
* authentication-ldap.login.groupmap.* key index defined in dspace.cfg
*
* @param dspaceGroupName
* The DSpace group to add the user to
*/
private void assignGroup(Context context, int groupmapIndex, String dspaceGroupName) {
try { try {
Group ldapGroup = groupService.findByName(context, dspaceGroupName); Group ldapGroup = groupService.findByName(context, dspaceGroupName);
if (ldapGroup != null) { if (ldapGroup != null) {
@@ -751,7 +780,7 @@ public class LDAPAuthentication
// The group does not exist // The group does not exist
log.warn(LogHelper.getHeader(context, log.warn(LogHelper.getHeader(context,
"ldap_assignGroupsBasedOnLdapDn", "ldap_assignGroupsBasedOnLdapDn",
"Group defined in authentication-ldap.login.groupmap." + i "Group defined in authentication-ldap.login.groupmap." + groupmapIndex
+ " does not exist :: " + dspaceGroupName)); + " does not exist :: " + dspaceGroupName));
} }
} catch (AuthorizeException ae) { } catch (AuthorizeException ae) {
@@ -764,12 +793,6 @@ public class LDAPAuthentication
dspaceGroupName)); dspaceGroupName));
} }
} }
}
groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + ++i);
}
}
}
@Override @Override
public boolean isUsed(final Context context, final HttpServletRequest request) { public boolean isUsed(final Context context, final HttpServletRequest request) {

View File

@@ -9,6 +9,10 @@ package org.dspace.authority;
import java.sql.SQLException; import java.sql.SQLException;
import java.text.DateFormat; import java.text.DateFormat;
import java.time.DateTimeException;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Date; import java.util.Date;
import java.util.HashMap; import java.util.HashMap;
@@ -16,6 +20,7 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputDocument;
@@ -25,9 +30,6 @@ import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.util.SolrUtils; import org.dspace.util.SolrUtils;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
/** /**
* @author Antoine Snyers (antoine at atmire.com) * @author Antoine Snyers (antoine at atmire.com)
@@ -192,7 +194,7 @@ public class AuthorityValue {
} }
/** /**
* Information that can be used the choice ui * Information that can be used the choice ui.
* *
* @return map * @return map
*/ */
@@ -200,42 +202,51 @@ public class AuthorityValue {
return new HashMap<>(); return new HashMap<>();
} }
/**
public List<DateTimeFormatter> getDateFormatters() { * Build a list of ISO date formatters to parse various forms.
List<DateTimeFormatter> list = new ArrayList<>(); *
list.add(ISODateTimeFormat.dateTime()); * <p><strong>Note:</strong> any formatter which does not parse a zone or
list.add(ISODateTimeFormat.dateTimeNoMillis()); * offset must have a default zone set. See {@link stringToDate}.
*
* @return the formatters.
*/
static private List<DateTimeFormatter> getDateFormatters() {
List<java.time.format.DateTimeFormatter> list = new ArrayList<>();
list.add(java.time.format.DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[.SSS]X"));
list.add(java.time.format.DateTimeFormatter.ISO_LOCAL_DATE_TIME
.withZone(ZoneId.systemDefault().normalized()));
return list; return list;
} }
public Date stringToDate(String date) { /**
* Convert a date string to internal form, trying several parsers.
*
* @param date serialized date to be converted.
* @return converted date, or null if no parser accepted the input.
*/
static public Date stringToDate(String date) {
Date result = null; Date result = null;
if (StringUtils.isNotBlank(date)) { if (StringUtils.isNotBlank(date)) {
List<DateTimeFormatter> dateFormatters = getDateFormatters(); for (DateTimeFormatter formatter : getDateFormatters()) {
boolean converted = false;
int formatter = 0;
while (!converted) {
try { try {
DateTimeFormatter dateTimeFormatter = dateFormatters.get(formatter); ZonedDateTime dateTime = ZonedDateTime.parse(date, formatter);
DateTime dateTime = dateTimeFormatter.parseDateTime(date); result = Date.from(dateTime.toInstant());
result = dateTime.toDate(); break;
converted = true; } catch (DateTimeException e) {
} catch (IllegalArgumentException e) { log.debug("Input '{}' did not match {}", date, formatter);
formatter++;
if (formatter > dateFormatters.size()) {
converted = true;
}
log.error("Could not find a valid date format for: \"" + date + "\"", e);
} }
} }
} }
if (null == result) {
log.error("Could not find a valid date format for: \"{}\"", date);
}
return result; return result;
} }
/** /**
* log4j logger * log4j logger
*/ */
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthorityValue.class); private static Logger log = LogManager.getLogger();
@Override @Override
public String toString() { public String toString() {
@@ -272,6 +283,10 @@ public class AuthorityValue {
return new AuthorityValue(); return new AuthorityValue();
} }
/**
* Get the type of authority which created this value.
* @return type name.
*/
public String getAuthorityType() { public String getAuthorityType() {
return "internal"; return "internal";
} }

View File

@@ -108,7 +108,7 @@ public class CrossLinks {
} else { } else {
// Exact match, if the key field has no .* wildcard // Exact match, if the key field has no .* wildcard
if (links.containsKey(metadata)) { if (links.containsKey(metadata)) {
return links.get(key); return links.get(metadata);
} }
} }
} }

View File

@@ -0,0 +1,77 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.cli;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
/**
 * Extended version of commons-cli's {@link DefaultParser} which silently
 * skips/ignores unknown arguments instead of failing with an
 * {@code UnrecognizedOptionException}. Tokens that are not declared in the
 * supplied {@link Options} are filtered out before delegating to the
 * standard parser; value tokens belonging to known options are kept.
 */
public class DSpaceSkipUnknownArgumentsParser extends DefaultParser {

    /**
     * Parse the arguments according to the specified options, ignoring any
     * token that is neither a known option nor the value of a known option.
     *
     * @param options the specified Options
     * @param arguments the command line arguments
     * @return the parsed command line
     * @throws ParseException if there are any problems encountered while parsing the known tokens.
     */
    @Override
    public CommandLine parse(Options options, String[] arguments) throws ParseException {
        return super.parse(options, getOnlyKnownArguments(options, arguments));
    }

    /**
     * Parse the arguments according to the specified options and properties,
     * ignoring any token that is neither a known option nor the value of a
     * known option.
     *
     * @param options the specified Options
     * @param arguments the command line arguments
     * @param properties command line option name-value pairs
     * @return the parsed command line
     * @throws ParseException if there are any problems encountered while parsing the known tokens.
     */
    @Override
    public CommandLine parse(Options options, String[] arguments, Properties properties) throws ParseException {
        return super.parse(options, getOnlyKnownArguments(options, arguments), properties);
    }

    /**
     * Parse the arguments according to the specified options.
     * @param options the specified Options
     * @param arguments the command line arguments
     * @param stopAtNonOption can be ignored - an unrecognized argument is ignored, an unrecognized argument doesn't
     *                        stop the parsing and doesn't trigger a ParseException
     *
     * @return the list of atomic option and value tokens
     * @throws ParseException if there are any problems encountered while parsing the command line tokens.
     */
    @Override
    public CommandLine parse(Options options, String[] arguments, boolean stopAtNonOption) throws ParseException {
        return super.parse(options, getOnlyKnownArguments(options, arguments), stopAtNonOption);
    }

    /**
     * Parse the arguments according to the specified options and properties.
     * @param options the specified Options
     * @param arguments the command line arguments
     * @param properties command line option name-value pairs
     * @param stopAtNonOption can be ignored - an unrecognized argument is ignored, an unrecognized argument doesn't
     *                        stop the parsing and doesn't trigger a ParseException
     *
     * @return the list of atomic option and value tokens
     * @throws ParseException if there are any problems encountered while parsing the command line tokens.
     */
    @Override
    public CommandLine parse(Options options, String[] arguments, Properties properties, boolean stopAtNonOption)
            throws ParseException {
        return super.parse(options, getOnlyKnownArguments(options, arguments), properties, stopAtNonOption);
    }

    /**
     * Filter the raw token list down to what the parser understands: known
     * options and the value token(s) that belong to them.
     *
     * @param options the declared options
     * @param arguments the raw command line tokens
     * @return only the known options and their values, in original order
     */
    private String[] getOnlyKnownArguments(Options options, String[] arguments) {
        List<String> knownArguments = new ArrayList<>();
        // Value tokens still expected by the most recently kept option:
        // 0 = none, >0 = exactly that many, <0 (Option.UNLIMITED_VALUES) = unlimited.
        int valuesLeft = 0;
        for (String token : arguments) {
            if (options.hasOption(token)) {
                knownArguments.add(token);
                // Also keep the option's value token(s). The previous
                // implementation dropped them, so every known option taking a
                // value lost it and super.parse() then failed with
                // MissingArgumentException.
                valuesLeft = options.getOption(token).hasArg()
                        ? options.getOption(token).getArgs()
                        : 0;
            } else if (valuesLeft != 0) {
                knownArguments.add(token);
                if (valuesLeft > 0) {
                    valuesLeft--;
                }
            }
        }
        return knownArguments.toArray(new String[0]);
    }
}

View File

@@ -276,6 +276,11 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
//Remove our bitstream from all our bundles //Remove our bitstream from all our bundles
final List<Bundle> bundles = bitstream.getBundles(); final List<Bundle> bundles = bitstream.getBundles();
for (Bundle bundle : bundles) { for (Bundle bundle : bundles) {
authorizeService.authorizeAction(context, bundle, Constants.REMOVE);
//We also need to remove the bitstream id when it's set as bundle's primary bitstream
if (bitstream.equals(bundle.getPrimaryBitstream())) {
bundle.unsetPrimaryBitstreamID();
}
bundle.removeBitstream(bitstream); bundle.removeBitstream(bitstream);
} }
@@ -403,7 +408,7 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
@Override @Override
public Bitstream getThumbnail(Context context, Bitstream bitstream) throws SQLException { public Bitstream getThumbnail(Context context, Bitstream bitstream) throws SQLException {
Pattern pattern = Pattern.compile("^" + bitstream.getName() + ".([^.]+)$"); Pattern pattern = getBitstreamNamePattern(bitstream);
for (Bundle bundle : bitstream.getBundles()) { for (Bundle bundle : bitstream.getBundles()) {
for (Item item : bundle.getItems()) { for (Item item : bundle.getItems()) {
@@ -420,6 +425,13 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
return null; return null;
} }
/**
 * Build the regular expression used to find a derivative (e.g. thumbnail)
 * of the given bitstream:  the original name followed by a separator dot
 * and a single extension, e.g. "report.pdf" matches "report.pdf.jpg".
 *
 * @param bitstream the bitstream whose derivatives are sought.
 * @return pattern matching {@code <name>.<extension>}.
 */
protected Pattern getBitstreamNamePattern(Bitstream bitstream) {
    if (bitstream.getName() != null) {
        // Quote the name so regex metacharacters in filenames cannot break
        // (or widen) the match, and escape the separator dot: previously it
        // was a bare "." metacharacter matching ANY character, allowing
        // false positives such as "report_pdfXjpg".
        return Pattern.compile("^" + Pattern.quote(bitstream.getName()) + "\\.([^.]+)$");
    }
    // Name is null: preserve the historical fallback, which matches against
    // the literal string "null".
    return Pattern.compile("^" + bitstream.getName() + ".([^.]+)$");
}
@Override @Override
public BitstreamFormat getFormat(Context context, Bitstream bitstream) throws SQLException { public BitstreamFormat getFormat(Context context, Bitstream bitstream) throws SQLException {
if (bitstream.getBitstreamFormat() == null) { if (bitstream.getBitstreamFormat() == null) {

View File

@@ -126,7 +126,7 @@ public class Bundle extends DSpaceObject implements DSpaceObjectLegacySupport {
* Unset the primary bitstream ID of the bundle * Unset the primary bitstream ID of the bundle
*/ */
public void unsetPrimaryBitstreamID() { public void unsetPrimaryBitstreamID() {
primaryBitstream = null; setPrimaryBitstreamID(null);
} }
/** /**

View File

@@ -194,7 +194,6 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
List<Group> defaultBitstreamReadGroups = List<Group> defaultBitstreamReadGroups =
authorizeService.getAuthorizedGroups(context, owningCollection, authorizeService.getAuthorizedGroups(context, owningCollection,
Constants.DEFAULT_BITSTREAM_READ); Constants.DEFAULT_BITSTREAM_READ);
log.info(defaultBitstreamReadGroups.size());
// If this collection is configured with a DEFAULT_BITSTREAM_READ group, overwrite the READ policy // If this collection is configured with a DEFAULT_BITSTREAM_READ group, overwrite the READ policy
// inherited from the bundle with this policy. // inherited from the bundle with this policy.
if (!defaultBitstreamReadGroups.isEmpty()) { if (!defaultBitstreamReadGroups.isEmpty()) {

View File

@@ -93,7 +93,7 @@ public class InstallItemServiceImpl implements InstallItemService {
// As this is a BRAND NEW item, as a final step we need to remove the // As this is a BRAND NEW item, as a final step we need to remove the
// submitter item policies created during deposit and replace them with // submitter item policies created during deposit and replace them with
// the default policies from the collection. // the default policies from the collection.
itemService.inheritCollectionDefaultPolicies(c, item, collection); itemService.inheritCollectionDefaultPolicies(c, item, collection, false);
return item; return item;
} }
@@ -271,4 +271,28 @@ public class InstallItemServiceImpl implements InstallItemService {
return myMessage.toString(); return myMessage.toString();
} }
@Override
public String getSubmittedByProvenanceMessage(Context context, Item item) throws SQLException {
    // Timestamp shared by both branches of the provenance statement.
    DCDate now = DCDate.getCurrent();

    // Create provenance description. StringBuilder replaces StringBuffer:
    // the builder never escapes this method, so no synchronization is needed.
    StringBuilder provmessage = new StringBuilder();
    if (item.getSubmitter() != null) {
        provmessage.append("Submitted by ").append(item.getSubmitter().getFullName())
                   .append(" (").append(item.getSubmitter().getEmail()).append(") on ")
                   .append(now.toString());
    } else {
        // Null submitter: most likely an automated deposit. Note the
        // trailing space after "on" -- the previous version ran the word
        // into the date ("...automated on2023-...").
        provmessage.append("Submitted by unknown (probably automated) on ")
                   .append(now.toString());
    }
    provmessage.append("\n");

    // Append sizes and checksums of all bitstreams, for tamper evidence.
    provmessage.append(getBitstreamProvenanceMessage(context, item));
    return provmessage.toString();
}
} }

View File

@@ -920,8 +920,16 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
@Override @Override
public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection) public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection)
throws SQLException, AuthorizeException { throws SQLException, AuthorizeException {
adjustItemPolicies(context, item, collection); inheritCollectionDefaultPolicies(context, item, collection, true);
adjustBundleBitstreamPolicies(context, item, collection); }
@Override
public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection,
boolean replaceReadRPWithCollectionRP)
throws SQLException, AuthorizeException {
adjustItemPolicies(context, item, collection, replaceReadRPWithCollectionRP);
adjustBundleBitstreamPolicies(context, item, collection, replaceReadRPWithCollectionRP);
log.debug(LogHelper.getHeader(context, "item_inheritCollectionDefaultPolicies", log.debug(LogHelper.getHeader(context, "item_inheritCollectionDefaultPolicies",
"item_id=" + item.getID())); "item_id=" + item.getID()));
@@ -930,6 +938,13 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
@Override @Override
public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection) public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection)
throws SQLException, AuthorizeException { throws SQLException, AuthorizeException {
adjustBundleBitstreamPolicies(context, item, collection, true);
}
@Override
public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection,
boolean replaceReadRPWithCollectionRP)
throws SQLException, AuthorizeException {
// Bundles should inherit from DEFAULT_ITEM_READ so that if the item is readable, the files // Bundles should inherit from DEFAULT_ITEM_READ so that if the item is readable, the files
// can be listed (even if they are themselves not readable as per DEFAULT_BITSTREAM_READ or other // can be listed (even if they are themselves not readable as per DEFAULT_BITSTREAM_READ or other
// policies or embargos applied // policies or embargos applied
@@ -948,10 +963,19 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
} }
// TODO: should we also throw an exception if no DEFAULT_ITEM_READ? // TODO: should we also throw an exception if no DEFAULT_ITEM_READ?
boolean removeCurrentReadRPBitstream =
replaceReadRPWithCollectionRP && defaultCollectionBitstreamPolicies.size() > 0;
boolean removeCurrentReadRPBundle =
replaceReadRPWithCollectionRP && defaultCollectionBundlePolicies.size() > 0;
// remove all policies from bundles, add new ones // remove all policies from bundles, add new ones
// Remove bundles // Remove bundles
List<Bundle> bunds = item.getBundles(); List<Bundle> bunds = item.getBundles();
for (Bundle mybundle : bunds) { for (Bundle mybundle : bunds) {
// If collection has default READ policies, remove the bundle's READ policies.
if (removeCurrentReadRPBundle) {
authorizeService.removePoliciesActionFilter(context, mybundle, Constants.READ);
}
// if come from InstallItem: remove all submission/workflow policies // if come from InstallItem: remove all submission/workflow policies
authorizeService.removeAllPoliciesByDSOAndType(context, mybundle, ResourcePolicy.TYPE_SUBMISSION); authorizeService.removeAllPoliciesByDSOAndType(context, mybundle, ResourcePolicy.TYPE_SUBMISSION);
@@ -960,6 +984,11 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
addDefaultPoliciesNotInPlace(context, mybundle, defaultCollectionBundlePolicies); addDefaultPoliciesNotInPlace(context, mybundle, defaultCollectionBundlePolicies);
for (Bitstream bitstream : mybundle.getBitstreams()) { for (Bitstream bitstream : mybundle.getBitstreams()) {
// If collection has default READ policies, remove the bundle's READ policies.
if (removeCurrentReadRPBitstream) {
authorizeService.removePoliciesActionFilter(context, bitstream, Constants.READ);
}
// if come from InstallItem: remove all submission/workflow policies // if come from InstallItem: remove all submission/workflow policies
removeAllPoliciesAndAddDefault(context, bitstream, defaultItemPolicies, removeAllPoliciesAndAddDefault(context, bitstream, defaultItemPolicies,
defaultCollectionBitstreamPolicies); defaultCollectionBitstreamPolicies);
@@ -968,7 +997,14 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
} }
@Override @Override
public void adjustBitstreamPolicies(Context context, Item item, Collection collection , Bitstream bitstream) public void adjustBitstreamPolicies(Context context, Item item, Collection collection, Bitstream bitstream)
throws SQLException, AuthorizeException {
adjustBitstreamPolicies(context, item, collection, bitstream, true);
}
@Override
public void adjustBitstreamPolicies(Context context, Item item, Collection collection , Bitstream bitstream,
boolean replaceReadRPWithCollectionRP)
throws SQLException, AuthorizeException { throws SQLException, AuthorizeException {
List<ResourcePolicy> defaultCollectionPolicies = authorizeService List<ResourcePolicy> defaultCollectionPolicies = authorizeService
.getPoliciesActionFilter(context, collection, Constants.DEFAULT_BITSTREAM_READ); .getPoliciesActionFilter(context, collection, Constants.DEFAULT_BITSTREAM_READ);
@@ -998,10 +1034,22 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
@Override @Override
public void adjustItemPolicies(Context context, Item item, Collection collection) public void adjustItemPolicies(Context context, Item item, Collection collection)
throws SQLException, AuthorizeException { throws SQLException, AuthorizeException {
adjustItemPolicies(context, item, collection, true);
}
@Override
public void adjustItemPolicies(Context context, Item item, Collection collection,
boolean replaceReadRPWithCollectionRP)
throws SQLException, AuthorizeException {
// read collection's default READ policies // read collection's default READ policies
List<ResourcePolicy> defaultCollectionPolicies = authorizeService List<ResourcePolicy> defaultCollectionPolicies = authorizeService
.getPoliciesActionFilter(context, collection, Constants.DEFAULT_ITEM_READ); .getPoliciesActionFilter(context, collection, Constants.DEFAULT_ITEM_READ);
// If collection has defaultREAD policies, remove the item's READ policies.
if (replaceReadRPWithCollectionRP && defaultCollectionPolicies.size() > 0) {
authorizeService.removePoliciesActionFilter(context, item, Constants.READ);
}
// MUST have default policies // MUST have default policies
if (defaultCollectionPolicies.size() < 1) { if (defaultCollectionPolicies.size() < 1) {
throw new SQLException("Collection " + collection.getID() throw new SQLException("Collection " + collection.getID()

View File

@@ -17,6 +17,7 @@ import java.util.Map.Entry;
import java.util.Set; import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.app.util.DCInput; import org.dspace.app.util.DCInput;
@@ -557,6 +558,15 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
init(); init();
ChoiceAuthority source = this.getChoiceAuthorityByAuthorityName(nameVocab); ChoiceAuthority source = this.getChoiceAuthorityByAuthorityName(nameVocab);
if (source != null && source instanceof DSpaceControlledVocabulary) { if (source != null && source instanceof DSpaceControlledVocabulary) {
// First, check if this vocabulary index is disabled
String[] vocabulariesDisabled = configurationService
.getArrayProperty("webui.browse.vocabularies.disabled");
if (vocabulariesDisabled != null && ArrayUtils.contains(vocabulariesDisabled, nameVocab)) {
// Discard this vocabulary browse index
return null;
}
Set<String> metadataFields = new HashSet<>(); Set<String> metadataFields = new HashSet<>();
Map<String, List<String>> formsToFields = this.authoritiesFormDefinitions.get(nameVocab); Map<String, List<String>> formsToFields = this.authoritiesFormDefinitions.get(nameVocab);
for (Map.Entry<String, List<String>> formToField : formsToFields.entrySet()) { for (Map.Entry<String, List<String>> formToField : formsToFields.entrySet()) {
@@ -585,6 +595,12 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
break; break;
} }
} }
// If there is no matching facet, return null to ignore this vocabulary index
if (matchingFacet == null) {
return null;
}
DSpaceControlledVocabularyIndex vocabularyIndex = DSpaceControlledVocabularyIndex vocabularyIndex =
new DSpaceControlledVocabularyIndex((DSpaceControlledVocabulary) source, metadataFields, new DSpaceControlledVocabularyIndex((DSpaceControlledVocabulary) source, metadataFields,
matchingFacet); matchingFacet);

View File

@@ -83,4 +83,15 @@ public interface InstallItemService {
public String getBitstreamProvenanceMessage(Context context, Item myitem) public String getBitstreamProvenanceMessage(Context context, Item myitem)
throws SQLException; throws SQLException;
/**
 * Generate provenance description of direct item submission (not through workflow).
 *
 * @param context context
 * @param item the item to generate description for
 * @return provenance description
 * @throws SQLException if database error
 */
public String getSubmittedByProvenanceMessage(Context context, Item item)
    throws SQLException;
} }

View File

@@ -473,7 +473,7 @@ public interface ItemService
public void removeGroupPolicies(Context context, Item item, Group group) throws SQLException, AuthorizeException; public void removeGroupPolicies(Context context, Item item, Group group) throws SQLException, AuthorizeException;
/** /**
* remove all policies on an item and its contents, and replace them with * Remove all policies on an item and its contents, and replace them with
* the DEFAULT_ITEM_READ and DEFAULT_BITSTREAM_READ policies belonging to * the DEFAULT_ITEM_READ and DEFAULT_BITSTREAM_READ policies belonging to
* the collection. * the collection.
* *
@@ -488,6 +488,26 @@ public interface ItemService
public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection) public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection)
throws java.sql.SQLException, AuthorizeException; throws java.sql.SQLException, AuthorizeException;
/**
 * Remove all submission and workflow policies on an item and its contents, and add
 * default collection policies which are not yet already in place.
 * If overrideItemReadPolicies is true, then all read policies on the item are replaced (but only if the
 * collection has a default read policy).
 *
 * @param context DSpace context object
 * @param item item to reset policies on
 * @param collection Collection
 * @param overrideItemReadPolicies if true, all read policies on the item are replaced (but only if the
 *                                 collection has a default read policy)
 * @throws SQLException if an SQL error occurs or if no default policies are
 *                      found. It's a bit draconian, but default policies
 *                      must be enforced.
 * @throws AuthorizeException if authorization error
 */
public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection,
                                             boolean overrideItemReadPolicies)
    throws java.sql.SQLException, AuthorizeException;
/** /**
* Adjust the Bundle and Bitstream policies to reflect what have been defined * Adjust the Bundle and Bitstream policies to reflect what have been defined
* during the submission/workflow. The temporary SUBMISSION and WORKFLOW * during the submission/workflow. The temporary SUBMISSION and WORKFLOW
@@ -507,6 +527,28 @@ public interface ItemService
public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection) public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection)
throws SQLException, AuthorizeException; throws SQLException, AuthorizeException;
/**
* Adjust the Bundle and Bitstream policies to reflect what have been defined
* during the submission/workflow. The temporary SUBMISSION and WORKFLOW
* policies are removed and the policies defined at the item and collection
* level are copied and inherited as appropriate. Custom selected Item policies
* are copied to the bundle/bitstream only if no explicit custom policies were
* already applied to the bundle/bitstream. Collection's policies are inherited
* if there are no other policies defined or if the append mode is defined by
* the configuration via the core.authorization.installitem.inheritance-read.append-mode property
*
* @param context DSpace context object
* @param item Item to adjust policies on
* @param collection Collection
* @param replaceReadRPWithCollectionRP if true, all read policies on the item are replaced (but only if the
* collection has a default read policy)
* @throws SQLException If database error
* @throws AuthorizeException If authorization error
*/
public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection,
boolean replaceReadRPWithCollectionRP)
throws SQLException, AuthorizeException;
/** /**
* Adjust the Bitstream policies to reflect what have been defined * Adjust the Bitstream policies to reflect what have been defined
* during the submission/workflow. The temporary SUBMISSION and WORKFLOW * during the submission/workflow. The temporary SUBMISSION and WORKFLOW
@@ -527,6 +569,29 @@ public interface ItemService
public void adjustBitstreamPolicies(Context context, Item item, Collection collection, Bitstream bitstream) public void adjustBitstreamPolicies(Context context, Item item, Collection collection, Bitstream bitstream)
throws SQLException, AuthorizeException; throws SQLException, AuthorizeException;
/**
* Adjust the Bitstream policies to reflect what have been defined
* during the submission/workflow. The temporary SUBMISSION and WORKFLOW
* policies are removed and the policies defined at the item and collection
* level are copied and inherited as appropriate. Custom selected Item policies
* are copied to the bitstream only if no explicit custom policies were
* already applied to the bitstream. Collection's policies are inherited
* if there are no other policies defined or if the append mode is defined by
* the configuration via the core.authorization.installitem.inheritance-read.append-mode property
*
* @param context DSpace context object
* @param item Item to adjust policies on
* @param collection Collection
* @param bitstream Bitstream to adjust policies on
* @param replaceReadRPWithCollectionRP If true, all read policies on the bitstream are replaced (but only if the
* collection has a default read policy)
* @throws SQLException If database error
* @throws AuthorizeException If authorization error
*/
public void adjustBitstreamPolicies(Context context, Item item, Collection collection, Bitstream bitstream,
boolean replaceReadRPWithCollectionRP)
throws SQLException, AuthorizeException;
/** /**
* Adjust the Item's policies to reflect what have been defined during the * Adjust the Item's policies to reflect what have been defined during the
@@ -545,6 +610,26 @@ public interface ItemService
public void adjustItemPolicies(Context context, Item item, Collection collection) public void adjustItemPolicies(Context context, Item item, Collection collection)
throws SQLException, AuthorizeException; throws SQLException, AuthorizeException;
/**
* Adjust the Item's policies to reflect what have been defined during the
* submission/workflow. The temporary SUBMISSION and WORKFLOW policies are
* removed and the default policies defined at the collection level are
* inherited as appropriate. Collection's policies are inherited if there are no
* other policies defined or if the append mode is defined by the configuration
* via the core.authorization.installitem.inheritance-read.append-mode property
*
* @param context DSpace context object
* @param item Item to adjust policies on
* @param collection Collection
* @param replaceReadRPWithCollectionRP If true, all read policies on the item are replaced (but only if the
* collection has a default read policy)
* @throws SQLException If database error
* @throws AuthorizeException If authorization error
*/
public void adjustItemPolicies(Context context, Item item, Collection collection,
boolean replaceReadRPWithCollectionRP)
throws SQLException, AuthorizeException;
/** /**
* Moves the item from one collection to another one * Moves the item from one collection to another one
* *

View File

@@ -21,7 +21,6 @@ import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
import java.util.Date; import java.util.Date;
import java.util.Enumeration; import java.util.Enumeration;
import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Properties; import java.util.Properties;
import javax.activation.DataHandler; import javax.activation.DataHandler;
@@ -41,7 +40,6 @@ import javax.mail.internet.MimeMessage;
import javax.mail.internet.MimeMultipart; import javax.mail.internet.MimeMultipart;
import javax.mail.internet.ParseException; import javax.mail.internet.ParseException;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.apache.velocity.Template; import org.apache.velocity.Template;
@@ -57,26 +55,40 @@ import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
/** /**
* Class representing an e-mail message, also used to send e-mails. * Class representing an e-mail message. The {@link send} method causes the
* assembled message to be formatted and sent.
* <p> * <p>
* Typical use: * Typical use:
* </p> * <pre>
* <code>Email email = Email.getEmail(path);</code>
* <code>email.addRecipient("foo@bar.com");</code>
* <code>email.addArgument("John");</code>
* <code>email.addArgument("On the Testing of DSpace");</code>
* <code>email.send();</code>
* </pre>
* {@code path} is the filesystem path of an email template, typically in
* {@code ${dspace.dir}/config/emails/} and can include the subject -- see
* below. Templates are processed by <a href='https://velocity.apache.org/'>
* Apache Velocity</a>. They may contain VTL directives and property
* placeholders.
* <p> * <p>
* <code>Email email = new Email();</code><br> * {@link addArgument(string)} adds a property to the {@code params} array
* <code>email.addRecipient("foo@bar.com");</code><br> * in the Velocity context, which can be used to replace placeholder tokens
* <code>email.addArgument("John");</code><br> * in the message. These arguments are indexed by number in the order they were
* <code>email.addArgument("On the Testing of DSpace");</code><br> * added to the message.
* <code>email.send();</code><br>
* </p>
* <p> * <p>
* <code>name</code> is the name of an email template in * The DSpace configuration properties are also available to templates as the
* <code>dspace-dir/config/emails/</code> (which also includes the subject.) * array {@code config}, indexed by name. Example: {@code ${config.get('dspace.name')}}
* <code>arg0</code> and <code>arg1</code> are arguments to fill out the * <p>
* message with. * Recipients and attachments may be added as needed. See {@link addRecipient},
* <P> * {@link addAttachment(File, String)}, and
* Emails are formatted using Apache Velocity. Headers such as Subject may be * {@link addAttachment(InputStream, String, String)}.
* supplied by the template, by defining them using #set(). Example: * <p>
* </p> * Headers such as Subject may be supplied by the template, by defining them
* using the VTL directive {@code #set()}. Only headers named in the DSpace
* configuration array property {@code mail.message.headers} will be added.
* <p>
* Example:
* *
* <pre> * <pre>
* *
@@ -91,12 +103,14 @@ import org.dspace.services.factory.DSpaceServicesFactory;
* *
* Thank you for sending us your submission &quot;${params[1]}&quot;. * Thank you for sending us your submission &quot;${params[1]}&quot;.
* *
* --
* The ${config.get('dspace.name')} Team
*
* </pre> * </pre>
* *
* <p> * <p>
* If the example code above was used to send this mail, the resulting mail * If the example code above was used to send this mail, the resulting mail
* would have the subject <code>Example e-mail</code> and the body would be: * would have the subject <code>Example e-mail</code> and the body would be:
* </p>
* *
* <pre> * <pre>
* *
@@ -105,7 +119,16 @@ import org.dspace.services.factory.DSpaceServicesFactory;
* *
* Thank you for sending us your submission &quot;On the Testing of DSpace&quot;. * Thank you for sending us your submission &quot;On the Testing of DSpace&quot;.
* *
* --
* The DSpace Team
*
* </pre> * </pre>
* <p>
* There are two ways to load a message body. One can create an instance of
* {@link Email} and call {@link setContent} on it, passing the body as a String. Or
* one can use the static factory method {@link getEmail} to load a file by its
* complete filesystem path. In either case the text will be loaded into a
* Velocity template.
* *
* @author Robert Tansley * @author Robert Tansley
* @author Jim Downing - added attachment handling code * @author Jim Downing - added attachment handling code
@@ -115,7 +138,6 @@ public class Email {
/** /**
* The content of the message * The content of the message
*/ */
private String content;
private String contentName; private String contentName;
/** /**
@@ -176,13 +198,12 @@ public class Email {
moreAttachments = new ArrayList<>(10); moreAttachments = new ArrayList<>(10);
subject = ""; subject = "";
template = null; template = null;
content = "";
replyTo = null; replyTo = null;
charset = null; charset = null;
} }
/** /**
* Add a recipient * Add a recipient.
* *
* @param email the recipient's email address * @param email the recipient's email address
*/ */
@@ -196,16 +217,24 @@ public class Email {
* "Subject:" line must be stripped. * "Subject:" line must be stripped.
* *
* @param name a name for this message body * @param name a name for this message body
* @param cnt the content of the message * @param content the content of the message
*/ */
public void setContent(String name, String cnt) { public void setContent(String name, String content) {
content = cnt;
contentName = name; contentName = name;
arguments.clear(); arguments.clear();
VelocityEngine templateEngine = new VelocityEngine();
templateEngine.init(VELOCITY_PROPERTIES);
StringResourceRepository repo = (StringResourceRepository)
templateEngine.getApplicationAttribute(RESOURCE_REPOSITORY_NAME);
repo.putStringResource(contentName, content);
// Turn content into a template.
template = templateEngine.getTemplate(contentName);
} }
/** /**
* Set the subject of the message * Set the subject of the message.
* *
* @param s the subject of the message * @param s the subject of the message
*/ */
@@ -214,7 +243,7 @@ public class Email {
} }
/** /**
* Set the reply-to email address * Set the reply-to email address.
* *
* @param email the reply-to email address * @param email the reply-to email address
*/ */
@@ -223,7 +252,7 @@ public class Email {
} }
/** /**
* Fill out the next argument in the template * Fill out the next argument in the template.
* *
* @param arg the value for the next argument * @param arg the value for the next argument
*/ */
@@ -231,6 +260,13 @@ public class Email {
arguments.add(arg); arguments.add(arg);
} }
/**
* Add an attachment bodypart to the message from an external file.
*
* @param f reference to a file to be attached.
* @param name a name for the resulting bodypart in the message's MIME
* structure.
*/
public void addAttachment(File f, String name) { public void addAttachment(File f, String name) {
attachments.add(new FileAttachment(f, name)); attachments.add(new FileAttachment(f, name));
} }
@@ -238,6 +274,17 @@ public class Email {
/** When given a bad MIME type for an attachment, use this instead. */ /** When given a bad MIME type for an attachment, use this instead. */
private static final String DEFAULT_ATTACHMENT_TYPE = "application/octet-stream"; private static final String DEFAULT_ATTACHMENT_TYPE = "application/octet-stream";
/**
* Add an attachment bodypart to the message from a byte stream.
*
* @param is the content of this stream will become the content of the
* bodypart.
* @param name a name for the resulting bodypart in the message's MIME
* structure.
* @param mimetype the MIME type of the resulting bodypart, such as
     *                 "application/pdf".  If {@code null} it will default to
* "application/octet-stream", which is MIME for "unknown format".
*/
public void addAttachment(InputStream is, String name, String mimetype) { public void addAttachment(InputStream is, String name, String mimetype) {
if (null == mimetype) { if (null == mimetype) {
LOG.error("Null MIME type replaced with '" + DEFAULT_ATTACHMENT_TYPE LOG.error("Null MIME type replaced with '" + DEFAULT_ATTACHMENT_TYPE
@@ -257,6 +304,11 @@ public class Email {
moreAttachments.add(new InputStreamAttachment(is, name, mimetype)); moreAttachments.add(new InputStreamAttachment(is, name, mimetype));
} }
/**
* Set the character set of the message.
*
* @param cs the name of a character set, such as "UTF-8" or "EUC-JP".
*/
public void setCharset(String cs) { public void setCharset(String cs) {
charset = cs; charset = cs;
} }
@@ -280,15 +332,20 @@ public class Email {
* {@code mail.message.headers} then that name and its value will be added * {@code mail.message.headers} then that name and its value will be added
* to the message's headers. * to the message's headers.
* *
* <p>"subject" is treated specially: if {@link setSubject()} has not been called, * <p>"subject" is treated specially: if {@link setSubject()} has not been
* the value of any "subject" property will be used as if setSubject had * called, the value of any "subject" property will be used as if setSubject
* been called with that value. Thus a template may define its subject, but * had been called with that value. Thus a template may define its subject,
* the caller may override it. * but the caller may override it.
* *
* @throws MessagingException if there was a problem sending the mail. * @throws MessagingException if there was a problem sending the mail.
* @throws IOException if IO error * @throws IOException if IO error
*/ */
public void send() throws MessagingException, IOException { public void send() throws MessagingException, IOException {
if (null == template) {
// No template -- no content -- PANIC!!!
throw new MessagingException("Email has no body");
}
ConfigurationService config ConfigurationService config
= DSpaceServicesFactory.getInstance().getConfigurationService(); = DSpaceServicesFactory.getInstance().getConfigurationService();
@@ -308,37 +365,18 @@ public class Email {
MimeMessage message = new MimeMessage(session); MimeMessage message = new MimeMessage(session);
// Set the recipients of the message // Set the recipients of the message
Iterator<String> i = recipients.iterator(); for (String recipient : recipients) {
message.addRecipient(Message.RecipientType.TO,
while (i.hasNext()) { new InternetAddress(recipient));
message.addRecipient(Message.RecipientType.TO, new InternetAddress(
i.next()));
} }
// Get headers defined by the template. // Get headers defined by the template.
String[] templateHeaders = config.getArrayProperty("mail.message.headers"); String[] templateHeaders = config.getArrayProperty("mail.message.headers");
// Format the mail message body // Format the mail message body
VelocityEngine templateEngine = new VelocityEngine();
templateEngine.init(VELOCITY_PROPERTIES);
VelocityContext vctx = new VelocityContext(); VelocityContext vctx = new VelocityContext();
vctx.put("config", new UnmodifiableConfigurationService(config)); vctx.put("config", new UnmodifiableConfigurationService(config));
vctx.put("params", Collections.unmodifiableList(arguments)); vctx.put("params", Collections.unmodifiableList(arguments));
if (null == template) {
if (StringUtils.isBlank(content)) {
// No template and no content -- PANIC!!!
throw new MessagingException("Email has no body");
}
// No template, so use a String of content.
StringResourceRepository repo = (StringResourceRepository)
templateEngine.getApplicationAttribute(RESOURCE_REPOSITORY_NAME);
repo.putStringResource(contentName, content);
// Turn content into a template.
template = templateEngine.getTemplate(contentName);
templateHeaders = new String[] {};
}
StringWriter writer = new StringWriter(); StringWriter writer = new StringWriter();
try { try {
template.merge(vctx, writer); template.merge(vctx, writer);
@@ -405,7 +443,8 @@ public class Email {
// add the stream // add the stream
messageBodyPart = new MimeBodyPart(); messageBodyPart = new MimeBodyPart();
messageBodyPart.setDataHandler(new DataHandler( messageBodyPart.setDataHandler(new DataHandler(
new InputStreamDataSource(attachment.name,attachment.mimetype,attachment.is))); new InputStreamDataSource(attachment.name,
attachment.mimetype, attachment.is)));
messageBodyPart.setFileName(attachment.name); messageBodyPart.setFileName(attachment.name);
multipart.addBodyPart(messageBodyPart); multipart.addBodyPart(messageBodyPart);
} }
@@ -447,6 +486,9 @@ public class Email {
/** /**
* Get the VTL template for an email message. The message is suitable * Get the VTL template for an email message. The message is suitable
* for inserting values using Apache Velocity. * for inserting values using Apache Velocity.
* <p>
* Note that everything is stored here, so that only send() throws a
* MessagingException.
* *
* @param emailFile * @param emailFile
* full name for the email template, for example "/dspace/config/emails/register". * full name for the email template, for example "/dspace/config/emails/register".
@@ -484,15 +526,6 @@ public class Email {
} }
return email; return email;
} }
/*
* Implementation note: It might be necessary to add a quick utility method
* like "send(to, subject, message)". We'll see how far we get without it -
* having all emails as templates in the config allows customisation and
* internationalisation.
*
* Note that everything is stored and the run in send() so that only send()
* throws a MessagingException.
*/
/** /**
* Test method to send an email to check email server settings * Test method to send an email to check email server settings
@@ -547,7 +580,7 @@ public class Email {
} }
/** /**
* Utility struct class for handling file attachments. * Utility record class for handling file attachments.
* *
* @author ojd20 * @author ojd20
*/ */
@@ -563,7 +596,7 @@ public class Email {
} }
/** /**
* Utility struct class for handling file attachments. * Utility record class for handling file attachments.
* *
* @author Adán Román Ruiz at arvo.es * @author Adán Román Ruiz at arvo.es
*/ */
@@ -580,6 +613,8 @@ public class Email {
} }
/** /**
* Wrap an {@link InputStream} in a {@link DataSource}.
*
* @author arnaldo * @author arnaldo
*/ */
public static class InputStreamDataSource implements DataSource { public static class InputStreamDataSource implements DataSource {
@@ -587,6 +622,14 @@ public class Email {
private final String contentType; private final String contentType;
private final ByteArrayOutputStream baos; private final ByteArrayOutputStream baos;
/**
* Consume the content of an InputStream and store it in a local buffer.
*
* @param name give the DataSource a name.
* @param contentType the DataSource contains this type of data.
* @param inputStream content to be buffered in the DataSource.
* @throws IOException if the stream cannot be read.
*/
InputStreamDataSource(String name, String contentType, InputStream inputStream) throws IOException { InputStreamDataSource(String name, String contentType, InputStream inputStream) throws IOException {
this.name = name; this.name = name;
this.contentType = contentType; this.contentType = contentType;

View File

@@ -17,9 +17,12 @@ import java.io.InputStream;
import java.io.InputStreamReader; import java.io.InputStreamReader;
import java.io.OutputStreamWriter; import java.io.OutputStreamWriter;
import java.io.PrintWriter; import java.io.PrintWriter;
import javax.servlet.http.HttpServletRequest;
import org.dspace.core.service.LicenseService; import org.dspace.core.service.LicenseService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.services.model.Request;
import org.dspace.web.ContextUtil;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -101,13 +104,14 @@ public class LicenseServiceImpl implements LicenseService {
/** /**
* Get the site-wide default license that submitters need to grant * Get the site-wide default license that submitters need to grant
* *
* Localized license requires: default_{{locale}}.license file.
* Locale also must be listed in webui.supported.locales setting.
*
* @return the default license * @return the default license
*/ */
@Override @Override
public String getDefaultSubmissionLicense() { public String getDefaultSubmissionLicense() {
if (null == license) {
init(); init();
}
return license; return license;
} }
@@ -115,9 +119,8 @@ public class LicenseServiceImpl implements LicenseService {
* Load in the default license. * Load in the default license.
*/ */
protected void init() { protected void init() {
File licenseFile = new File( Context context = obtainContext();
DSpaceServicesFactory.getInstance().getConfigurationService().getProperty("dspace.dir") File licenseFile = new File(I18nUtil.getDefaultLicense(context));
+ File.separator + "config" + File.separator + "default.license");
FileInputStream fir = null; FileInputStream fir = null;
InputStreamReader ir = null; InputStreamReader ir = null;
@@ -169,4 +172,24 @@ public class LicenseServiceImpl implements LicenseService {
} }
} }
} }
/**
     * Obtain the current request context.
     * Returns a new context if one cannot be obtained from the current request.
*
* @return DSpace context object
*/
private Context obtainContext() {
try {
Request currentRequest = DSpaceServicesFactory.getInstance().getRequestService().getCurrentRequest();
if (currentRequest != null) {
HttpServletRequest request = currentRequest.getHttpServletRequest();
return ContextUtil.obtainContext(request);
}
} catch (Exception e) {
log.error("Can't load current request context.");
}
return new Context();
}
} }

View File

@@ -13,6 +13,7 @@ import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection; import org.dspace.content.Collection;
@@ -30,6 +31,7 @@ import org.dspace.workflow.CurationTaskConfig;
import org.dspace.workflow.FlowStep; import org.dspace.workflow.FlowStep;
import org.dspace.workflow.Task; import org.dspace.workflow.Task;
import org.dspace.workflow.TaskSet; import org.dspace.workflow.TaskSet;
import org.dspace.xmlworkflow.Role;
import org.dspace.xmlworkflow.RoleMembers; import org.dspace.xmlworkflow.RoleMembers;
import org.dspace.xmlworkflow.WorkflowConfigurationException; import org.dspace.xmlworkflow.WorkflowConfigurationException;
import org.dspace.xmlworkflow.factory.XmlWorkflowFactory; import org.dspace.xmlworkflow.factory.XmlWorkflowFactory;
@@ -47,14 +49,17 @@ import org.springframework.stereotype.Service;
* Manage interactions between curation and workflow. A curation task can be * Manage interactions between curation and workflow. A curation task can be
* attached to a workflow step, to be executed during the step. * attached to a workflow step, to be executed during the step.
* *
* <p>
* <strong>NOTE:</strong> when run in workflow, curation tasks <em>run with
* authorization disabled</em>.
*
* @see CurationTaskConfig * @see CurationTaskConfig
* @author mwood * @author mwood
*/ */
@Service @Service
public class XmlWorkflowCuratorServiceImpl public class XmlWorkflowCuratorServiceImpl
implements XmlWorkflowCuratorService { implements XmlWorkflowCuratorService {
private static final Logger LOG private static final Logger LOG = LogManager.getLogger();
= org.apache.logging.log4j.LogManager.getLogger();
@Autowired(required = true) @Autowired(required = true)
protected XmlWorkflowFactory workflowFactory; protected XmlWorkflowFactory workflowFactory;
@@ -97,7 +102,18 @@ public class XmlWorkflowCuratorServiceImpl
throws AuthorizeException, IOException, SQLException { throws AuthorizeException, IOException, SQLException {
Curator curator = new Curator(); Curator curator = new Curator();
curator.setReporter(reporter); curator.setReporter(reporter);
return curate(curator, c, wfi); c.turnOffAuthorisationSystem();
boolean wasAnonymous = false;
if (null == c.getCurrentUser()) { // We need someone to email
wasAnonymous = true;
c.setCurrentUser(ePersonService.getSystemEPerson(c));
}
boolean failedP = curate(curator, c, wfi);
if (wasAnonymous) {
c.setCurrentUser(null);
}
c.restoreAuthSystemState();
return failedP;
} }
@Override @Override
@@ -123,7 +139,7 @@ public class XmlWorkflowCuratorServiceImpl
item.setOwningCollection(wfi.getCollection()); item.setOwningCollection(wfi.getCollection());
for (Task task : step.tasks) { for (Task task : step.tasks) {
curator.addTask(task.name); curator.addTask(task.name);
curator.curate(item); curator.curate(c, item);
int status = curator.getStatus(task.name); int status = curator.getStatus(task.name);
String result = curator.getResult(task.name); String result = curator.getResult(task.name);
String action = "none"; String action = "none";
@@ -223,8 +239,12 @@ public class XmlWorkflowCuratorServiceImpl
String status, String action, String message) String status, String action, String message)
throws AuthorizeException, IOException, SQLException { throws AuthorizeException, IOException, SQLException {
List<EPerson> epa = resolveContacts(c, task.getContacts(status), wfi); List<EPerson> epa = resolveContacts(c, task.getContacts(status), wfi);
if (epa.size() > 0) { if (!epa.isEmpty()) {
workflowService.notifyOfCuration(c, wfi, epa, task.name, action, message); workflowService.notifyOfCuration(c, wfi, epa, task.name, action, message);
} else {
LOG.warn("No contacts were found for workflow item {}: "
+ "task {} returned action {} with message {}",
wfi.getID(), task.name, action, message);
} }
} }
@@ -247,8 +267,7 @@ public class XmlWorkflowCuratorServiceImpl
// decode contacts // decode contacts
if ("$flowgroup".equals(contact)) { if ("$flowgroup".equals(contact)) {
            // special literal for current flowgroup                // special literal for current flowgroup
ClaimedTask claimedTask = claimedTaskService.findByWorkflowIdAndEPerson(c, wfi, c.getCurrentUser()); String stepID = getFlowStep(c, wfi).step;
String stepID = claimedTask.getStepID();
Step step; Step step;
try { try {
Workflow workflow = workflowFactory.getWorkflow(wfi.getCollection()); Workflow workflow = workflowFactory.getWorkflow(wfi.getCollection());
@@ -258,19 +277,26 @@ public class XmlWorkflowCuratorServiceImpl
String.valueOf(wfi.getID()), e); String.valueOf(wfi.getID()), e);
return epList; return epList;
} }
RoleMembers roleMembers = step.getRole().getMembers(c, wfi); Role role = step.getRole();
if (null != role) {
RoleMembers roleMembers = role.getMembers(c, wfi);
for (EPerson ep : roleMembers.getEPersons()) { for (EPerson ep : roleMembers.getEPersons()) {
epList.add(ep); epList.add(ep);
} }
for (Group group : roleMembers.getGroups()) { for (Group group : roleMembers.getGroups()) {
epList.addAll(group.getMembers()); epList.addAll(group.getMembers());
} }
} else {
epList.add(ePersonService.getSystemEPerson(c));
}
} else if ("$colladmin".equals(contact)) { } else if ("$colladmin".equals(contact)) {
// special literal for collection administrators
Group adGroup = wfi.getCollection().getAdministrators(); Group adGroup = wfi.getCollection().getAdministrators();
if (adGroup != null) { if (adGroup != null) {
epList.addAll(groupService.allMembers(c, adGroup)); epList.addAll(groupService.allMembers(c, adGroup));
} }
} else if ("$siteadmin".equals(contact)) { } else if ("$siteadmin".equals(contact)) {
// special literal for site administrator
EPerson siteEp = ePersonService.findByEmail(c, EPerson siteEp = ePersonService.findByEmail(c,
configurationService.getProperty("mail.admin")); configurationService.getProperty("mail.admin"));
if (siteEp != null) { if (siteEp != null) {

View File

@@ -42,9 +42,9 @@ public interface XmlWorkflowCuratorService {
* *
* @param c the context * @param c the context
* @param wfi the workflow item * @param wfi the workflow item
* @return true if curation was completed or not required, * @return true if curation was completed or not required;
* false if tasks were queued for later completion, * false if tasks were queued for later completion,
* or item was rejected * or item was rejected.
* @throws AuthorizeException if authorization error * @throws AuthorizeException if authorization error
* @throws IOException if IO error * @throws IOException if IO error
* @throws SQLException if database error * @throws SQLException if database error
@@ -58,7 +58,9 @@ public interface XmlWorkflowCuratorService {
* @param curator the curation context * @param curator the curation context
* @param c the user context * @param c the user context
* @param wfId the workflow item's ID * @param wfId the workflow item's ID
     * @return true if curation failed.                       * @return true if curation was completed or not required;
* false if tasks were queued for later completion,
* or item was rejected.
* @throws AuthorizeException if authorization error * @throws AuthorizeException if authorization error
* @throws IOException if IO error * @throws IOException if IO error
* @throws SQLException if database error * @throws SQLException if database error
@@ -72,7 +74,9 @@ public interface XmlWorkflowCuratorService {
* @param curator the curation context * @param curator the curation context
* @param c the user context * @param c the user context
* @param wfi the workflow item * @param wfi the workflow item
* @return true if curation failed. * @return true if workflow curation was completed or not required;
* false if tasks were queued for later completion,
* or item was rejected.
* @throws AuthorizeException if authorization error * @throws AuthorizeException if authorization error
* @throws IOException if IO error * @throws IOException if IO error
* @throws SQLException if database error * @throws SQLException if database error

View File

@@ -76,14 +76,19 @@ public class FullTextContentStreams extends ContentStreamBase {
if (StringUtils.equals(FULLTEXT_BUNDLE, myBundle.getName())) { if (StringUtils.equals(FULLTEXT_BUNDLE, myBundle.getName())) {
// a-ha! grab the text out of the bitstreams // a-ha! grab the text out of the bitstreams
List<Bitstream> bitstreams = myBundle.getBitstreams(); List<Bitstream> bitstreams = myBundle.getBitstreams();
log.debug("Processing full-text bitstreams. Item handle: " + sourceInfo);
for (Bitstream fulltextBitstream : emptyIfNull(bitstreams)) { for (Bitstream fulltextBitstream : emptyIfNull(bitstreams)) {
fullTextStreams.add(new FullTextBitstream(sourceInfo, fulltextBitstream)); fullTextStreams.add(new FullTextBitstream(sourceInfo, fulltextBitstream));
if (fulltextBitstream != null) {
log.debug("Added BitStream: " log.debug("Added BitStream: "
+ fulltextBitstream.getStoreNumber() + " " + fulltextBitstream.getStoreNumber() + " "
+ fulltextBitstream.getSequenceID() + " " + fulltextBitstream.getSequenceID() + " "
+ fulltextBitstream.getName()); + fulltextBitstream.getName());
} else {
log.error("Found a NULL bitstream when processing full-text files: item handle:" + sourceInfo);
}
} }
} }
} }
@@ -158,16 +163,16 @@ public class FullTextContentStreams extends ContentStreamBase {
} }
public String getContentType(final Context context) throws SQLException { public String getContentType(final Context context) throws SQLException {
BitstreamFormat format = bitstream.getFormat(context); BitstreamFormat format = bitstream != null ? bitstream.getFormat(context) : null;
return format == null ? null : StringUtils.trimToEmpty(format.getMIMEType()); return format == null ? null : StringUtils.trimToEmpty(format.getMIMEType());
} }
public String getFileName() { public String getFileName() {
return StringUtils.trimToEmpty(bitstream.getName()); return bitstream != null ? StringUtils.trimToEmpty(bitstream.getName()) : null;
} }
public long getSize() { public long getSize() {
return bitstream.getSizeBytes(); return bitstream != null ? bitstream.getSizeBytes() : -1;
} }
public InputStream getInputStream() throws SQLException, IOException, AuthorizeException { public InputStream getInputStream() throws SQLException, IOException, AuthorizeException {

View File

@@ -154,7 +154,11 @@ public class IndexEventConsumer implements Consumer {
case Event.REMOVE: case Event.REMOVE:
case Event.ADD: case Event.ADD:
if (object == null) { // At this time, ADD and REMOVE actions are ignored on SITE object. They are only triggered for
// top-level communities. No action is necessary as Community itself is indexed (or deleted) separately.
if (event.getSubjectType() == Constants.SITE) {
log.debug(event.getEventTypeAsString() + " event triggered for Site object. Skipping it.");
} else if (object == null) {
log.warn(event.getEventTypeAsString() + " event, could not get object for " log.warn(event.getEventTypeAsString() + " event, could not get object for "
+ event.getObjectTypeAsString() + " id=" + event.getObjectTypeAsString() + " id="
+ event.getObjectID() + event.getObjectID()

View File

@@ -172,13 +172,6 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
addNamedResourceTypeIndex(doc, acvalue); addNamedResourceTypeIndex(doc, acvalue);
} }
// write the index and close the inputstreamreaders
try {
log.info("Wrote Item: " + item.getID() + " to Index");
} catch (RuntimeException e) {
log.error("Error while writing item to discovery index: " + item.getID() + " message:"
+ e.getMessage(), e);
}
return doc; return doc;
} }
@@ -845,7 +838,7 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
private void saveFacetPrefixParts(SolrInputDocument doc, DiscoverySearchFilter searchFilter, String value, private void saveFacetPrefixParts(SolrInputDocument doc, DiscoverySearchFilter searchFilter, String value,
String separator, String authority, String preferedLabel) { String separator, String authority, String preferedLabel) {
value = StringUtils.normalizeSpace(value); value = StringUtils.normalizeSpace(value);
Pattern pattern = Pattern.compile("\\b\\w+\\b", Pattern.CASE_INSENSITIVE); Pattern pattern = Pattern.compile("\\b\\w+\\b", Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CHARACTER_CLASS);
Matcher matcher = pattern.matcher(value); Matcher matcher = pattern.matcher(value);
while (matcher.find()) { while (matcher.find()) {
int index = matcher.start(); int index = matcher.start();

View File

@@ -47,6 +47,7 @@ import org.dspace.eperson.service.GroupService;
import org.dspace.eperson.service.SubscribeService; import org.dspace.eperson.service.SubscribeService;
import org.dspace.event.Event; import org.dspace.event.Event;
import org.dspace.orcid.service.OrcidTokenService; import org.dspace.orcid.service.OrcidTokenService;
import org.dspace.services.ConfigurationService;
import org.dspace.util.UUIDUtils; import org.dspace.util.UUIDUtils;
import org.dspace.versioning.Version; import org.dspace.versioning.Version;
import org.dspace.versioning.VersionHistory; import org.dspace.versioning.VersionHistory;
@@ -101,6 +102,8 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
protected VersionDAO versionDAO; protected VersionDAO versionDAO;
@Autowired(required = true) @Autowired(required = true)
protected ClaimedTaskService claimedTaskService; protected ClaimedTaskService claimedTaskService;
@Autowired(required = true)
protected ConfigurationService configurationService;
@Autowired @Autowired
protected OrcidTokenService orcidTokenService; protected OrcidTokenService orcidTokenService;
@@ -113,6 +116,30 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
return ePersonDAO.findByID(context, EPerson.class, id); return ePersonDAO.findByID(context, EPerson.class, id);
} }
/**
* Create a fake EPerson which can receive email. Its address will be the
* value of "mail.admin", or "postmaster" if all else fails.
* @param c
* @return
* @throws SQLException
*/
@Override
public EPerson getSystemEPerson(Context c)
throws SQLException {
String adminEmail = configurationService.getProperty("mail.admin");
if (null == adminEmail) {
adminEmail = "postmaster"; // Last-ditch attempt to send *somewhere*
}
EPerson systemEPerson = findByEmail(c, adminEmail);
if (null == systemEPerson) {
systemEPerson = new EPerson();
systemEPerson.setEmail(adminEmail);
}
return systemEPerson;
}
@Override @Override
public EPerson findByIdOrLegacyId(Context context, String id) throws SQLException { public EPerson findByIdOrLegacyId(Context context, String id) throws SQLException {
if (StringUtils.isNumeric(id)) { if (StringUtils.isNumeric(id)) {

View File

@@ -141,15 +141,6 @@ public class Groomer {
System.out.println(); System.out.println();
if (delete) { if (delete) {
List<String> whyNot = ePersonService.getDeleteConstraints(myContext, account);
if (!whyNot.isEmpty()) {
System.out.print("\tCannot be deleted; referenced in");
for (String table : whyNot) {
System.out.print(' ');
System.out.print(table);
}
System.out.println();
} else {
try { try {
ePersonService.delete(myContext, account); ePersonService.delete(myContext, account);
} catch (AuthorizeException | IOException ex) { } catch (AuthorizeException | IOException ex) {
@@ -157,7 +148,6 @@ public class Groomer {
} }
} }
} }
}
myContext.restoreAuthSystemState(); myContext.restoreAuthSystemState();
myContext.complete(); myContext.complete();

View File

@@ -13,6 +13,7 @@ import java.sql.SQLException;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import javax.validation.constraints.NotNull;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item; import org.dspace.content.Item;
@@ -157,6 +158,19 @@ public interface EPersonService extends DSpaceObjectService<EPerson>, DSpaceObje
public List<EPerson> findAll(Context context, int sortField, int pageSize, int offset) public List<EPerson> findAll(Context context, int sortField, int pageSize, int offset)
throws SQLException; throws SQLException;
/**
* The "System EPerson" is a fake account that exists only to receive email.
* It has an email address that should be presumed usable. It does not
* exist in the database and is not complete.
*
* @param context current DSpace session.
* @return an EPerson that can presumably receive email.
* @throws SQLException
*/
@NotNull
public EPerson getSystemEPerson(Context context)
throws SQLException;
/** /**
* Create a new eperson * Create a new eperson
* *

View File

@@ -22,6 +22,8 @@ import org.apache.commons.collections.buffer.CircularFifoBuffer;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
@@ -77,7 +79,7 @@ public class GoogleAsyncEventListener extends AbstractUsageEventListener {
UsageEvent usageEvent = (UsageEvent) event; UsageEvent usageEvent = (UsageEvent) event;
LOGGER.debug("Usage event received " + event.getName()); LOGGER.debug("Usage event received " + event.getName());
if (isNotBitstreamViewEvent(usageEvent)) { if (!isContentBitstream(usageEvent)) {
return; return;
} }
@@ -171,9 +173,33 @@ public class GoogleAsyncEventListener extends AbstractUsageEventListener {
return documentPath; return documentPath;
} }
private boolean isNotBitstreamViewEvent(UsageEvent usageEvent) { /**
return usageEvent.getAction() != UsageEvent.Action.VIEW * Verifies if the usage event is a content bitstream view event, by checking if:<ul>
|| usageEvent.getObject().getType() != Constants.BITSTREAM; * <li>the usage event is a view event</li>
* <li>the object of the usage event is a bitstream</li>
* <li>the bitstream belongs to one of the configured bundles (fallback: ORIGINAL bundle)</li></ul>
*/
private boolean isContentBitstream(UsageEvent usageEvent) {
// check if event is a VIEW event and object is a Bitstream
if (usageEvent.getAction() == UsageEvent.Action.VIEW
&& usageEvent.getObject().getType() == Constants.BITSTREAM) {
// check if bitstream belongs to a configured bundle
List<String> allowedBundles = List.of(configurationService
.getArrayProperty("google-analytics.bundles", new String[]{Constants.CONTENT_BUNDLE_NAME}));
if (allowedBundles.contains("none")) {
// GA events for bitstream views were turned off in config
return false;
}
List<String> bitstreamBundles;
try {
bitstreamBundles = ((Bitstream) usageEvent.getObject())
.getBundles().stream().map(Bundle::getName).collect(Collectors.toList());
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
return allowedBundles.stream().anyMatch(bitstreamBundles::contains);
}
return false;
} }
private boolean isGoogleAnalyticsKeyNotConfigured() { private boolean isGoogleAnalyticsKeyNotConfigured() {

View File

@@ -90,13 +90,11 @@ public class HandleDAOImpl extends AbstractHibernateDAO<Handle> implements Handl
@Override @Override
public long countHandlesByPrefix(Context context, String prefix) throws SQLException { public long countHandlesByPrefix(Context context, String prefix) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery<Long> criteriaQuery = criteriaBuilder.createQuery(Long.class); CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Handle.class);
Root<Handle> handleRoot = criteriaQuery.from(Handle.class); Root<Handle> handleRoot = criteriaQuery.from(Handle.class);
criteriaQuery.select(criteriaBuilder.count(criteriaQuery.from(Handle.class))); criteriaQuery.select(handleRoot);
criteriaQuery.where(criteriaBuilder.like(handleRoot.get(Handle_.handle), prefix + "%")); criteriaQuery.where(criteriaBuilder.like(handleRoot.get(Handle_.handle), prefix + "%"));
return countLong(context, criteriaQuery, criteriaBuilder, handleRoot); return countLong(context, criteriaQuery, criteriaBuilder, handleRoot);
} }

View File

@@ -68,10 +68,9 @@ public class HandleIdentifierProvider extends IdentifierProvider {
try { try {
String id = mint(context, dso); String id = mint(context, dso);
// move canonical to point the latest version // Populate metadata
if (dso instanceof Item || dso instanceof Collection || dso instanceof Community) { if (dso instanceof Item || dso instanceof Collection || dso instanceof Community) {
Item item = (Item) dso; populateHandleMetadata(context, dso, id);
populateHandleMetadata(context, item, id);
} }
return id; return id;
@@ -88,8 +87,7 @@ public class HandleIdentifierProvider extends IdentifierProvider {
try { try {
handleService.createHandle(context, dso, identifier); handleService.createHandle(context, dso, identifier);
if (dso instanceof Item || dso instanceof Collection || dso instanceof Community) { if (dso instanceof Item || dso instanceof Collection || dso instanceof Community) {
Item item = (Item) dso; populateHandleMetadata(context, dso, identifier);
populateHandleMetadata(context, item, identifier);
} }
} catch (IOException | IllegalStateException | SQLException | AuthorizeException e) { } catch (IOException | IllegalStateException | SQLException | AuthorizeException e) {
log.error(LogHelper.getHeader(context, log.error(LogHelper.getHeader(context,

View File

@@ -95,11 +95,11 @@ public class VersionedHandleIdentifierProviderWithCanonicalHandles extends Ident
String id = mint(context, dso); String id = mint(context, dso);
// move canonical to point the latest version // move canonical to point the latest version
if (dso != null && dso.getType() == Constants.ITEM) { if (dso.getType() == Constants.ITEM && dso instanceof Item) {
Item item = (Item) dso; Item item = (Item) dso;
VersionHistory history = null; VersionHistory history;
try { try {
history = versionHistoryService.findByItem(context, (Item) dso); history = versionHistoryService.findByItem(context, item);
} catch (SQLException ex) { } catch (SQLException ex) {
throw new RuntimeException("A problem with the database connection occured.", ex); throw new RuntimeException("A problem with the database connection occured.", ex);
} }
@@ -180,13 +180,12 @@ public class VersionedHandleIdentifierProviderWithCanonicalHandles extends Ident
@Override @Override
public void register(Context context, DSpaceObject dso, String identifier) { public void register(Context context, DSpaceObject dso, String identifier) {
try { try {
if (dso instanceof Item) {
Item item = (Item) dso; Item item = (Item) dso;
// if this identifier is already present in the Handle table and the corresponding item
// if for this identifier is already present a record in the Handle table and the corresponding item // has a history, then someone is trying to restore the latest version for the item. When
// has an history someone is trying to restore the latest version for the item. When // trying to restore the latest version, the identifier in input doesn't have the
// trying to restore the latest version the identifier in input doesn't have the for 1234/123.latestVersion // 1234/123.latestVersion. Instead, it is the canonical 1234/123
// it is the canonical 1234/123
VersionHistory itemHistory = getHistory(context, identifier); VersionHistory itemHistory = getHistory(context, identifier);
if (!identifier.matches(".*/.*\\.\\d+") && itemHistory != null) { if (!identifier.matches(".*/.*\\.\\d+") && itemHistory != null) {
@@ -214,11 +213,13 @@ public class VersionedHandleIdentifierProviderWithCanonicalHandles extends Ident
} }
} }
} else { } else {
//A regular handle // A regular handle to create for an Item
createNewIdentifier(context, dso, identifier); createNewIdentifier(context, dso, identifier);
if (dso instanceof Item) {
modifyHandleMetadata(context, item, getCanonical(identifier)); modifyHandleMetadata(context, item, getCanonical(identifier));
} }
} else {
// Handle being registered for a different type of object (e.g. Collection or Community)
createNewIdentifier(context, dso, identifier);
} }
} catch (IOException | SQLException | AuthorizeException e) { } catch (IOException | SQLException | AuthorizeException e) {
log.error(LogHelper.getHeader(context, log.error(LogHelper.getHeader(context,

View File

@@ -7,7 +7,8 @@
*/ */
package org.dspace.importer.external.crossref; package org.dspace.importer.external.crossref;
import java.text.SimpleDateFormat; import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.Iterator; import java.util.Iterator;
@@ -18,12 +19,11 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.contributor.JsonPathMetadataProcessor; import org.dspace.importer.external.metadatamapping.contributor.JsonPathMetadataProcessor;
import org.joda.time.LocalDate;
/** /**
* This class is used for CrossRef's Live-Import to extract * This class is used for CrossRef's Live-Import to extract
* issued attribute. * issued attribute.
* Beans are configured in the crossref-integration.xml file. * Beans are configured in the {@code crossref-integration.xml} file.
* *
* @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com)
*/ */
@@ -41,22 +41,25 @@ public class CrossRefDateMetadataProcessor implements JsonPathMetadataProcessor
while (dates.hasNext()) { while (dates.hasNext()) {
JsonNode date = dates.next(); JsonNode date = dates.next();
LocalDate issuedDate = null; LocalDate issuedDate = null;
SimpleDateFormat issuedDateFormat = null; DateTimeFormatter issuedDateFormat = null;
if (date.has(0) && date.has(1) && date.has(2)) { if (date.has(0) && date.has(1) && date.has(2)) {
issuedDate = new LocalDate( issuedDate = LocalDate.of(
date.get(0).numberValue().intValue(), date.get(0).numberValue().intValue(),
date.get(1).numberValue().intValue(), date.get(1).numberValue().intValue(),
date.get(2).numberValue().intValue()); date.get(2).numberValue().intValue());
issuedDateFormat = new SimpleDateFormat("yyyy-MM-dd"); issuedDateFormat = DateTimeFormatter.ISO_LOCAL_DATE;
} else if (date.has(0) && date.has(1)) { } else if (date.has(0) && date.has(1)) {
issuedDate = new LocalDate().withYear(date.get(0).numberValue().intValue()) issuedDate = LocalDate.of(date.get(0).numberValue().intValue(),
.withMonthOfYear(date.get(1).numberValue().intValue()); date.get(1).numberValue().intValue(),
issuedDateFormat = new SimpleDateFormat("yyyy-MM"); 1);
issuedDateFormat = DateTimeFormatter.ofPattern("yyyy-MM");
} else if (date.has(0)) { } else if (date.has(0)) {
issuedDate = new LocalDate().withYear(date.get(0).numberValue().intValue()); issuedDate = LocalDate.of(date.get(0).numberValue().intValue(),
issuedDateFormat = new SimpleDateFormat("yyyy"); 1,
1);
issuedDateFormat = DateTimeFormatter.ofPattern("yyyy");
} }
values.add(issuedDateFormat.format(issuedDate.toDate())); values.add(issuedDate.format(issuedDateFormat));
} }
return values; return values;
} }

View File

@@ -18,6 +18,7 @@ import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.dspace.cli.DSpaceSkipUnknownArgumentsParser;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.configuration.ScriptConfiguration;
import org.dspace.scripts.handler.DSpaceRunnableHandler; import org.dspace.scripts.handler.DSpaceRunnableHandler;
@@ -36,6 +37,11 @@ public abstract class DSpaceRunnable<T extends ScriptConfiguration> implements R
*/ */
protected CommandLine commandLine; protected CommandLine commandLine;
/**
* The minimal CommandLine object for the script that'll hold help information
*/
protected CommandLine helpCommandLine;
/** /**
* This EPerson identifier variable is the UUID of the EPerson that's running the script * This EPerson identifier variable is the UUID of the EPerson that's running the script
*/ */
@@ -64,26 +70,66 @@ public abstract class DSpaceRunnable<T extends ScriptConfiguration> implements R
* @param args The arguments given to the script * @param args The arguments given to the script
* @param dSpaceRunnableHandler The DSpaceRunnableHandler object that defines from where the script was ran * @param dSpaceRunnableHandler The DSpaceRunnableHandler object that defines from where the script was ran
* @param currentUser * @param currentUser
* @return the result of this step; StepResult.Continue: continue the normal process,
* initialize is successful; otherwise exit the process (the help or version is shown)
* @throws ParseException If something goes wrong * @throws ParseException If something goes wrong
*/ */
public void initialize(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler, public StepResult initialize(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler,
EPerson currentUser) throws ParseException { EPerson currentUser) throws ParseException {
if (currentUser != null) { if (currentUser != null) {
this.setEpersonIdentifier(currentUser.getID()); this.setEpersonIdentifier(currentUser.getID());
} }
this.setHandler(dSpaceRunnableHandler); this.setHandler(dSpaceRunnableHandler);
this.parse(args);
// parse the command line in a first step for the help options
// --> no other option is required
StepResult result = this.parseForHelp(args);
switch (result) {
case Exit:
// arguments of the command line matches the help options, handle this
handleHelpCommandLine();
break;
case Continue:
// arguments of the command line matches NOT the help options, parse the args for the normal options
result = this.parse(args);
break;
default:
break;
} }
return result;
}
/**
* This method handle the help command line. In this easy implementation only the help is printed. For more
* complexity override this method.
*/
private void handleHelpCommandLine() {
printHelp();
}
/** /**
* This method will take the primitive array of String objects that represent the parameters given to the String * This method will take the primitive array of String objects that represent the parameters given to the String
* and it'll parse these into a CommandLine object that can be used by the script to retrieve the data * and it'll parse these into a CommandLine object that can be used by the script to retrieve the data
* @param args The primitive array of Strings representing the parameters * @param args The primitive array of Strings representing the parameters
* @throws ParseException If something goes wrong * @throws ParseException If something goes wrong
*/ */
private void parse(String[] args) throws ParseException { private StepResult parse(String[] args) throws ParseException {
commandLine = new DefaultParser().parse(getScriptConfiguration().getOptions(), args); commandLine = new DefaultParser().parse(getScriptConfiguration().getOptions(), args);
setup(); setup();
return StepResult.Continue;
}
private StepResult parseForHelp(String[] args) throws ParseException {
helpCommandLine = new DSpaceSkipUnknownArgumentsParser().parse(getScriptConfiguration().getHelpOptions(), args);
if (helpCommandLine.getOptions() != null && helpCommandLine.getOptions().length > 0) {
return StepResult.Exit;
}
return StepResult.Continue;
} }
/** /**
@@ -158,4 +204,8 @@ public abstract class DSpaceRunnable<T extends ScriptConfiguration> implements R
public void setEpersonIdentifier(UUID epersonIdentifier) { public void setEpersonIdentifier(UUID epersonIdentifier) {
this.epersonIdentifier = epersonIdentifier; this.epersonIdentifier = epersonIdentifier;
} }
public enum StepResult {
Continue, Exit;
}
} }

View File

@@ -10,6 +10,7 @@ package org.dspace.scripts.configuration;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.List; import java.util.List;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService; import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context; import org.dspace.core.Context;
@@ -105,6 +106,19 @@ public abstract class ScriptConfiguration<T extends DSpaceRunnable> implements B
*/ */
public abstract Options getOptions(); public abstract Options getOptions();
/**
* The getter for the options of the Script (help informations)
*
* @return the options value of this ScriptConfiguration for help
*/
public Options getHelpOptions() {
Options options = new Options();
options.addOption(Option.builder("h").longOpt("help").desc("help").hasArg(false).required(false).build());
return options;
}
@Override @Override
public void setBeanName(String beanName) { public void setBeanName(String beanName) {
this.name = beanName; this.name = beanName;

View File

@@ -37,7 +37,7 @@ public class GeoIpService {
public DatabaseReader getDatabaseReader() throws IllegalStateException { public DatabaseReader getDatabaseReader() throws IllegalStateException {
String dbPath = configurationService.getProperty("usage-statistics.dbfile"); String dbPath = configurationService.getProperty("usage-statistics.dbfile");
if (StringUtils.isBlank(dbPath)) { if (StringUtils.isBlank(dbPath)) {
throw new IllegalStateException("The required 'dbfile' configuration is missing in solr-statistics.cfg!"); throw new IllegalStateException("The required 'dbfile' configuration is missing in usage-statistics.cfg!");
} }
try { try {

View File

@@ -1203,22 +1203,6 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
} }
@Override
public void optimizeSOLR() {
try {
long start = System.currentTimeMillis();
System.out.println("SOLR Optimize -- Process Started:" + start);
solr.optimize();
long finish = System.currentTimeMillis();
System.out.println("SOLR Optimize -- Process Finished:" + finish);
System.out.println("SOLR Optimize -- Total time taken:" + (finish - start) + " (ms).");
} catch (SolrServerException sse) {
System.err.println(sse.getMessage());
} catch (IOException ioe) {
System.err.println(ioe.getMessage());
}
}
@Override @Override
public void shardSolrIndex() throws IOException, SolrServerException { public void shardSolrIndex() throws IOException, SolrServerException {
if (!(solr instanceof HttpSolrClient)) { if (!(solr instanceof HttpSolrClient)) {
@@ -1691,11 +1675,14 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
statisticYearCores statisticYearCores
.add(baseSolrUrl.replace("http://", "").replace("https://", "") + statCoreName); .add(baseSolrUrl.replace("http://", "").replace("https://", "") + statCoreName);
} }
//Also add the core containing the current year ! var baseCore = ((HttpSolrClient) solr)
statisticYearCores.add(((HttpSolrClient) solr)
.getBaseURL() .getBaseURL()
.replace("http://", "") .replace("http://", "")
.replace("https://", "")); .replace("https://", "");
if (!statisticYearCores.contains(baseCore)) {
//Also add the core containing the current year, if it hasn't been added already
statisticYearCores.add(baseCore);
}
} catch (IOException | SolrServerException e) { } catch (IOException | SolrServerException e) {
log.error(e.getMessage(), e); log.error(e.getMessage(), e);
} }

View File

@@ -266,12 +266,6 @@ public interface SolrLoggerService {
*/ */
public String getIgnoreSpiderIPs(); public String getIgnoreSpiderIPs();
/**
* Maintenance to keep a SOLR index efficient.
* Note: This might take a long time.
*/
public void optimizeSOLR();
public void shardSolrIndex() throws IOException, SolrServerException; public void shardSolrIndex() throws IOException, SolrServerException;
public void reindexBitstreamHits(boolean removeDeletedBitstreams) throws Exception; public void reindexBitstreamHits(boolean removeDeletedBitstreams) throws Exception;

View File

@@ -67,7 +67,6 @@ public class StatisticsClient {
options.addOption("m", "mark-spiders", false, "Update isBot Flag in Solr"); options.addOption("m", "mark-spiders", false, "Update isBot Flag in Solr");
options.addOption("f", "delete-spiders-by-flag", false, "Delete Spiders in Solr By isBot Flag"); options.addOption("f", "delete-spiders-by-flag", false, "Delete Spiders in Solr By isBot Flag");
options.addOption("i", "delete-spiders-by-ip", false, "Delete Spiders in Solr By IP Address"); options.addOption("i", "delete-spiders-by-ip", false, "Delete Spiders in Solr By IP Address");
options.addOption("o", "optimize", false, "Run maintenance on the SOLR index");
options.addOption("b", "reindex-bitstreams", false, "Reindex the bitstreams to ensure we have the bundle name"); options.addOption("b", "reindex-bitstreams", false, "Reindex the bitstreams to ensure we have the bundle name");
options.addOption("e", "export", false, options.addOption("e", "export", false,
"Export SOLR view statistics data to usage-statistics-intermediate-format"); "Export SOLR view statistics data to usage-statistics-intermediate-format");
@@ -93,8 +92,6 @@ public class StatisticsClient {
solrLoggerService.deleteRobotsByIsBotFlag(); solrLoggerService.deleteRobotsByIsBotFlag();
} else if (line.hasOption('i')) { } else if (line.hasOption('i')) {
solrLoggerService.deleteRobotsByIP(); solrLoggerService.deleteRobotsByIP();
} else if (line.hasOption('o')) {
solrLoggerService.optimizeSOLR();
} else if (line.hasOption('b')) { } else if (line.hasOption('b')) {
solrLoggerService.reindexBitstreamHits(line.hasOption('r')); solrLoggerService.reindexBitstreamHits(line.hasOption('r'));
} else if (line.hasOption('e')) { } else if (line.hasOption('e')) {

View File

@@ -1465,6 +1465,7 @@ public class DatabaseUtils {
Context context = null; Context context = null;
try { try {
context = new Context(); context = new Context();
context.setMode(Context.Mode.READ_ONLY);
context.turnOffAuthorisationSystem(); context.turnOffAuthorisationSystem();
log.info( log.info(
"Post database migration, reindexing all content in Discovery search and browse engine"); "Post database migration, reindexing all content in Discovery search and browse engine");

View File

@@ -35,6 +35,8 @@ public class SolrUtils {
* @return date formatter compatible with Solr. * @return date formatter compatible with Solr.
*/ */
public static DateFormat getDateFormatter() { public static DateFormat getDateFormatter() {
return new SimpleDateFormat(SolrUtils.SOLR_DATE_FORMAT); DateFormat formatter = new SimpleDateFormat(SolrUtils.SOLR_DATE_FORMAT);
formatter.setTimeZone(SOLR_TIME_ZONE);
return formatter;
} }
} }

View File

@@ -0,0 +1,43 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.util;
/**
* Things you wish {@link Throwable} or some logging package would do for you.
*
* @author mwood
*/
public class ThrowableUtils {
/**
* Utility class: do not instantiate.
*/
private ThrowableUtils() { }
/**
* Trace a chain of {@code Throwable}s showing only causes.
* Less voluminous than a stack trace. Useful if you just want to know
* what caused third-party code to return an uninformative exception
* message.
*
* @param throwable the exception or whatever.
* @return list of messages from each {@code Throwable} in the chain,
* separated by '\n'.
*/
static public String formatCauseChain(Throwable throwable) {
StringBuilder trace = new StringBuilder();
trace.append(throwable.getMessage());
Throwable cause = throwable.getCause();
while (null != cause) {
trace.append("\nCaused by: ")
.append(cause.getClass().getCanonicalName()).append(' ')
.append(cause.getMessage());
cause = cause.getCause();
}
return trace.toString();
}
}

View File

@@ -221,6 +221,8 @@ public class XmlWorkflowServiceImpl implements XmlWorkflowService {
//Get our next step, if none is found, archive our item //Get our next step, if none is found, archive our item
firstStep = wf.getNextStep(context, wfi, firstStep, ActionResult.OUTCOME_COMPLETE); firstStep = wf.getNextStep(context, wfi, firstStep, ActionResult.OUTCOME_COMPLETE);
if (firstStep == null) { if (firstStep == null) {
// record the submitted provenance message
recordStart(context, wfi.getItem(),null);
archive(context, wfi); archive(context, wfi);
} else { } else {
activateFirstStep(context, wf, firstStep, wfi); activateFirstStep(context, wf, firstStep, wfi);
@@ -1187,25 +1189,30 @@ public class XmlWorkflowServiceImpl implements XmlWorkflowService {
DCDate now = DCDate.getCurrent(); DCDate now = DCDate.getCurrent();
// Create provenance description // Create provenance description
String provmessage = ""; StringBuffer provmessage = new StringBuffer();
if (myitem.getSubmitter() != null) { if (myitem.getSubmitter() != null) {
provmessage = "Submitted by " + myitem.getSubmitter().getFullName() provmessage.append("Submitted by ").append(myitem.getSubmitter().getFullName())
+ " (" + myitem.getSubmitter().getEmail() + ") on " .append(" (").append(myitem.getSubmitter().getEmail()).append(") on ")
+ now.toString() + " workflow start=" + action.getProvenanceStartId() + "\n"; .append(now.toString());
} else { } else {
// else, null submitter // else, null submitter
provmessage = "Submitted by unknown (probably automated) on" provmessage.append("Submitted by unknown (probably automated) on")
+ now.toString() + " workflow start=" + action.getProvenanceStartId() + "\n"; .append(now.toString());
}
if (action != null) {
provmessage.append(" workflow start=").append(action.getProvenanceStartId()).append("\n");
} else {
provmessage.append("\n");
} }
// add sizes and checksums of bitstreams // add sizes and checksums of bitstreams
provmessage += installItemService.getBitstreamProvenanceMessage(context, myitem); provmessage.append(installItemService.getBitstreamProvenanceMessage(context, myitem));
// Add message to the DC // Add message to the DC
itemService itemService
.addMetadata(context, myitem, MetadataSchemaEnum.DC.getName(), .addMetadata(context, myitem, MetadataSchemaEnum.DC.getName(),
"description", "provenance", "en", provmessage); "description", "provenance", "en", provmessage.toString());
itemService.update(context, myitem); itemService.update(context, myitem);
} }

View File

@@ -1,9 +0,0 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
ALTER TABLE process MODIFY (parameters CLOB);

View File

@@ -0,0 +1,34 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
BEGIN;
-- Unset any primary bitstream that is marked as deleted
UPDATE bundle
SET primary_bitstream_id = NULL
WHERE primary_bitstream_id IN
( SELECT bs.uuid
FROM bitstream AS bs
INNER JOIN bundle as bl ON bs.uuid = bl.primary_bitstream_id
WHERE bs.deleted IS TRUE );
-- Unset any primary bitstream that don't belong to bundle's bitstream list
UPDATE bundle
SET primary_bitstream_id = NULL
WHERE primary_bitstream_id IN
( SELECT bl.primary_bitstream_id
FROM bundle as bl
WHERE bl.primary_bitstream_id IS NOT NULL
AND bl.primary_bitstream_id NOT IN
( SELECT bitstream_id
FROM bundle2bitstream AS b2b
WHERE b2b.bundle_id = bl.uuid
)
);
COMMIT;

View File

@@ -14,6 +14,8 @@ import static org.junit.Assert.fail;
import java.io.ByteArrayInputStream; import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.sql.SQLException; import java.sql.SQLException;
import java.time.LocalDate;
import java.time.ZoneId;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
@@ -42,7 +44,6 @@ import org.dspace.core.Constants;
import org.dspace.eperson.Group; import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.GroupService; import org.dspace.eperson.service.GroupService;
import org.joda.time.LocalDate;
import org.junit.After; import org.junit.After;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
@@ -129,7 +130,7 @@ public class DefaultAccessStatusHelperTest extends AbstractUnitTest {
fail("SQL Error in init: " + ex.getMessage()); fail("SQL Error in init: " + ex.getMessage());
} }
helper = new DefaultAccessStatusHelper(); helper = new DefaultAccessStatusHelper();
threshold = new LocalDate(10000, 1, 1).toDate(); threshold = dateFrom(10000, 1, 1);
} }
/** /**
@@ -266,13 +267,15 @@ public class DefaultAccessStatusHelperTest extends AbstractUnitTest {
Group group = groupService.findByName(context, Group.ANONYMOUS); Group group = groupService.findByName(context, Group.ANONYMOUS);
policy.setGroup(group); policy.setGroup(group);
policy.setAction(Constants.READ); policy.setAction(Constants.READ);
policy.setStartDate(new LocalDate(9999, 12, 31).toDate()); policy.setStartDate(dateFrom(9999, 12, 31));
policies.add(policy); policies.add(policy);
authorizeService.removeAllPolicies(context, bitstream); authorizeService.removeAllPolicies(context, bitstream);
authorizeService.addPolicies(context, policies, bitstream); authorizeService.addPolicies(context, policies, bitstream);
context.restoreAuthSystemState(); context.restoreAuthSystemState();
String status = helper.getAccessStatusFromItem(context, itemWithEmbargo, threshold); String status = helper.getAccessStatusFromItem(context, itemWithEmbargo, threshold);
assertThat("testWithEmbargo 0", status, equalTo(DefaultAccessStatusHelper.EMBARGO)); assertThat("testWithEmbargo 0", status, equalTo(DefaultAccessStatusHelper.EMBARGO));
String embargoDate = helper.getEmbargoFromItem(context, itemWithEmbargo, threshold);
assertThat("testWithEmbargo 1", embargoDate, equalTo(policy.getStartDate().toString()));
} }
/** /**
@@ -293,7 +296,7 @@ public class DefaultAccessStatusHelperTest extends AbstractUnitTest {
Group group = groupService.findByName(context, Group.ANONYMOUS); Group group = groupService.findByName(context, Group.ANONYMOUS);
policy.setGroup(group); policy.setGroup(group);
policy.setAction(Constants.READ); policy.setAction(Constants.READ);
policy.setStartDate(new LocalDate(10000, 1, 1).toDate()); policy.setStartDate(dateFrom(10000, 1, 1));
policies.add(policy); policies.add(policy);
authorizeService.removeAllPolicies(context, bitstream); authorizeService.removeAllPolicies(context, bitstream);
authorizeService.addPolicies(context, policies, bitstream); authorizeService.addPolicies(context, policies, bitstream);
@@ -383,13 +386,15 @@ public class DefaultAccessStatusHelperTest extends AbstractUnitTest {
Group group = groupService.findByName(context, Group.ANONYMOUS); Group group = groupService.findByName(context, Group.ANONYMOUS);
policy.setGroup(group); policy.setGroup(group);
policy.setAction(Constants.READ); policy.setAction(Constants.READ);
policy.setStartDate(new LocalDate(9999, 12, 31).toDate()); policy.setStartDate(dateFrom(9999, 12, 31));
policies.add(policy); policies.add(policy);
authorizeService.removeAllPolicies(context, primaryBitstream); authorizeService.removeAllPolicies(context, primaryBitstream);
authorizeService.addPolicies(context, policies, primaryBitstream); authorizeService.addPolicies(context, policies, primaryBitstream);
context.restoreAuthSystemState(); context.restoreAuthSystemState();
String status = helper.getAccessStatusFromItem(context, itemWithPrimaryAndMultipleBitstreams, threshold); String status = helper.getAccessStatusFromItem(context, itemWithPrimaryAndMultipleBitstreams, threshold);
assertThat("testWithPrimaryAndMultipleBitstreams 0", status, equalTo(DefaultAccessStatusHelper.EMBARGO)); assertThat("testWithPrimaryAndMultipleBitstreams 0", status, equalTo(DefaultAccessStatusHelper.EMBARGO));
String embargoDate = helper.getEmbargoFromItem(context, itemWithPrimaryAndMultipleBitstreams, threshold);
assertThat("testWithPrimaryAndMultipleBitstreams 1", embargoDate, equalTo(policy.getStartDate().toString()));
} }
/** /**
@@ -412,12 +417,29 @@ public class DefaultAccessStatusHelperTest extends AbstractUnitTest {
Group group = groupService.findByName(context, Group.ANONYMOUS); Group group = groupService.findByName(context, Group.ANONYMOUS);
policy.setGroup(group); policy.setGroup(group);
policy.setAction(Constants.READ); policy.setAction(Constants.READ);
policy.setStartDate(new LocalDate(9999, 12, 31).toDate()); policy.setStartDate(dateFrom(9999, 12, 31));
policies.add(policy); policies.add(policy);
authorizeService.removeAllPolicies(context, anotherBitstream); authorizeService.removeAllPolicies(context, anotherBitstream);
authorizeService.addPolicies(context, policies, anotherBitstream); authorizeService.addPolicies(context, policies, anotherBitstream);
context.restoreAuthSystemState(); context.restoreAuthSystemState();
String status = helper.getAccessStatusFromItem(context, itemWithoutPrimaryAndMultipleBitstreams, threshold); String status = helper.getAccessStatusFromItem(context, itemWithoutPrimaryAndMultipleBitstreams, threshold);
assertThat("testWithNoPrimaryAndMultipleBitstreams 0", status, equalTo(DefaultAccessStatusHelper.OPEN_ACCESS)); assertThat("testWithNoPrimaryAndMultipleBitstreams 0", status, equalTo(DefaultAccessStatusHelper.OPEN_ACCESS));
String embargoDate = helper.getEmbargoFromItem(context, itemWithEmbargo, threshold);
assertThat("testWithNoPrimaryAndMultipleBitstreams 1", embargoDate, equalTo(null));
}
/**
* Create a Date from local year, month, day.
*
* @param year the year.
* @param month the month.
* @param day the day.
* @return the assembled date.
*/
private Date dateFrom(int year, int month, int day) {
return Date.from(LocalDate.of(year, month, day)
.atStartOfDay()
.atZone(ZoneId.systemDefault())
.toInstant());
} }
} }

View File

@@ -99,10 +99,11 @@ public class MetadataExportIT
script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration);
} }
if (script != null) { if (script != null) {
script.initialize(args, testDSpaceRunnableHandler, null); if (DSpaceRunnable.StepResult.Continue.equals(script.initialize(args, testDSpaceRunnableHandler, null))) {
script.run(); script.run();
} }
} }
}
@Test @Test
public void metadataExportToCsvTestUUID() throws Exception { public void metadataExportToCsvTestUUID() throws Exception {
@@ -206,9 +207,10 @@ public class MetadataExportIT
script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration);
} }
if (script != null) { if (script != null) {
script.initialize(args, testDSpaceRunnableHandler, null); if (DSpaceRunnable.StepResult.Continue.equals(script.initialize(args, testDSpaceRunnableHandler, null))) {
script.run(); script.run();
} }
}
Exception exceptionDuringTestRun = testDSpaceRunnableHandler.getException(); Exception exceptionDuringTestRun = testDSpaceRunnableHandler.getException();
assertTrue("Random UUID caused IllegalArgumentException", assertTrue("Random UUID caused IllegalArgumentException",
@@ -235,9 +237,10 @@ public class MetadataExportIT
script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration);
} }
if (script != null) { if (script != null) {
script.initialize(args, testDSpaceRunnableHandler, null); if (DSpaceRunnable.StepResult.Continue.equals(script.initialize(args, testDSpaceRunnableHandler, null))) {
script.run(); script.run();
} }
}
Exception exceptionDuringTestRun = testDSpaceRunnableHandler.getException(); Exception exceptionDuringTestRun = testDSpaceRunnableHandler.getException();
assertTrue("UUID of non-supported dsoType IllegalArgumentException", assertTrue("UUID of non-supported dsoType IllegalArgumentException",

View File

@@ -144,10 +144,11 @@ public class MetadataImportIT extends AbstractIntegrationTestWithDatabase {
script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration);
} }
if (script != null) { if (script != null) {
script.initialize(args, testDSpaceRunnableHandler, null); if (DSpaceRunnable.StepResult.Continue.equals(script.initialize(args, testDSpaceRunnableHandler, null))) {
script.run(); script.run();
} }
} }
}
@Test @Test
public void relationshipMetadataImportTest() throws Exception { public void relationshipMetadataImportTest() throws Exception {

View File

@@ -702,9 +702,11 @@ public class CSVMetadataImportReferenceIT extends AbstractIntegrationTestWithDat
script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration);
} }
if (script != null) { if (script != null) {
script.initialize(args, testDSpaceRunnableHandler, null); if (DSpaceRunnable.StepResult.Continue
.equals(script.initialize(args, testDSpaceRunnableHandler, null))) {
script.run(); script.run();
} }
}
if (testDSpaceRunnableHandler.getException() != null) { if (testDSpaceRunnableHandler.getException() != null) {
throw testDSpaceRunnableHandler.getException(); throw testDSpaceRunnableHandler.getException();
} }

View File

@@ -16,11 +16,15 @@ import java.io.ByteArrayInputStream;
import java.io.IOException; import java.io.IOException;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.sql.SQLException; import java.sql.SQLException;
import java.time.Period;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import com.google.common.base.Splitter; import com.google.common.base.Splitter;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.AbstractUnitTest; import org.dspace.AbstractUnitTest;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
@@ -41,10 +45,6 @@ import org.dspace.core.Constants;
import org.dspace.eperson.Group; import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.GroupService; import org.dspace.eperson.service.GroupService;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.MutablePeriod;
import org.joda.time.format.PeriodFormat;
import org.junit.After; import org.junit.After;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
@@ -54,7 +54,7 @@ public class GoogleMetadataTest extends AbstractUnitTest {
/** /**
* log4j category * log4j category
*/ */
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(GoogleMetadataTest.class); private static final Logger log = LogManager.getLogger();
/** /**
* Item instance for the tests * Item instance for the tests
@@ -319,6 +319,7 @@ public class GoogleMetadataTest extends AbstractUnitTest {
/** /**
* Test empty bitstreams * Test empty bitstreams
* @throws java.lang.Exception passed through.
*/ */
@Test @Test
public void testGetPDFURLWithEmptyBitstreams() throws Exception { public void testGetPDFURLWithEmptyBitstreams() throws Exception {
@@ -348,8 +349,9 @@ public class GoogleMetadataTest extends AbstractUnitTest {
} }
/** /**
* Verify there is no mapping for {@link GoogleMetadata#PDF} if there are only embargoed (non-publically accessible * Verify there is no mapping for {@link GoogleMetadata#PDF} if there are
* bitstream) files * only embargoed (non-publicly accessible bitstream) files.
* @throws java.lang.Exception passed through.
*/ */
@Test @Test
public void testGetPdfUrlOfEmbargoed() throws Exception { public void testGetPdfUrlOfEmbargoed() throws Exception {
@@ -363,8 +365,10 @@ public class GoogleMetadataTest extends AbstractUnitTest {
b.getFormat(context).setMIMEType("unknown"); b.getFormat(context).setMIMEType("unknown");
bundleService.addBitstream(context, bundle, b); bundleService.addBitstream(context, bundle, b);
// Set 3 month embargo on pdf // Set 3 month embargo on pdf
MutablePeriod period = PeriodFormat.getDefault().parseMutablePeriod("3 months"); Period period = Period.ofMonths(3);
Date embargoDate = DateTime.now(DateTimeZone.UTC).plus(period).toDate(); Date embargoDate = Date.from(ZonedDateTime.now(ZoneOffset.UTC)
.plus(period)
.toInstant());
Group anonGroup = groupService.findByName(context, Group.ANONYMOUS); Group anonGroup = groupService.findByName(context, Group.ANONYMOUS);
authorizeService.removeAllPolicies(context, b); authorizeService.removeAllPolicies(context, b);
resourcePolicyService.removeAllPolicies(context, b); resourcePolicyService.removeAllPolicies(context, b);

View File

@@ -0,0 +1,52 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authority;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.util.Date;
import org.junit.Test;
/**
*
* @author mwood
*/
public class AuthorityValueTest {

    /**
     * Test of stringToDate method, of class AuthorityValue.
     */
    @Test
    public void testStringToDate() {
        // Unparseable input yields null rather than throwing.
        assertNull("Unparseable date should return null",
                AuthorityValue.stringToDate("not a date"));

        // A zone-less date-time is interpreted in the system default zone.
        Date localExpected = Date.from(
                LocalDateTime.of(1957, 1, 27, 1, 23, 45)
                             .atZone(ZoneId.systemDefault())
                             .toInstant());
        assertEquals("Local date-time should convert",
                localExpected,
                AuthorityValue.stringToDate("1957-01-27T01:23:45"));

        // Milliseconds and an explicit offset from UTC are honored.
        Date offsetExpected = Date.from(
                LocalDateTime.of(1957, 1, 27, 1, 23, 45, 678_000_000)
                             .atZone(ZoneOffset.of("-05"))
                             .toInstant());
        assertEquals("Zoned date-time with milliseconds should convert",
                offsetExpected,
                AuthorityValue.stringToDate("1957-01-27T01:23:45.678-05"));
    }
}

View File

@@ -26,7 +26,7 @@ import org.mockito.junit.MockitoJUnitRunner;
* @author Luca Giamminonni (luca.giamminonni at 4science.it) * @author Luca Giamminonni (luca.giamminonni at 4science.it)
*/ */
@RunWith(MockitoJUnitRunner.class) @RunWith(MockitoJUnitRunner.class)
public class RegexPasswordValidatorTest extends AbstractIntegrationTest { public class RegexPasswordValidatorIT extends AbstractIntegrationTest {
@Mock @Mock
private ConfigurationService configurationService; private ConfigurationService configurationService;

View File

@@ -0,0 +1,103 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.browse;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import org.dspace.AbstractDSpaceTest;
import org.dspace.services.ConfigurationService;
import org.dspace.utils.DSpace;
import org.junit.Before;
import org.junit.Test;
/**
* Test class for {@link CrossLinks}
*/
public class CrossLinksTest extends AbstractDSpaceTest {
    /** Configuration service used to define the browse-link mappings under test. */
    protected ConfigurationService configurationService;

    @Before
    public void setUp() {
        configurationService = new DSpace().getConfigurationService();
    }

    @Test
    public void testFindLinkType_Null() throws Exception {
        // A null metadata field never matches any configured link.
        CrossLinks crossLinks = new CrossLinks();
        assertNull(crossLinks.findLinkType(null));
    }

    @Test
    public void testFindLinkType_NoMatch() throws Exception {
        // A field with no configured link must resolve to null.
        CrossLinks crossLinks = new CrossLinks();
        String metadataField = "foo.bar.baz.does.not.exist";
        assertNull(crossLinks.findLinkType(metadataField));
    }

    @Test
    public void testFindLinkType_WildcardMatch() throws Exception {
        // "dc.contributor.*" should match any qualifier of dc.contributor.
        configurationService.setProperty("webui.browse.link.1", "author:dc.contributor.*");
        CrossLinks crossLinks = new CrossLinks();
        String metadataField = "dc.contributor.author";
        assertEquals("author", crossLinks.findLinkType(metadataField));
    }

    @Test
    public void testFindLinkType_SingleExactMatch_Author() throws Exception {
        // Only the author mapping is configured, so only dc.contributor.author resolves.
        // FIX: a stray assertion on "dc.genre" was removed here; no "type" mapping is
        // configured in this test, so it only passed via configuration leaked from
        // other tests (order-dependent, shared ConfigurationService).
        configurationService.setProperty("webui.browse.link.1", "author:dc.contributor.author");
        CrossLinks crossLinks = new CrossLinks();
        assertEquals("author", crossLinks.findLinkType("dc.contributor.author"));
    }

    @Test
    public void testFindLinkType_SingleExactMatch_Type() throws Exception {
        // A single exact mapping from dc.genre to the "type" link.
        configurationService.setProperty("webui.browse.link.1", "type:dc.genre");
        CrossLinks crossLinks = new CrossLinks();
        assertEquals("type", crossLinks.findLinkType("dc.genre"));
    }

    @Test
    public void testFindLinkType_MultipleExactMatches_DifferentIndexes() throws Exception {
        // Two exact mappings configured under different property indexes.
        configurationService.setProperty("webui.browse.link.1", "author:dc.contributor.author");
        configurationService.setProperty("webui.browse.link.2", "type:dc.genre");
        CrossLinks crossLinks = new CrossLinks();
        assertEquals("author", crossLinks.findLinkType("dc.contributor.author"));
        assertEquals("type", crossLinks.findLinkType("dc.genre"));
    }

    @Test
    public void testFindLinkType_MultipleWildcardMatches_DifferentIndexes() throws Exception {
        // Two wildcard mappings configured under different property indexes.
        configurationService.setProperty("webui.browse.link.1", "author:dc.contributor.*");
        configurationService.setProperty("webui.browse.link.2", "subject:dc.subject.*");
        CrossLinks crossLinks = new CrossLinks();
        assertEquals("author", crossLinks.findLinkType("dc.contributor.author"));
        assertEquals("subject", crossLinks.findLinkType("dc.subject.lcsh"));
    }

    @Test
    public void testFindLinkType_MultiplExactAndWildcardMatches_DifferentIndexes() throws Exception {
        // A mix of wildcard and exact mappings; each field resolves to its own link.
        // NOTE(review): method name keeps the historical "Multipl" spelling; rename
        // to "Multiple..." in a dedicated commit if desired.
        configurationService.setProperty("webui.browse.link.1", "author:dc.contributor.*");
        configurationService.setProperty("webui.browse.link.2", "subject:dc.subject.*");
        configurationService.setProperty("webui.browse.link.3", "type:dc.genre");
        configurationService.setProperty("webui.browse.link.4", "dateissued:dc.date.issued");
        CrossLinks crossLinks = new CrossLinks();
        assertEquals("author", crossLinks.findLinkType("dc.contributor.author"));
        assertEquals("subject", crossLinks.findLinkType("dc.subject.lcsh"));
        assertEquals("type", crossLinks.findLinkType("dc.genre"));
        assertEquals("dateissued", crossLinks.findLinkType("dc.date.issued"));
    }
}

View File

@@ -8,6 +8,10 @@
package org.dspace.builder; package org.dspace.builder;
import java.sql.SQLException; import java.sql.SQLException;
import java.time.Instant;
import java.time.LocalDate;
import java.time.Period;
import java.time.ZoneId;
import java.util.Date; import java.util.Date;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
@@ -20,17 +24,13 @@ import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group; import org.dspace.eperson.Group;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.MutablePeriod;
import org.joda.time.format.PeriodFormat;
import org.joda.time.format.PeriodFormatter;
/** /**
* Abstract builder to construct DSpace Objects * Abstract builder to construct DSpace Objects
* *
* @author Tom Desair (tom dot desair at atmire dot com) * @author Tom Desair (tom dot desair at atmire dot com)
* @author Raf Ponsaerts (raf dot ponsaerts at atmire dot com) * @author Raf Ponsaerts (raf dot ponsaerts at atmire dot com)
* @param <T> concrete type of DSpaceObject
*/ */
public abstract class AbstractDSpaceObjectBuilder<T extends DSpaceObject> public abstract class AbstractDSpaceObjectBuilder<T extends DSpaceObject>
extends AbstractBuilder<T, DSpaceObjectService> { extends AbstractBuilder<T, DSpaceObjectService> {
@@ -112,21 +112,27 @@ public abstract class AbstractDSpaceObjectBuilder<T extends DSpaceObject>
} }
/** /**
* Support method to grant the {@link Constants#READ} permission over an object only to the {@link Group#ANONYMOUS} * Support method to grant the {@link Constants#READ} permission over an
* after the specified embargoPeriod. Any other READ permissions will be removed * object only to the {@link Group#ANONYMOUS} after the specified
* embargoPeriod. Any other READ permissions will be removed.
* *
* @param <B> type of this Builder.
* @param embargoPeriod * @param embargoPeriod
* the embargo period after which the READ permission will be active. It is parsed using the * the embargo period after which the READ permission will be
* {@link PeriodFormatter#parseMutablePeriod(String)} method of the joda library * active.
* @param dso * @param dso the DSpaceObject on which to grant the permission.
* the DSpaceObject on which grant the permission * @return the builder properly configured to retain read permission on the
* @return the builder properly configured to retain read permission on the object only for the specified group * object only for the specified group.
*/ */
protected <B extends AbstractDSpaceObjectBuilder<T>> B setEmbargo(String embargoPeriod, DSpaceObject dso) { protected <B extends AbstractDSpaceObjectBuilder<T>> B setEmbargo(Period embargoPeriod, DSpaceObject dso) {
// add policy just for anonymous // add policy just for anonymous
try { try {
MutablePeriod period = PeriodFormat.getDefault().parseMutablePeriod(embargoPeriod); Instant embargoInstant = LocalDate.now()
Date embargoDate = DateTime.now(DateTimeZone.UTC).plus(period).toDate(); .plus(embargoPeriod)
.atStartOfDay()
.atZone(ZoneId.systemDefault())
.toInstant();
Date embargoDate = Date.from(embargoInstant);
return setOnlyReadPermission(dso, groupService.findByName(context, Group.ANONYMOUS), embargoDate); return setOnlyReadPermission(dso, groupService.findByName(context, Group.ANONYMOUS), embargoDate);
} catch (Exception e) { } catch (Exception e) {
@@ -135,14 +141,19 @@ public abstract class AbstractDSpaceObjectBuilder<T extends DSpaceObject>
} }
/** /**
* Support method to grant the {@link Constants#READ} permission over an object only to a specific group. Any other * Support method to grant the {@link Constants#READ} permission over an
* READ permissions will be removed * object only to a specific group. Any other READ permissions will be
* removed.
* *
* @param <B> type of this Builder.
* @param dso * @param dso
* the DSpaceObject on which grant the permission * the DSpaceObject on which grant the permission
* @param group * @param group
* the EPersonGroup that will be granted of the permission * the EPersonGroup that will be granted of the permission
* @return the builder properly configured to retain read permission on the object only for the specified group * @param startDate
* the date on which access begins.
* @return the builder properly configured to retain read permission on the
* object only for the specified group.
*/ */
protected <B extends AbstractDSpaceObjectBuilder<T>> B setOnlyReadPermission(DSpaceObject dso, Group group, protected <B extends AbstractDSpaceObjectBuilder<T>> B setOnlyReadPermission(DSpaceObject dso, Group group,
Date startDate) { Date startDate) {
@@ -161,15 +172,20 @@ public abstract class AbstractDSpaceObjectBuilder<T extends DSpaceObject>
} }
return (B) this; return (B) this;
} }
/** /**
* Support method to grant the {@link Constants#ADMIN} permission over an object only to a specific eperson. * Support method to grant the {@link Constants#ADMIN} permission over an
* If another ADMIN policy is in place for an eperson it will be replaced * object only to a specific EPerson. If another ADMIN policy is in place
* for that EPerson it will be replaced.
* *
* @param <B> type of this Builder.
* @param dso * @param dso
* the DSpaceObject on which grant the permission * the DSpaceObject on which grant the permission
* @param eperson * @param eperson
* the eperson that will be granted of the permission * the EPerson that will be granted of the permission
* @return the builder properly configured to build the object with the additional admin permission * @param startDate the date on which access begins.
* @return the builder properly configured to build the object with the
* additional admin permission.
*/ */
protected <B extends AbstractDSpaceObjectBuilder<T>> B setAdminPermission(DSpaceObject dso, EPerson eperson, protected <B extends AbstractDSpaceObjectBuilder<T>> B setAdminPermission(DSpaceObject dso, EPerson eperson,
Date startDate) { Date startDate) {
@@ -191,6 +207,7 @@ public abstract class AbstractDSpaceObjectBuilder<T extends DSpaceObject>
/** /**
* Support method to grant {@link Constants#REMOVE} permission to a specific eperson * Support method to grant {@link Constants#REMOVE} permission to a specific eperson
* *
* @param <B> type of this Builder.
* @param dso * @param dso
* the DSpaceObject on which grant the permission * the DSpaceObject on which grant the permission
* @param eperson * @param eperson
@@ -220,6 +237,7 @@ public abstract class AbstractDSpaceObjectBuilder<T extends DSpaceObject>
/** /**
* Support method to grant {@link Constants#ADD} permission to a specific eperson * Support method to grant {@link Constants#ADD} permission to a specific eperson
* *
* @param <B> type of this Builder.
* @param dso * @param dso
* the DSpaceObject on which grant the permission * the DSpaceObject on which grant the permission
* @param eperson * @param eperson
@@ -249,6 +267,7 @@ public abstract class AbstractDSpaceObjectBuilder<T extends DSpaceObject>
/** /**
* Support method to grant {@link Constants#WRITE} permission to a specific eperson * Support method to grant {@link Constants#WRITE} permission to a specific eperson
* *
* @param <B> type of this Builder.
* @param dso * @param dso
* the DSpaceObject on which grant the permission * the DSpaceObject on which grant the permission
* @param eperson * @param eperson

View File

@@ -10,6 +10,7 @@ package org.dspace.builder;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.sql.SQLException; import java.sql.SQLException;
import java.time.Period;
import java.util.List; import java.util.List;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
@@ -171,7 +172,7 @@ public class BitstreamBuilder extends AbstractDSpaceObjectBuilder<Bitstream> {
return targetBundle; return targetBundle;
} }
public BitstreamBuilder withEmbargoPeriod(String embargoPeriod) { public BitstreamBuilder withEmbargoPeriod(Period embargoPeriod) {
return setEmbargo(embargoPeriod, bitstream); return setEmbargo(embargoPeriod, bitstream);
} }

View File

@@ -13,6 +13,7 @@ import static org.dspace.content.authority.Choices.CF_ACCEPTED;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import java.time.Period;
import java.util.UUID; import java.util.UUID;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
@@ -281,8 +282,8 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder<Item> {
} }
/** /**
* Withdrawn the item under build. Please note that an user need to be loggedin the context to avoid NPE during the * Withdraw the item under build. Please note that the Context must be
* creation of the provenance metadata * logged in to avoid NPE during the creation of the provenance metadata.
* *
* @return the ItemBuilder * @return the ItemBuilder
*/ */
@@ -291,7 +292,13 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder<Item> {
return this; return this;
} }
public ItemBuilder withEmbargoPeriod(String embargoPeriod) { /**
* Set an embargo to end after some time from "now".
*
* @param embargoPeriod embargo starting "now", for this long.
* @return the ItemBuilder.
*/
public ItemBuilder withEmbargoPeriod(Period embargoPeriod) {
return setEmbargo(embargoPeriod, item); return setEmbargo(embargoPeriod, item);
} }

View File

@@ -432,6 +432,51 @@ public class BitstreamTest extends AbstractDSpaceObjectTest {
assertThat("testExpunge 0", bitstreamService.find(context, bitstreamId), nullValue()); assertThat("testExpunge 0", bitstreamService.find(context, bitstreamId), nullValue());
} }
/**
 * Deleting a bitstream which is a bundle's primary bitstream must both flag
 * the bitstream as deleted and clear the bundle's primary-bitstream reference.
 */
@Test
public void testDeleteBitstreamAndUnsetPrimaryBitstreamID()
    throws IOException, SQLException, AuthorizeException {
    context.turnOffAuthorisationSystem();

    // Build a minimal item with a bundle to host the bitstream.
    Community owningCommunity = communityService.create(null, context);
    Collection collection = collectionService.create(context, owningCommunity);
    WorkspaceItem workspaceItem = workspaceItemService.create(context, collection, false);
    Item item = installItemService.installItem(context, workspaceItem);
    Bundle bundle = bundleService.create(context, item, "TESTBUNDLE");

    // Stub authorization: REMOVE on the bundle, WRITE/DELETE on any bitstream.
    doNothing().when(authorizeServiceSpy).authorizeAction(context, bundle, Constants.REMOVE);
    doNothing().when(authorizeServiceSpy)
               .authorizeAction(any(Context.class), any(Bitstream.class), eq(Constants.WRITE));
    doNothing().when(authorizeServiceSpy)
               .authorizeAction(any(Context.class), any(Bitstream.class), eq(Constants.DELETE));

    // Create a disposable bitstream and make it the bundle's primary bitstream.
    File contentFile = new File(testProps.get("test.bitstream").toString());
    Bitstream deletable = bitstreamService.create(context, new FileInputStream(contentFile));
    bundleService.addBitstream(context, bundle, deletable);
    bundle.setPrimaryBitstreamID(deletable);
    context.restoreAuthSystemState();

    // Preconditions: not yet deleted, and registered as the primary bitstream.
    assertFalse("testDeleteBitstreamAndUnsetPrimaryBitstreamID 0", deletable.isDeleted());
    assertThat("testDeleteBitstreamAndUnsetPrimaryBitstreamID 1", bundle.getPrimaryBitstream(), equalTo(deletable));

    // Deleting must flag the bitstream as deleted...
    bitstreamService.delete(context, deletable);
    assertTrue("testDeleteBitstreamAndUnsetPrimaryBitstreamID 2", deletable.isDeleted());

    // ...and unset the bundle's primary bitstream.
    assertThat("testDeleteBitstreamAndUnsetPrimaryBitstreamID 3", bundle.getPrimaryBitstream(), equalTo(null));
}
/** /**
* Test of retrieve method, of class Bitstream. * Test of retrieve method, of class Bitstream.
*/ */

View File

@@ -513,6 +513,41 @@ public class BundleTest extends AbstractDSpaceObjectTest {
} }
/**
 * Test removeBitstream method and also the unsetPrimaryBitstreamID method, of class Bundle.
 * Removing a bundle's primary bitstream must also clear the bundle's
 * primary-bitstream reference.
 */
@Test
public void testRemoveBitstreamAuthAndUnsetPrimaryBitstreamID()
    throws IOException, SQLException, AuthorizeException {
    // Allow Item WRITE permissions
    doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.WRITE);
    // Allow Bundle ADD permissions
    doNothing().when(authorizeServiceSpy).authorizeAction(context, b, Constants.ADD);
    // Allow Bundle REMOVE permissions
    doNothing().when(authorizeServiceSpy).authorizeAction(context, b, Constants.REMOVE);
    // Allow Bitstream WRITE permissions (for any bitstream)
    doNothing().when(authorizeServiceSpy)
               .authorizeAction(any(Context.class), any(Bitstream.class), eq(Constants.WRITE));
    // Allow Bitstream DELETE permissions (for any bitstream)
    doNothing().when(authorizeServiceSpy)
               .authorizeAction(any(Context.class), any(Bitstream.class), eq(Constants.DELETE));
    context.turnOffAuthorisationSystem();
    // Create a test bitstream and make it the bundle's primary bitstream.
    File f = new File(testProps.get("test.bitstream").toString());
    Bitstream bs = bitstreamService.create(context, new FileInputStream(f));
    bundleService.addBitstream(context, b, bs);
    b.setPrimaryBitstreamID(bs);
    context.restoreAuthSystemState();
    assertThat("testRemoveBitstreamAuthAndUnsetPrimaryBitstreamID 0", b.getPrimaryBitstream(), equalTo(bs));
    // remove the bitstream from the bundle
    bundleService.removeBitstream(context, b, bs);
    // getPrimaryBitstream() returns null once the primary bitstream is removed
    // (the old "-1 when not set" wording referred to the legacy int-returning
    // getPrimaryBitstreamID API).
    assertThat("testRemoveBitstreamAuthAndUnsetPrimaryBitstreamID 1", b.getPrimaryBitstream(), equalTo(null));
}
/** /**
* Test of update method, of class Bundle. * Test of update method, of class Bundle.
*/ */

View File

@@ -26,7 +26,7 @@ import org.dspace.services.factory.DSpaceServicesFactory;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
public class RelationshipServiceImplVersioningTest extends AbstractIntegrationTestWithDatabase { public class RelationshipServiceImplVersioningIT extends AbstractIntegrationTestWithDatabase {
private RelationshipService relationshipService; private RelationshipService relationshipService;
private RelationshipDAO relationshipDAO; private RelationshipDAO relationshipDAO;

View File

@@ -70,7 +70,7 @@ import org.junit.Before;
import org.junit.Test; import org.junit.Test;
import org.springframework.beans.factory.config.AutowireCapableBeanFactory; import org.springframework.beans.factory.config.AutowireCapableBeanFactory;
public class VersioningWithRelationshipsTest extends AbstractIntegrationTestWithDatabase { public class VersioningWithRelationshipsIT extends AbstractIntegrationTestWithDatabase {
private final RelationshipService relationshipService = private final RelationshipService relationshipService =
ContentServiceFactory.getInstance().getRelationshipService(); ContentServiceFactory.getInstance().getRelationshipService();

View File

@@ -39,9 +39,9 @@ import org.junit.Test;
* Created by: Andrew Wood * Created by: Andrew Wood
* Date: 20 Sep 2019 * Date: 20 Sep 2019
*/ */
public class RelationshipDAOImplTest extends AbstractIntegrationTest { public class RelationshipDAOImplIT extends AbstractIntegrationTest {
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(RelationshipDAOImplTest.class); private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(RelationshipDAOImplIT.class);
private Relationship relationship; private Relationship relationship;

View File

@@ -35,9 +35,9 @@ import org.junit.After;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
public class RelationshipTypeDAOImplTest extends AbstractIntegrationTest { public class RelationshipTypeDAOImplIT extends AbstractIntegrationTest {
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(RelationshipTypeDAOImplTest.class); private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(RelationshipTypeDAOImplIT.class);
private Relationship relationship; private Relationship relationship;

View File

@@ -26,6 +26,8 @@ import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.app.requestitem.RequestItem; import org.dspace.app.requestitem.RequestItem;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.builder.BitstreamBuilder; import org.dspace.builder.BitstreamBuilder;
import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder; import org.dspace.builder.CommunityBuilder;
@@ -37,6 +39,7 @@ import org.dspace.builder.RelationshipTypeBuilder;
import org.dspace.builder.RequestItemBuilder; import org.dspace.builder.RequestItemBuilder;
import org.dspace.builder.ResourcePolicyBuilder; import org.dspace.builder.ResourcePolicyBuilder;
import org.dspace.content.Bitstream; import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.Community; import org.dspace.content.Community;
import org.dspace.content.EntityType; import org.dspace.content.EntityType;
@@ -48,14 +51,16 @@ import org.dspace.content.WorkspaceItem;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.eperson.Group; import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.GroupService;
import org.dspace.versioning.Version; import org.dspace.versioning.Version;
import org.dspace.versioning.factory.VersionServiceFactory; import org.dspace.versioning.factory.VersionServiceFactory;
import org.dspace.versioning.service.VersioningService; import org.dspace.versioning.service.VersioningService;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
public class ItemServiceTest extends AbstractIntegrationTestWithDatabase { public class ItemServiceIT extends AbstractIntegrationTestWithDatabase {
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemServiceTest.class); private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemServiceIT.class);
protected RelationshipService relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); protected RelationshipService relationshipService = ContentServiceFactory.getInstance().getRelationshipService();
protected RelationshipTypeService relationshipTypeService = ContentServiceFactory.getInstance() protected RelationshipTypeService relationshipTypeService = ContentServiceFactory.getInstance()
@@ -68,6 +73,8 @@ public class ItemServiceTest extends AbstractIntegrationTestWithDatabase {
protected WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService(); protected WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService();
protected MetadataValueService metadataValueService = ContentServiceFactory.getInstance().getMetadataValueService(); protected MetadataValueService metadataValueService = ContentServiceFactory.getInstance().getMetadataValueService();
protected VersioningService versioningService = VersionServiceFactory.getInstance().getVersionService(); protected VersioningService versioningService = VersionServiceFactory.getInstance().getVersionService();
protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService();
Community community; Community community;
Collection collection1; Collection collection1;
@@ -752,6 +759,154 @@ public class ItemServiceTest extends AbstractIntegrationTestWithDatabase {
assertNull(itemService.find(context, item.getID())); assertNull(itemService.find(context, item.getID()));
} }
@Test
public void testMoveItemToCollectionWithMoreRestrictiveItemReadPolicy() throws Exception {
/* Verify that, if we move an item from a collection with a permissive default item READ policy
* to a collection with a restrictive default item READ policy,
* that the item and its bundles do not retain the original permissive item READ policy.
* However, its bitstreams do.
*/
context.turnOffAuthorisationSystem();
Group anonymous = groupService.findByName(context, Group.ANONYMOUS);
Group admin = groupService.findByName(context, Group.ADMIN);
// Set up the two different collections: one permissive and one restrictive in its default READ policy.
Collection permissive = CollectionBuilder
.createCollection(context, community)
.build();
Collection restrictive = CollectionBuilder
.createCollection(context, community)
.build();
authorizeService.removePoliciesActionFilter(context, restrictive, Constants.DEFAULT_ITEM_READ);
authorizeService.addPolicy(context, restrictive, Constants.DEFAULT_ITEM_READ, admin);
// Add an item to the permissive collection.
Item item = ItemBuilder
.createItem(context, permissive)
.build();
Bitstream bitstream = BitstreamBuilder.createBitstream(context, item, InputStream.nullInputStream())
.build();
Bundle bundle = item.getBundles("ORIGINAL").get(0);
// Verify that the item, bundle and bitstream each have exactly one READ policy, for the anonymous group.
assertEquals(
List.of(anonymous),
authorizeService.getPoliciesActionFilter(context, item, Constants.READ)
.stream().map(ResourcePolicy::getGroup).collect(Collectors.toList())
);
assertEquals(
List.of(anonymous),
authorizeService.getPoliciesActionFilter(context, bundle, Constants.READ)
.stream().map(ResourcePolicy::getGroup).collect(Collectors.toList())
);
assertEquals(
List.of(anonymous),
authorizeService.getPoliciesActionFilter(context, bitstream, Constants.READ)
.stream().map(ResourcePolicy::getGroup).collect(Collectors.toList())
);
// Move the item to the restrictive collection, making sure to inherit default policies.
itemService.move(context, item, permissive, restrictive, true);
// Verify that the item's read policy now only allows administrators.
assertEquals(
List.of(admin),
authorizeService.getPoliciesActionFilter(context, item, Constants.READ)
.stream().map(ResourcePolicy::getGroup).collect(Collectors.toList())
);
assertEquals(
List.of(admin),
authorizeService.getPoliciesActionFilter(context, bundle, Constants.READ)
.stream().map(ResourcePolicy::getGroup).collect(Collectors.toList())
);
assertEquals(
List.of(anonymous),
authorizeService.getPoliciesActionFilter(context, bitstream, Constants.READ)
.stream().map(ResourcePolicy::getGroup).collect(Collectors.toList())
);
context.restoreAuthSystemState();
}
@Test
public void testMoveItemToCollectionWithMoreRestrictiveBitstreamReadPolicy() throws Exception {
/* Verify that, if we move an item from a collection with a permissive default bitstream READ policy
* to a collection with a restrictive default bitstream READ policy,
* that the item's bitstreams do not retain the original permissive READ policy.
* However, the item itself and its bundles do retain the original policy.
*/
context.turnOffAuthorisationSystem();
Group anonymous = groupService.findByName(context, Group.ANONYMOUS);
Group admin = groupService.findByName(context, Group.ADMIN);
// Set up the two different collections: one permissive and one restrictive in its default READ policy.
Collection permissive = CollectionBuilder
.createCollection(context, community)
.build();
Collection restrictive = CollectionBuilder
.createCollection(context, community)
.build();
authorizeService.removePoliciesActionFilter(context, restrictive, Constants.DEFAULT_BITSTREAM_READ);
authorizeService.addPolicy(context, restrictive, Constants.DEFAULT_BITSTREAM_READ, admin);
// Add an item to the permissive collection.
Item item = ItemBuilder
.createItem(context, permissive)
.build();
Bitstream bitstream = BitstreamBuilder.createBitstream(context, item, InputStream.nullInputStream())
.build();
Bundle bundle = item.getBundles("ORIGINAL").get(0);
// Verify that the item, bundle and bitstream each have exactly one READ policy, for the anonymous group.
assertEquals(
List.of(anonymous),
authorizeService.getPoliciesActionFilter(context, item, Constants.READ)
.stream().map(ResourcePolicy::getGroup).collect(Collectors.toList())
);
assertEquals(
List.of(anonymous),
authorizeService.getPoliciesActionFilter(context, bundle, Constants.READ)
.stream().map(ResourcePolicy::getGroup).collect(Collectors.toList())
);
assertEquals(
List.of(anonymous),
authorizeService.getPoliciesActionFilter(context, bitstream, Constants.READ)
.stream().map(ResourcePolicy::getGroup).collect(Collectors.toList())
);
// Move the item to the restrictive collection, making sure to inherit default policies.
itemService.move(context, item, permissive, restrictive, true);
// Verify that the bundle and bitstream's read policies now only allows administrators.
assertEquals(
List.of(anonymous),
authorizeService.getPoliciesActionFilter(context, item, Constants.READ)
.stream().map(ResourcePolicy::getGroup).collect(Collectors.toList())
);
assertEquals(
List.of(anonymous),
authorizeService.getPoliciesActionFilter(context, bundle, Constants.READ)
.stream().map(ResourcePolicy::getGroup).collect(Collectors.toList())
);
assertEquals(
List.of(admin),
authorizeService.getPoliciesActionFilter(context, bitstream, Constants.READ)
.stream().map(ResourcePolicy::getGroup).collect(Collectors.toList())
);
context.restoreAuthSystemState();
}
private void assertMetadataValue(String authorQualifier, String contributorElement, String dcSchema, String value, private void assertMetadataValue(String authorQualifier, String contributorElement, String dcSchema, String value,
String authority, int place, MetadataValue metadataValue) { String authority, int place, MetadataValue metadataValue) {
assertThat(metadataValue.getValue(), equalTo(value)); assertThat(metadataValue.getValue(), equalTo(value));

View File

@@ -43,10 +43,11 @@ public class CurationIT extends AbstractIntegrationTestWithDatabase {
script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration);
} }
if (script != null) { if (script != null) {
script.initialize(args, testDSpaceRunnableHandler, null); if (DSpaceRunnable.StepResult.Continue.equals(script.initialize(args, testDSpaceRunnableHandler, null))) {
script.run(); script.run();
} }
} }
}
@Test @Test
public void curationWithEPersonParameterTest() throws Exception { public void curationWithEPersonParameterTest() throws Exception {
@@ -69,8 +70,9 @@ public class CurationIT extends AbstractIntegrationTestWithDatabase {
script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration);
} }
if (script != null) { if (script != null) {
script.initialize(args, testDSpaceRunnableHandler, null); if (DSpaceRunnable.StepResult.Continue.equals(script.initialize(args, testDSpaceRunnableHandler, null))) {
script.run(); script.run();
} }
} }
}
} }

View File

@@ -27,7 +27,7 @@ import org.dspace.services.factory.DSpaceServicesFactory;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
public class VersionedHandleIdentifierProviderTest extends AbstractIntegrationTestWithDatabase { public class VersionedHandleIdentifierProviderIT extends AbstractIntegrationTestWithDatabase {
private ServiceManager serviceManager; private ServiceManager serviceManager;
private IdentifierServiceImpl identifierService; private IdentifierServiceImpl identifierService;

View File

@@ -0,0 +1,38 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.crossref;
import static org.junit.Assert.assertEquals;
import java.util.Collection;
import org.junit.Test;
/**
*
* @author mwood
*/
public class CrossRefDateMetadataProcessorTest {
/**
* Test of processMetadata method, of class CrossRefDateMetadataProcessor.
*/
@Test
public void testProcessMetadata() {
CrossRefDateMetadataProcessor unit = new CrossRefDateMetadataProcessor();
unit.setPathToArray("/dates");
Collection metadata = unit.processMetadata("{\"dates\": ["
+ "[1957, 1, 27],"
+ "[1957, 1],"
+ "[1957]"
+ "]}");
String[] metadataValues = (String[]) metadata.toArray(new String[3]);
assertEquals("[yyyy, MM, dd] should parse", "1957-01-27", metadataValues[0]);
assertEquals("[yyyy, MM] should parse", "1957-01", metadataValues[1]);
assertEquals("[yyyy] should parse", "1957", metadataValues[2]);
}
}

View File

@@ -15,7 +15,7 @@
<parent> <parent>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId> <artifactId>dspace-parent</artifactId>
<version>7.6</version> <version>8.0-SNAPSHOT</version>
<relativePath>..</relativePath> <relativePath>..</relativePath>
</parent> </parent>
@@ -93,7 +93,7 @@
<dependency> <dependency>
<groupId>de.digitalcollections.iiif</groupId> <groupId>de.digitalcollections.iiif</groupId>
<artifactId>iiif-apis</artifactId> <artifactId>iiif-apis</artifactId>
<version>0.3.9</version> <version>0.3.10</version>
<exclusions> <exclusions>
<exclusion> <exclusion>
<groupId>org.javassist</groupId> <groupId>org.javassist</groupId>

View File

@@ -8,14 +8,14 @@
<parent> <parent>
<artifactId>dspace-parent</artifactId> <artifactId>dspace-parent</artifactId>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<version>7.6</version> <version>8.0-SNAPSHOT</version>
<relativePath>..</relativePath> <relativePath>..</relativePath>
</parent> </parent>
<properties> <properties>
<!-- This is the path to the root [dspace-src] directory. --> <!-- This is the path to the root [dspace-src] directory. -->
<root.basedir>${basedir}/..</root.basedir> <root.basedir>${basedir}/..</root.basedir>
<xoai.version>3.3.0</xoai.version> <xoai.version>3.4.0</xoai.version>
<jtwig.version>5.87.0.RELEASE</jtwig.version> <jtwig.version>5.87.0.RELEASE</jtwig.version>
</properties> </properties>
@@ -55,41 +55,10 @@
<artifactId>xoai</artifactId> <artifactId>xoai</artifactId>
<version>${xoai.version}</version> <version>${xoai.version}</version>
<exclusions> <exclusions>
<!-- Use version provided by SolrJ -->
<exclusion> <exclusion>
<groupId>org.hamcrest</groupId> <groupId>com.fasterxml.woodstox</groupId>
<artifactId>hamcrest-all</artifactId> <artifactId>woodstox-core</artifactId>
</exclusion>
<exclusion>
<!--Hard pinned below.-->
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<!-- Later version provided by SolrJ -->
<exclusion>
<groupId>org.codehaus.woodstox</groupId>
<artifactId>wstx-asl</artifactId>
</exclusion>
<!-- Later version provided by Hibernate -->
<exclusion>
<groupId>org.dom4j</groupId>
<artifactId>dom4j</artifactId>
</exclusion>
<!-- We don't use this test framework & it causes dependency convergence issues -->
<exclusion>
<groupId>com.lyncode</groupId>
<artifactId>test-support</artifactId>
</exclusion> </exclusion>
</exclusions> </exclusions>
</dependency> </dependency>

View File

@@ -85,7 +85,6 @@ public class XOAI {
// needed because the solr query only returns 10 rows by default // needed because the solr query only returns 10 rows by default
private final Context context; private final Context context;
private boolean optimize;
private final boolean verbose; private final boolean verbose;
private boolean clean; private boolean clean;
@@ -122,9 +121,8 @@ public class XOAI {
return formats; return formats;
} }
public XOAI(Context context, boolean optimize, boolean clean, boolean verbose) { public XOAI(Context context, boolean clean, boolean verbose) {
this.context = context; this.context = context;
this.optimize = optimize;
this.clean = clean; this.clean = clean;
this.verbose = verbose; this.verbose = verbose;
@@ -173,12 +171,6 @@ public class XOAI {
} }
solrServerResolver.getServer().commit(); solrServerResolver.getServer().commit();
if (optimize) {
println("Optimizing Index");
solrServerResolver.getServer().optimize();
println("Index optimized");
}
// Set last compilation date // Set last compilation date
xoaiLastCompilationCacheService.put(new Date()); xoaiLastCompilationCacheService.put(new Date());
return result; return result;
@@ -586,7 +578,6 @@ public class XOAI {
CommandLineParser parser = new DefaultParser(); CommandLineParser parser = new DefaultParser();
Options options = new Options(); Options options = new Options();
options.addOption("c", "clear", false, "Clear index before indexing"); options.addOption("c", "clear", false, "Clear index before indexing");
options.addOption("o", "optimize", false, "Optimize index at the end");
options.addOption("v", "verbose", false, "Verbose output"); options.addOption("v", "verbose", false, "Verbose output");
options.addOption("h", "help", false, "Shows some help"); options.addOption("h", "help", false, "Shows some help");
options.addOption("n", "number", true, "FOR DEVELOPMENT MUST DELETE"); options.addOption("n", "number", true, "FOR DEVELOPMENT MUST DELETE");
@@ -620,7 +611,7 @@ public class XOAI {
if (COMMAND_IMPORT.equals(command)) { if (COMMAND_IMPORT.equals(command)) {
ctx = new Context(Context.Mode.READ_ONLY); ctx = new Context(Context.Mode.READ_ONLY);
XOAI indexer = new XOAI(ctx, line.hasOption('o'), line.hasOption('c'), line.hasOption('v')); XOAI indexer = new XOAI(ctx, line.hasOption('c'), line.hasOption('v'));
applicationContext.getAutowireCapableBeanFactory().autowireBean(indexer); applicationContext.getAutowireCapableBeanFactory().autowireBean(indexer);
@@ -706,7 +697,6 @@ public class XOAI {
System.out.println(" " + COMMAND_IMPORT + " - To import DSpace items into OAI index and cache system"); System.out.println(" " + COMMAND_IMPORT + " - To import DSpace items into OAI index and cache system");
System.out.println(" " + COMMAND_CLEAN_CACHE + " - Cleans the OAI cached responses"); System.out.println(" " + COMMAND_CLEAN_CACHE + " - Cleans the OAI cached responses");
System.out.println("> Parameters:"); System.out.println("> Parameters:");
System.out.println(" -o Optimize index after indexing (" + COMMAND_IMPORT + " only)");
System.out.println(" -c Clear index (" + COMMAND_IMPORT + " only)"); System.out.println(" -c Clear index (" + COMMAND_IMPORT + " only)");
System.out.println(" -v Verbose output"); System.out.println(" -v Verbose output");
System.out.println(" -h Shows this text"); System.out.println(" -h Shows this text");

View File

@@ -12,6 +12,7 @@ import java.util.List;
import com.lyncode.xoai.dataprovider.xml.xoai.Element; import com.lyncode.xoai.dataprovider.xml.xoai.Element;
import com.lyncode.xoai.dataprovider.xml.xoai.Metadata; import com.lyncode.xoai.dataprovider.xml.xoai.Metadata;
import org.apache.commons.lang3.StringUtils;
import org.dspace.access.status.factory.AccessStatusServiceFactory; import org.dspace.access.status.factory.AccessStatusServiceFactory;
import org.dspace.access.status.service.AccessStatusService; import org.dspace.access.status.service.AccessStatusService;
import org.dspace.content.Item; import org.dspace.content.Item;
@@ -31,6 +32,13 @@ import org.dspace.xoai.util.ItemUtils;
* <field name="value">open.access</field> * <field name="value">open.access</field>
* </element> * </element>
* </element> * </element>
* OR
* <element name="others">
* <element name="access-status">
* <field name="value">embargo</field>
* <field name="embargo">2024-10-10</field>
* </element>
* </element>
* } * }
* </pre> * </pre>
* Returning Values are based on: * Returning Values are based on:
@@ -46,9 +54,15 @@ public class AccessStatusElementItemCompilePlugin implements XOAIExtensionItemCo
String accessStatusType; String accessStatusType;
accessStatusType = accessStatusService.getAccessStatus(context, item); accessStatusType = accessStatusService.getAccessStatus(context, item);
String embargoFromItem = accessStatusService.getEmbargoFromItem(context, item);
Element accessStatus = ItemUtils.create("access-status"); Element accessStatus = ItemUtils.create("access-status");
accessStatus.getField().add(ItemUtils.createValue("value", accessStatusType)); accessStatus.getField().add(ItemUtils.createValue("value", accessStatusType));
if (StringUtils.isNotEmpty(embargoFromItem)) {
accessStatus.getField().add(ItemUtils.createValue("embargo", embargoFromItem));
}
Element others; Element others;
List<Element> elements = metadata.getElement(); List<Element> elements = metadata.getElement();
if (ItemUtils.getElement(elements, "others") != null) { if (ItemUtils.getElement(elements, "others") != null) {

View File

@@ -12,7 +12,7 @@ import java.io.FileInputStream;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import javax.xml.transform.Source; import javax.xml.transform.Source;
import javax.xml.transform.Transformer; import javax.xml.transform.Templates;
import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerFactory; import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamSource; import javax.xml.transform.stream.StreamSource;
@@ -40,8 +40,7 @@ public class DSpaceResourceResolver implements ResourceResolver {
} }
@Override @Override
public Transformer getTransformer(String path) throws IOException, public Templates getTemplates(String path) throws IOException, TransformerConfigurationException {
TransformerConfigurationException {
// construct a Source that reads from an InputStream // construct a Source that reads from an InputStream
Source mySrc = new StreamSource(getResource(path)); Source mySrc = new StreamSource(getResource(path));
// specify a system ID (the path to the XSLT-file on the filesystem) // specify a system ID (the path to the XSLT-file on the filesystem)
@@ -49,6 +48,6 @@ public class DSpaceResourceResolver implements ResourceResolver {
// XSLT-files (like <xsl:import href="utils.xsl"/>) // XSLT-files (like <xsl:import href="utils.xsl"/>)
String systemId = basePath + "/" + path; String systemId = basePath + "/" + path;
mySrc.setSystemId(systemId); mySrc.setSystemId(systemId);
return transformerFactory.newTransformer(mySrc); return transformerFactory.newTemplates(mySrc);
} }
} }

View File

@@ -103,6 +103,11 @@ public class ItemUtils {
bundle.getElement().add(bitstreams); bundle.getElement().add(bitstreams);
List<Bitstream> bits = b.getBitstreams(); List<Bitstream> bits = b.getBitstreams();
for (Bitstream bit : bits) { for (Bitstream bit : bits) {
// Check if bitstream is null and log the error
if (bit == null) {
log.error("Null bitstream found, check item uuid: " + item.getID());
break;
}
Element bitstream = create("bitstream"); Element bitstream = create("bitstream");
bitstreams.getElement().add(bitstream); bitstreams.getElement().add(bitstream);
String url = ""; String url = "";

View File

@@ -29,7 +29,7 @@ public class PipelineTest {
InputStream input = PipelineTest.class.getClassLoader().getResourceAsStream("item.xml"); InputStream input = PipelineTest.class.getClassLoader().getResourceAsStream("item.xml");
InputStream xslt = PipelineTest.class.getClassLoader().getResourceAsStream("oai_dc.xsl"); InputStream xslt = PipelineTest.class.getClassLoader().getResourceAsStream("oai_dc.xsl");
String output = FileUtils.readAllText(new XSLPipeline(input, true) String output = FileUtils.readAllText(new XSLPipeline(input, true)
.apply(factory.newTransformer(new StreamSource(xslt))) .apply(factory.newTemplates(new StreamSource(xslt)))
.getTransformed()); .getTransformed());
assertThat(output, oai_dc().withXPath("/oai_dc:dc/dc:title", equalTo("Teste"))); assertThat(output, oai_dc().withXPath("/oai_dc:dc/dc:title", equalTo("Teste")));

View File

@@ -9,7 +9,7 @@
<parent> <parent>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId> <artifactId>dspace-parent</artifactId>
<version>7.6</version> <version>8.0-SNAPSHOT</version>
<relativePath>..</relativePath> <relativePath>..</relativePath>
</parent> </parent>

View File

@@ -3,7 +3,7 @@
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-rest</artifactId> <artifactId>dspace-rest</artifactId>
<packaging>war</packaging> <packaging>war</packaging>
<version>7.6</version> <version>8.0-SNAPSHOT</version>
<name>DSpace (Deprecated) REST Webapp</name> <name>DSpace (Deprecated) REST Webapp</name>
<description>DSpace RESTful Web Services API. NOTE: this REST API is DEPRECATED. <description>DSpace RESTful Web Services API. NOTE: this REST API is DEPRECATED.
Please consider using the REST API in the dspace-server-webapp instead!</description> Please consider using the REST API in the dspace-server-webapp instead!</description>
@@ -12,7 +12,7 @@
<parent> <parent>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId> <artifactId>dspace-parent</artifactId>
<version>7.6</version> <version>8.0-SNAPSHOT</version>
<relativePath>..</relativePath> <relativePath>..</relativePath>
</parent> </parent>

View File

@@ -10,7 +10,7 @@ This webapp uses the following technologies:
We don't use Spring Data REST as we haven't a spring data layer and we want to provide clear separation between the persistence representation and the REST representation We don't use Spring Data REST as we haven't a spring data layer and we want to provide clear separation between the persistence representation and the REST representation
## How to contribute ## How to contribute
Check the infomation available on the DSpace Official Wiki page for the [DSpace 7 Working Group](https://wiki.duraspace.org/display/DSPACE/DSpace+7+UI+Working+Group) Check the information available on the DSpace Official Wiki page for the [DSpace 7 Working Group](https://wiki.duraspace.org/display/DSPACE/DSpace+7+UI+Working+Group)
[DSpace 7 REST: Coding DSpace Objects](https://wiki.duraspace.org/display/DSPACE/DSpace+7+REST%3A+Coding+DSpace+Objects) [DSpace 7 REST: Coding DSpace Objects](https://wiki.duraspace.org/display/DSPACE/DSpace+7+REST%3A+Coding+DSpace+Objects)

View File

@@ -15,7 +15,7 @@
<parent> <parent>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId> <artifactId>dspace-parent</artifactId>
<version>7.6</version> <version>8.0-SNAPSHOT</version>
<relativePath>..</relativePath> <relativePath>..</relativePath>
</parent> </parent>
@@ -288,7 +288,7 @@
<dependency> <dependency>
<groupId>com.flipkart.zjsonpatch</groupId> <groupId>com.flipkart.zjsonpatch</groupId>
<artifactId>zjsonpatch</artifactId> <artifactId>zjsonpatch</artifactId>
<version>0.4.6</version> <version>0.4.14</version>
</dependency> </dependency>
<!-- HAL Browser (via WebJars) : https://github.com/mikekelly/hal-browser --> <!-- HAL Browser (via WebJars) : https://github.com/mikekelly/hal-browser -->
@@ -308,7 +308,7 @@
<dependency> <dependency>
<groupId>org.webjars.bowergithub.jquery</groupId> <groupId>org.webjars.bowergithub.jquery</groupId>
<artifactId>jquery-dist</artifactId> <artifactId>jquery-dist</artifactId>
<version>3.6.0</version> <version>3.7.0</version>
</dependency> </dependency>
<!-- Pull in current version of Toastr (toastrjs.com) via WebJars <!-- Pull in current version of Toastr (toastrjs.com) via WebJars
Made available at: webjars/toastr/build/toastr.min.js --> Made available at: webjars/toastr/build/toastr.min.js -->
@@ -322,7 +322,7 @@
<dependency> <dependency>
<groupId>org.webjars.bowergithub.medialize</groupId> <groupId>org.webjars.bowergithub.medialize</groupId>
<artifactId>uri.js</artifactId> <artifactId>uri.js</artifactId>
<version>1.19.10</version> <version>1.19.11</version>
</dependency> </dependency>
<!-- Pull in current version of Underscore.js (https://underscorejs.org/) via WebJars <!-- Pull in current version of Underscore.js (https://underscorejs.org/) via WebJars
Made available at: webjars/underscore/underscore-min.js --> Made available at: webjars/underscore/underscore-min.js -->
@@ -433,10 +433,6 @@
<groupId>commons-validator</groupId> <groupId>commons-validator</groupId>
<artifactId>commons-validator</artifactId> <artifactId>commons-validator</artifactId>
</dependency> </dependency>
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
</dependency>
<dependency> <dependency>
<groupId>com.fasterxml.jackson.core</groupId> <groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId> <artifactId>jackson-databind</artifactId>
@@ -541,7 +537,7 @@
<dependency> <dependency>
<groupId>org.exparity</groupId> <groupId>org.exparity</groupId>
<artifactId>hamcrest-date</artifactId> <artifactId>hamcrest-date</artifactId>
<version>2.0.7</version> <version>2.0.8</version>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency> <dependency>

View File

@@ -132,7 +132,7 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
Exception ex) throws IOException { Exception ex) throws IOException {
//422 is not defined in HttpServletResponse. Its meaning is "Unprocessable Entity". //422 is not defined in HttpServletResponse. Its meaning is "Unprocessable Entity".
//Using the value from HttpStatus. //Using the value from HttpStatus.
sendErrorResponse(request, response, null, sendErrorResponse(request, response, ex,
"Unprocessable or invalid entity", "Unprocessable or invalid entity",
HttpStatus.UNPROCESSABLE_ENTITY.value()); HttpStatus.UNPROCESSABLE_ENTITY.value());
} }
@@ -140,7 +140,7 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
@ExceptionHandler( {InvalidSearchRequestException.class}) @ExceptionHandler( {InvalidSearchRequestException.class})
protected void handleInvalidSearchRequestException(HttpServletRequest request, HttpServletResponse response, protected void handleInvalidSearchRequestException(HttpServletRequest request, HttpServletResponse response,
Exception ex) throws IOException { Exception ex) throws IOException {
sendErrorResponse(request, response, null, sendErrorResponse(request, response, ex,
"Invalid search request", "Invalid search request",
HttpStatus.UNPROCESSABLE_ENTITY.value()); HttpStatus.UNPROCESSABLE_ENTITY.value());
} }
@@ -180,7 +180,7 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
TranslatableException ex) throws IOException { TranslatableException ex) throws IOException {
Context context = ContextUtil.obtainContext(request); Context context = ContextUtil.obtainContext(request);
sendErrorResponse( sendErrorResponse(
request, response, null, ex.getLocalizedMessage(context), HttpStatus.UNPROCESSABLE_ENTITY.value() request, response, (Exception) ex, ex.getLocalizedMessage(context), HttpStatus.UNPROCESSABLE_ENTITY.value()
); );
} }
@@ -188,7 +188,7 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
protected void ParameterConversionException(HttpServletRequest request, HttpServletResponse response, Exception ex) protected void ParameterConversionException(HttpServletRequest request, HttpServletResponse response, Exception ex)
throws IOException { throws IOException {
// we want the 400 status for missing parameters, see https://jira.lyrasis.org/browse/DS-4428 // we want the 400 status for missing parameters, see https://jira.lyrasis.org/browse/DS-4428
sendErrorResponse(request, response, null, sendErrorResponse(request, response, ex,
"A required parameter is invalid", "A required parameter is invalid",
HttpStatus.BAD_REQUEST.value()); HttpStatus.BAD_REQUEST.value());
} }
@@ -197,7 +197,7 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
protected void MissingParameterException(HttpServletRequest request, HttpServletResponse response, Exception ex) protected void MissingParameterException(HttpServletRequest request, HttpServletResponse response, Exception ex)
throws IOException { throws IOException {
// we want the 400 status for missing parameters, see https://jira.lyrasis.org/browse/DS-4428 // we want the 400 status for missing parameters, see https://jira.lyrasis.org/browse/DS-4428
sendErrorResponse(request, response, null, sendErrorResponse(request, response, ex,
"A required parameter is missing", "A required parameter is missing",
HttpStatus.BAD_REQUEST.value()); HttpStatus.BAD_REQUEST.value());
} }

View File

@@ -21,7 +21,6 @@ import javax.servlet.http.HttpServletRequest;
import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.text.StringEscapeUtils;
import org.apache.commons.validator.routines.EmailValidator; import org.apache.commons.validator.routines.EmailValidator;
import org.apache.http.client.utils.URIBuilder; import org.apache.http.client.utils.URIBuilder;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
@@ -48,7 +47,7 @@ import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Pageable;
import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import org.springframework.web.util.HtmlUtils;
/** /**
* Component to expose item requests. * Component to expose item requests.
* *
@@ -173,11 +172,11 @@ public class RequestItemRepository
username = user.getFullName(); username = user.getFullName();
} else { // An anonymous session may provide a name. } else { // An anonymous session may provide a name.
// Escape username to evade nasty XSS attempts // Escape username to evade nasty XSS attempts
username = StringEscapeUtils.escapeHtml4(rir.getRequestName()); username = HtmlUtils.htmlEscape(rir.getRequestName(),"UTF-8");
} }
// Requester's message text, escaped to evade nasty XSS attempts // Requester's message text, escaped to evade nasty XSS attempts
String message = StringEscapeUtils.escapeHtml4(rir.getRequestMessage()); String message = HtmlUtils.htmlEscape(rir.getRequestMessage(),"UTF-8");
// Create the request. // Create the request.
String token; String token;

View File

@@ -49,6 +49,7 @@ import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.io.StringWriter; import java.io.StringWriter;
import java.io.Writer; import java.io.Writer;
import java.time.Period;
import java.util.UUID; import java.util.UUID;
import org.apache.commons.io.IOUtils; import org.apache.commons.io.IOUtils;
@@ -393,7 +394,7 @@ public class BitstreamRestControllerIT extends AbstractControllerIntegrationTest
.withName("Test Embargoed Bitstream") .withName("Test Embargoed Bitstream")
.withDescription("This bitstream is embargoed") .withDescription("This bitstream is embargoed")
.withMimeType("text/plain") .withMimeType("text/plain")
.withEmbargoPeriod("6 months") .withEmbargoPeriod(Period.ofMonths(6))
.build(); .build();
} }
context.restoreAuthSystemState(); context.restoreAuthSystemState();
@@ -437,7 +438,7 @@ public class BitstreamRestControllerIT extends AbstractControllerIntegrationTest
.withName("Test Embargoed Bitstream") .withName("Test Embargoed Bitstream")
.withDescription("This bitstream is embargoed") .withDescription("This bitstream is embargoed")
.withMimeType("text/plain") .withMimeType("text/plain")
.withEmbargoPeriod("3 months") .withEmbargoPeriod(Period.ofMonths(3))
.build(); .build();
} }
context.restoreAuthSystemState(); context.restoreAuthSystemState();
@@ -480,7 +481,7 @@ public class BitstreamRestControllerIT extends AbstractControllerIntegrationTest
.withName("Test Embargoed Bitstream") .withName("Test Embargoed Bitstream")
.withDescription("This bitstream is embargoed") .withDescription("This bitstream is embargoed")
.withMimeType("text/plain") .withMimeType("text/plain")
.withEmbargoPeriod("-3 months") .withEmbargoPeriod(Period.ofMonths(-3))
.build(); .build();
} }
context.restoreAuthSystemState(); context.restoreAuthSystemState();
@@ -558,7 +559,7 @@ public class BitstreamRestControllerIT extends AbstractControllerIntegrationTest
.withName("Bitstream") .withName("Bitstream")
.withDescription("Description") .withDescription("Description")
.withMimeType("text/plain") .withMimeType("text/plain")
.withEmbargoPeriod("2 week") .withEmbargoPeriod(Period.ofWeeks(2))
.build(); .build();
} }
context.restoreAuthSystemState(); context.restoreAuthSystemState();

View File

@@ -25,6 +25,7 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.io.InputStream; import java.io.InputStream;
import java.time.Period;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Comparator; import java.util.Comparator;
import java.util.List; import java.util.List;
@@ -310,7 +311,7 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest
.withName("Test Embargoed Bitstream") .withName("Test Embargoed Bitstream")
.withDescription("This bitstream is embargoed") .withDescription("This bitstream is embargoed")
.withMimeType("text/plain") .withMimeType("text/plain")
.withEmbargoPeriod("3 months") .withEmbargoPeriod(Period.ofMonths(3))
.build(); .build();
} }
context.restoreAuthSystemState(); context.restoreAuthSystemState();
@@ -363,7 +364,7 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest
.withName("Test Embargoed Bitstream") .withName("Test Embargoed Bitstream")
.withDescription("This bitstream is embargoed") .withDescription("This bitstream is embargoed")
.withMimeType(bitstreamFormat.getMIMEType()) .withMimeType(bitstreamFormat.getMIMEType())
.withEmbargoPeriod("3 months") .withEmbargoPeriod(Period.ofMonths(3))
.build(); .build();
} }
context.restoreAuthSystemState(); context.restoreAuthSystemState();
@@ -517,7 +518,7 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest
.withName("Test Embargoed Bitstream") .withName("Test Embargoed Bitstream")
.withDescription("This bitstream is embargoed") .withDescription("This bitstream is embargoed")
.withMimeType("text/plain") .withMimeType("text/plain")
.withEmbargoPeriod("3 months") .withEmbargoPeriod(Period.ofMonths(3))
.build(); .build();
} }
@@ -577,7 +578,7 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest
.withName("Test Embargoed Bitstream") .withName("Test Embargoed Bitstream")
.withDescription("This bitstream is embargoed") .withDescription("This bitstream is embargoed")
.withMimeType(bitstreamFormat.getMIMEType()) .withMimeType(bitstreamFormat.getMIMEType())
.withEmbargoPeriod("3 months") .withEmbargoPeriod(Period.ofMonths(3))
.build(); .build();
} }
@@ -638,7 +639,7 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest
.withName("Test Embargoed Bitstream") .withName("Test Embargoed Bitstream")
.withDescription("This bitstream is embargoed") .withDescription("This bitstream is embargoed")
.withMimeType("text/plain") .withMimeType("text/plain")
.withEmbargoPeriod("3 months") .withEmbargoPeriod(Period.ofMonths(3))
.build(); .build();
} }
@@ -701,7 +702,7 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest
.withName("Test Embargoed Bitstream") .withName("Test Embargoed Bitstream")
.withDescription("This bitstream is embargoed") .withDescription("This bitstream is embargoed")
.withMimeType(bitstreamFormat.getMIMEType()) .withMimeType(bitstreamFormat.getMIMEType())
.withEmbargoPeriod("3 months") .withEmbargoPeriod(Period.ofMonths(3))
.build(); .build();
} }
@@ -768,7 +769,7 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest
.withName("Test Embargoed Bitstream") .withName("Test Embargoed Bitstream")
.withDescription("This bitstream is embargoed") .withDescription("This bitstream is embargoed")
.withMimeType("text/plain") .withMimeType("text/plain")
.withEmbargoPeriod("3 months") .withEmbargoPeriod(Period.ofMonths(3))
.build(); .build();
} }
@@ -826,7 +827,7 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest
.withName("Test Embargoed Bitstream") .withName("Test Embargoed Bitstream")
.withDescription("This bitstream is embargoed") .withDescription("This bitstream is embargoed")
.withMimeType("text/plain") .withMimeType("text/plain")
.withEmbargoPeriod("3 months") .withEmbargoPeriod(Period.ofMonths(3))
.build(); .build();
} }
@@ -1694,6 +1695,53 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest
.andExpect(jsonPath("$.type", is("bitstream"))); .andExpect(jsonPath("$.type", is("bitstream")));
} }
@Test
public void thumbnailEndpointTestWithSpecialCharactersInFileName() throws Exception {
// Given an Item
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Collection col1 = CollectionBuilder.createCollection(context, parentCommunity)
.withName("Collection 1").build();
Item item = ItemBuilder.createItem(context, col1)
.withTitle("Test item -- thumbnail")
.withIssueDate("2017-10-17")
.withAuthor("Smith, Donald").withAuthor("Doe, John")
.build();
Bundle originalBundle = BundleBuilder.createBundle(context, item)
.withName(Constants.DEFAULT_BUNDLE_NAME)
.build();
Bundle thumbnailBundle = BundleBuilder.createBundle(context, item)
.withName("THUMBNAIL")
.build();
InputStream is = IOUtils.toInputStream("dummy", "utf-8");
// With an ORIGINAL Bitstream & matching THUMBNAIL Bitstream containing special characters in filenames
Bitstream bitstream = BitstreamBuilder.createBitstream(context, originalBundle, is)
.withName("test (2023) file.pdf")
.withMimeType("application/pdf")
.build();
Bitstream thumbnail = BitstreamBuilder.createBitstream(context, thumbnailBundle, is)
.withName("test (2023) file.pdf.jpg")
.withMimeType("image/jpeg")
.build();
context.restoreAuthSystemState();
String tokenAdmin = getAuthToken(admin.getEmail(), password);
getClient(tokenAdmin).perform(get("/api/core/bitstreams/" + bitstream.getID() + "/thumbnail"))
.andExpect(status().isOk())
.andExpect(jsonPath("$.uuid", Matchers.is(thumbnail.getID().toString())))
.andExpect(jsonPath("$.type", is("bitstream")));
}
@Test @Test
public void thumbnailEndpointMultipleThumbnailsWithPrimaryBitstreamTest() throws Exception { public void thumbnailEndpointMultipleThumbnailsWithPrimaryBitstreamTest() throws Exception {
// Given an Item // Given an Item
@@ -1899,7 +1947,7 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest
.withTitle("Test") .withTitle("Test")
.withIssueDate("2010-10-17") .withIssueDate("2010-10-17")
.withAuthor("Smith, Donald") .withAuthor("Smith, Donald")
.withEmbargoPeriod("6 months") .withEmbargoPeriod(Period.ofMonths(6))
.build(); .build();
String bitstreamContent = "This is an archived bitstream"; String bitstreamContent = "This is an archived bitstream";
@@ -2372,7 +2420,7 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest
.withName("Test Embargoed Bitstream") .withName("Test Embargoed Bitstream")
.withDescription("This bitstream is embargoed") .withDescription("This bitstream is embargoed")
.withMimeType("text/plain") .withMimeType("text/plain")
.withEmbargoPeriod("3 months") .withEmbargoPeriod(Period.ofMonths(3))
.build(); .build();
} }

View File

@@ -21,6 +21,8 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.time.Period;
import org.dspace.app.rest.matcher.BrowseEntryResourceMatcher; import org.dspace.app.rest.matcher.BrowseEntryResourceMatcher;
import org.dspace.app.rest.matcher.BrowseIndexMatcher; import org.dspace.app.rest.matcher.BrowseIndexMatcher;
import org.dspace.app.rest.matcher.ItemMatcher; import org.dspace.app.rest.matcher.ItemMatcher;
@@ -776,7 +778,7 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe
.withIssueDate("2017-08-10") .withIssueDate("2017-08-10")
.withAuthor("Mouse, Mickey") .withAuthor("Mouse, Mickey")
.withSubject("Cartoons").withSubject("Mice") .withSubject("Cartoons").withSubject("Mice")
.withEmbargoPeriod("12 months") .withEmbargoPeriod(Period.ofMonths(12))
.build(); .build();
//5. An item that is only readable for an internal groups //5. An item that is only readable for an internal groups
@@ -909,7 +911,7 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe
.withIssueDate("2017-08-10") .withIssueDate("2017-08-10")
.withAuthor("Mouse, Mickey") .withAuthor("Mouse, Mickey")
.withSubject("Cartoons").withSubject("Mice") .withSubject("Cartoons").withSubject("Mice")
.withEmbargoPeriod("12 months") .withEmbargoPeriod(Period.ofMonths(12))
.build(); .build();
//5. An item that is only readable for an internal groups //5. An item that is only readable for an internal groups

View File

@@ -26,6 +26,7 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.io.InputStream; import java.io.InputStream;
import java.time.Period;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.UUID; import java.util.UUID;
@@ -2471,7 +2472,7 @@ public class DiscoveryRestControllerIT extends AbstractControllerIntegrationTest
.withAuthor("test2, test2").withAuthor("Maybe, Maybe") .withAuthor("test2, test2").withAuthor("Maybe, Maybe")
.withSubject("AnotherTest").withSubject("TestingForMore") .withSubject("AnotherTest").withSubject("TestingForMore")
.withSubject("ExtraEntry") .withSubject("ExtraEntry")
.withEmbargoPeriod("12 months") .withEmbargoPeriod(Period.ofMonths(12))
.build(); .build();
//Turn on the authorization again //Turn on the authorization again
@@ -2777,7 +2778,9 @@ public class DiscoveryRestControllerIT extends AbstractControllerIntegrationTest
/** /**
* This test verifies that * This test verifies that
* {@link org.dspace.discovery.indexobject.InprogressSubmissionIndexFactoryImpl#storeInprogressItemFields} * {@link org.dspace.discovery.indexobject.InprogressSubmissionIndexFactoryImpl#storeInprogressItemFields}
* indexes the owning collection of workspace items * indexes the owning collection of workspace items.
*
* @throws java.lang.Exception passed through.
*/ */
@Test @Test
public void discoverSearchObjectsTestForWorkspaceItemInCollectionScope() throws Exception { public void discoverSearchObjectsTestForWorkspaceItemInCollectionScope() throws Exception {
@@ -2828,7 +2831,8 @@ public class DiscoveryRestControllerIT extends AbstractControllerIntegrationTest
/** /**
* This test verifies that * This test verifies that
* {@link org.dspace.discovery.indexobject.InprogressSubmissionIndexFactoryImpl#storeInprogressItemFields} * {@link org.dspace.discovery.indexobject.InprogressSubmissionIndexFactoryImpl#storeInprogressItemFields}
* indexes the owning collection of workflow items * indexes the owning collection of workflow items.
* @throws java.lang.Exception passed through.
*/ */
@Test @Test
public void discoverSearchObjectsTestForWorkflowItemInCollectionScope() throws Exception { public void discoverSearchObjectsTestForWorkflowItemInCollectionScope() throws Exception {

View File

@@ -67,7 +67,7 @@ public class HealthIndicatorsIT extends AbstractControllerIntegrationTest {
match("solrSearchCore", Status.UP, Map.of("status", 0, "detectedPathType", "root")), match("solrSearchCore", Status.UP, Map.of("status", 0, "detectedPathType", "root")),
match("solrStatisticsCore", Status.UP, Map.of("status", 0, "detectedPathType", "root")), match("solrStatisticsCore", Status.UP, Map.of("status", 0, "detectedPathType", "root")),
match("geoIp", UP_WITH_ISSUES_STATUS, match("geoIp", UP_WITH_ISSUES_STATUS,
Map.of("reason", "The required 'dbfile' configuration is missing in solr-statistics.cfg!")) Map.of("reason", "The required 'dbfile' configuration is missing in usage-statistics.cfg!"))
))); )));
} }

Some files were not shown because too many files have changed in this diff Show More