Merge branch 'main' of https://github.com/DSpace/DSpace into CST-11298
@@ -4,13 +4,6 @@
# Can be validated via instructions at:
# https://docs.codecov.io/docs/codecov-yaml#validate-your-repository-yaml

# Tell Codecov not to send a coverage notification until (at least) 2 builds are completed
# Since we run Unit & Integration tests in parallel, this lets Codecov know that coverage
# needs to be merged across those builds
codecov:
  notify:
    after_n_builds: 2

# Settings related to code coverage analysis
coverage:
  status:

@@ -6,6 +6,5 @@ dspace/modules/*/target/
Dockerfile.*
dspace/src/main/docker/dspace-postgres-pgcrypto
dspace/src/main/docker/dspace-postgres-pgcrypto-curl
dspace/src/main/docker/solr
dspace/src/main/docker/README.md
dspace/src/main/docker-compose/

@@ -1,26 +0,0 @@
# This workflow runs whenever a new pull request is created
# TEMPORARILY DISABLED. Unfortunately this doesn't work for PRs created from forked repositories (which is how we tend to create PRs).
# There is no known workaround yet. See https://github.community/t/how-to-use-github-token-for-prs-from-forks/16818
name: Pull Request opened

# Only run for newly opened PRs against the "main" branch
on:
  pull_request:
    types: [opened]
    branches:
      - main

jobs:
  automation:
    runs-on: ubuntu-latest
    steps:
      # Assign the PR to whomever created it. This is useful for visualizing assignments on project boards
      # See https://github.com/marketplace/actions/pull-request-assigner
      - name: Assign PR to creator
        uses: thomaseizinger/assign-pr-creator-action@v1.0.0
        # Note, this authentication token is created automatically
        # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
        # Ignore errors. It is possible the PR was created by someone who cannot be assigned
        continue-on-error: true
.github/pull_request_template.md (10 changes, vendored)

@@ -1,7 +1,7 @@
## References
_Add references/links to any related issues or PRs. These may include:_
* Fixes #[issue-number]
* Related to [REST Contract](https://github.com/DSpace/Rest7Contract)
* Fixes #`issue-number` (if this fixes an issue ticket)
* Related to DSpace/RestContract#`pr-number` (if a corresponding REST Contract PR exists)

## Description
Short summary of changes (1-2 sentences).

@@ -22,5 +22,7 @@ _This checklist provides a reminder of what we are going to look for when review
- [ ] My PR passes Checkstyle validation based on the [Code Style Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Style+Guide).
- [ ] My PR includes Javadoc for _all new (or modified) public methods and classes_. It also includes Javadoc for large or complex private methods.
- [ ] My PR passes all tests and includes new/updated Unit or Integration Tests based on the [Code Testing Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Testing+Guide).
- [ ] If my PR includes new, third-party dependencies (in any `pom.xml`), I've made sure their licenses align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation.
- [ ] If my PR modifies the REST API, I've opened a separate [REST Contract](https://github.com/DSpace/RestContract/blob/main/README.md) PR related to this change.
- [ ] If my PR includes new libraries/dependencies (in any `pom.xml`), I've made sure their licenses align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation.
- [ ] If my PR modifies REST API endpoints, I've opened a separate [REST Contract](https://github.com/DSpace/RestContract/blob/main/README.md) PR related to this change.
- [ ] If my PR includes new configurations, I've provided basic technical documentation in the PR itself.
- [ ] If my PR fixes an issue ticket, I've [linked them together](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue).
.github/workflows/build.yml (37 changes, vendored)

@@ -79,6 +79,39 @@ jobs:
          name: ${{ matrix.type }} results
          path: ${{ matrix.resultsdir }}

      # https://github.com/codecov/codecov-action
      # Upload code coverage report to artifact, so that it can be shared with the 'codecov' job (see below)
      - name: Upload code coverage report to Artifact
        uses: actions/upload-artifact@v3
        with:
          name: ${{ matrix.type }} coverage report
          path: 'dspace/target/site/jacoco-aggregate/jacoco.xml'
          retention-days: 14

  # Codecov upload is a separate job in order to allow us to restart this separate from the entire build/test
  # job above. This is necessary because Codecov uploads seem to randomly fail at times.
  # See https://community.codecov.com/t/upload-issues-unable-to-locate-build-via-github-actions-api/3954
  codecov:
    # Must run after 'tests' job above
    needs: tests
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v3

      # Download artifacts from previous 'tests' job
      - name: Download coverage artifacts
        uses: actions/download-artifact@v3

      # Now attempt upload to Codecov using its action.
      # NOTE: We use a retry action to retry the Codecov upload if it fails the first time.
      #
      # Retry action: https://github.com/marketplace/actions/retry-action
      # Codecov action: https://github.com/codecov/codecov-action
      - name: Upload coverage to Codecov.io
        uses: codecov/codecov-action@v3
        uses: Wandalen/wretry.action@v1.0.36
        with:
          action: codecov/codecov-action@v3
          # Try upload 5 times max
          attempt_limit: 5
          # Run again in 30 seconds
          attempt_delay: 30000
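For readability (the removed and added lines are interleaved above), the upload step after this change reads roughly as follows; this is an illustrative consolidation only, and the workflow file itself is authoritative.

      - name: Upload coverage to Codecov.io
        uses: Wandalen/wretry.action@v1.0.36
        with:
          # Run the Codecov action through the retry wrapper
          action: codecov/codecov-action@v3
          # Try upload 5 times max, waiting 30 seconds between attempts
          attempt_limit: 5
          attempt_delay: 30000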
.github/workflows/codescan.yml (new file, 63 lines, vendored)

@@ -0,0 +1,63 @@
# DSpace CodeQL code scanning configuration for GitHub
# https://docs.github.com/en/code-security/code-scanning
#
# NOTE: Code scanning must be run separate from our default build.yml
# because CodeQL requires a fresh build with all tests *disabled*.
name: "Code Scanning"

# Run this code scan for all pushes / PRs to main or maintenance branches. Also run once a week.
on:
  push:
    branches:
      - main
      - 'dspace-**'
  pull_request:
    branches:
      - main
      - 'dspace-**'
    # Don't run if PR is only updating static documentation
    paths-ignore:
      - '**/*.md'
      - '**/*.txt'
  schedule:
    - cron: "37 0 * * 1"

jobs:
  analyze:
    name: Analyze Code
    runs-on: ubuntu-latest
    # Limit permissions of this GitHub action. Can only write to security-events
    permissions:
      actions: read
      contents: read
      security-events: write

    steps:
      # https://github.com/actions/checkout
      - name: Checkout repository
        uses: actions/checkout@v3

      # https://github.com/actions/setup-java
      - name: Install JDK
        uses: actions/setup-java@v3
        with:
          java-version: 11
          distribution: 'temurin'

      # Initializes the CodeQL tools for scanning.
      # https://github.com/github/codeql-action
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v2
        with:
          # Codescan Javascript as well since a few JS files exist in REST API's interface
          languages: java, javascript

      # Autobuild attempts to build any compiled languages
      # NOTE: Based on testing, this autobuild process works well for DSpace. A custom
      # DSpace build w/caching (like in build.yml) was about the same speed as autobuild.
      - name: Autobuild
        uses: github/codeql-action/autobuild@v2

      # Perform GitHub Code Scanning.
      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v2
.github/workflows/docker.yml (290 changes, vendored)

@@ -15,23 +15,19 @@ on:
permissions:
  contents: read # to fetch code (actions/checkout)

jobs:
  docker:
    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
    if: github.repository == 'dspace/dspace'
    runs-on: ubuntu-latest
    env:
# Define shared environment variables for all jobs below
env:
  # Define tags to use for Docker images based on Git tags/branches (for docker/metadata-action)
  # For a new commit on default branch (main), use the literal tag 'dspace-7_x' on Docker image.
  # For a new commit on default branch (main), use the literal tag 'latest' on Docker image.
  # For a new commit on other branches, use the branch name as the tag for Docker image.
  # For a new tag, copy that tag name as the tag for Docker image.
  IMAGE_TAGS: |
    type=raw,value=dspace-7_x,enable=${{ endsWith(github.ref, github.event.repository.default_branch) }}
    type=raw,value=latest,enable=${{ endsWith(github.ref, github.event.repository.default_branch) }}
    type=ref,event=branch,enable=${{ !endsWith(github.ref, github.event.repository.default_branch) }}
    type=ref,event=tag
  # Define default tag "flavor" for docker/metadata-action per
  # https://github.com/docker/metadata-action#flavor-input
  # We turn off 'latest' tag by default.
  # We manage the 'latest' tag ourselves to the 'main' branch (see settings above)
  TAGS_FLAVOR: |
    latest=false
  # Architectures / Platforms for which we will build Docker images

@@ -40,6 +36,16 @@ jobs:
  # longer (around 45mins or so) which is why we only run it when pushing a new Docker image.
  PLATFORMS: linux/amd64${{ github.event_name != 'pull_request' && ', linux/arm64' || '' }}

jobs:
  ####################################################
  # Build/Push the 'dspace/dspace-dependencies' image.
  # This image is used by all other jobs.
  ####################################################
  dspace-dependencies:
    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
    if: github.repository == 'dspace/dspace'
    runs-on: ubuntu-latest

    steps:
      # https://github.com/actions/checkout
      - name: Checkout codebase
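Illustration only (this step is a sketch modeled on the jobs further below, not an addition from this commit): each image job consumes these shared values by passing them to docker/metadata-action, which resolves the final Docker tags.

      - name: Sync metadata (tags, labels) from GitHub to Docker
        uses: docker/metadata-action@v4
        with:
          images: dspace/dspace            # example image name
          tags: ${{ env.IMAGE_TAGS }}      # tag rules defined above
          flavor: ${{ env.TAGS_FLAVOR }}   # disables the implicit 'latest' tag
      # e.g. a push to the default branch then resolves to the single tag 'dspace/dspace:latest'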
@@ -62,9 +68,6 @@ jobs:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_ACCESS_TOKEN }}

      ####################################################
      # Build/Push the 'dspace/dspace-dependencies' image
      ####################################################
      # https://github.com/docker/metadata-action
      # Get Metadata for docker_build_deps step below
      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-dependencies' image

@@ -78,7 +81,7 @@ jobs:
      # https://github.com/docker/build-push-action
      - name: Build and push 'dspace-dependencies' image
        id: docker_build_deps
        uses: docker/build-push-action@v3
        uses: docker/build-push-action@v4
        with:
          context: .
          file: ./Dockerfile.dependencies
@@ -93,6 +96,35 @@ jobs:
  #######################################
  # Build/Push the 'dspace/dspace' image
  #######################################
  dspace:
    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
    if: github.repository == 'dspace/dspace'
    # Must run after 'dspace-dependencies' job above
    needs: dspace-dependencies
    runs-on: ubuntu-latest

    steps:
      # https://github.com/actions/checkout
      - name: Checkout codebase
        uses: actions/checkout@v3

      # https://github.com/docker/setup-buildx-action
      - name: Setup Docker Buildx
        uses: docker/setup-buildx-action@v2

      # https://github.com/docker/setup-qemu-action
      - name: Set up QEMU emulation to build for multiple architectures
        uses: docker/setup-qemu-action@v2

      # https://github.com/docker/login-action
      - name: Login to DockerHub
        # Only login if not a PR, as PRs only trigger a Docker build and not a push
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_ACCESS_TOKEN }}

      # Get Metadata for docker_build step below
      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace' image
        id: meta_build

@@ -104,7 +136,7 @@ jobs:
      - name: Build and push 'dspace' image
        id: docker_build
        uses: docker/build-push-action@v3
        uses: docker/build-push-action@v4
        with:
          context: .
          file: ./Dockerfile
@@ -116,9 +148,38 @@ jobs:
          tags: ${{ steps.meta_build.outputs.tags }}
          labels: ${{ steps.meta_build.outputs.labels }}

  #####################################################
  #############################################################
  # Build/Push the 'dspace/dspace' image ('-test' tag)
  #####################################################
  #############################################################
  dspace-test:
    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
    if: github.repository == 'dspace/dspace'
    # Must run after 'dspace-dependencies' job above
    needs: dspace-dependencies
    runs-on: ubuntu-latest

    steps:
      # https://github.com/actions/checkout
      - name: Checkout codebase
        uses: actions/checkout@v3

      # https://github.com/docker/setup-buildx-action
      - name: Setup Docker Buildx
        uses: docker/setup-buildx-action@v2

      # https://github.com/docker/setup-qemu-action
      - name: Set up QEMU emulation to build for multiple architectures
        uses: docker/setup-qemu-action@v2

      # https://github.com/docker/login-action
      - name: Login to DockerHub
        # Only login if not a PR, as PRs only trigger a Docker build and not a push
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_ACCESS_TOKEN }}

      # Get Metadata for docker_build_test step below
      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-test' image
        id: meta_build_test

@@ -133,7 +194,7 @@ jobs:
      - name: Build and push 'dspace-test' image
        id: docker_build_test
        uses: docker/build-push-action@v3
        uses: docker/build-push-action@v4
        with:
          context: .
          file: ./Dockerfile.test
@@ -148,6 +209,35 @@ jobs:
  ###########################################
  # Build/Push the 'dspace/dspace-cli' image
  ###########################################
  dspace-cli:
    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
    if: github.repository == 'dspace/dspace'
    # Must run after 'dspace-dependencies' job above
    needs: dspace-dependencies
    runs-on: ubuntu-latest

    steps:
      # https://github.com/actions/checkout
      - name: Checkout codebase
        uses: actions/checkout@v3

      # https://github.com/docker/setup-buildx-action
      - name: Setup Docker Buildx
        uses: docker/setup-buildx-action@v2

      # https://github.com/docker/setup-qemu-action
      - name: Set up QEMU emulation to build for multiple architectures
        uses: docker/setup-qemu-action@v2

      # https://github.com/docker/login-action
      - name: Login to DockerHub
        # Only login if not a PR, as PRs only trigger a Docker build and not a push
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_ACCESS_TOKEN }}

      # Get Metadata for docker_build_test step below
      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-cli' image
        id: meta_build_cli

@@ -159,7 +249,7 @@ jobs:
      - name: Build and push 'dspace-cli' image
        id: docker_build_cli
        uses: docker/build-push-action@v3
        uses: docker/build-push-action@v4
        with:
          context: .
          file: ./Dockerfile.cli
@@ -170,3 +260,167 @@ jobs:
          # Use tags / labels provided by 'docker/metadata-action' above
          tags: ${{ steps.meta_build_cli.outputs.tags }}
          labels: ${{ steps.meta_build_cli.outputs.labels }}

  ###########################################
  # Build/Push the 'dspace/dspace-solr' image
  ###########################################
  dspace-solr:
    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
    if: github.repository == 'dspace/dspace'
    runs-on: ubuntu-latest

    steps:
      # https://github.com/actions/checkout
      - name: Checkout codebase
        uses: actions/checkout@v3

      # https://github.com/docker/setup-buildx-action
      - name: Setup Docker Buildx
        uses: docker/setup-buildx-action@v2

      # https://github.com/docker/setup-qemu-action
      - name: Set up QEMU emulation to build for multiple architectures
        uses: docker/setup-qemu-action@v2

      # https://github.com/docker/login-action
      - name: Login to DockerHub
        # Only login if not a PR, as PRs only trigger a Docker build and not a push
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_ACCESS_TOKEN }}

      # Get Metadata for docker_build_solr step below
      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-solr' image
        id: meta_build_solr
        uses: docker/metadata-action@v4
        with:
          images: dspace/dspace-solr
          tags: ${{ env.IMAGE_TAGS }}
          flavor: ${{ env.TAGS_FLAVOR }}

      - name: Build and push 'dspace-solr' image
        id: docker_build_solr
        uses: docker/build-push-action@v4
        with:
          context: .
          file: ./dspace/src/main/docker/dspace-solr/Dockerfile
          platforms: ${{ env.PLATFORMS }}
          # For pull requests, we run the Docker build (to ensure no PR changes break the build),
          # but we ONLY do an image push to DockerHub if it's NOT a PR
          push: ${{ github.event_name != 'pull_request' }}
          # Use tags / labels provided by 'docker/metadata-action' above
          tags: ${{ steps.meta_build_solr.outputs.tags }}
          labels: ${{ steps.meta_build_solr.outputs.labels }}

  ###########################################################
  # Build/Push the 'dspace/dspace-postgres-pgcrypto' image
  ###########################################################
  dspace-postgres-pgcrypto:
    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
    if: github.repository == 'dspace/dspace'
    runs-on: ubuntu-latest

    steps:
      # https://github.com/actions/checkout
      - name: Checkout codebase
        uses: actions/checkout@v3

      # https://github.com/docker/setup-buildx-action
      - name: Setup Docker Buildx
        uses: docker/setup-buildx-action@v2

      # https://github.com/docker/setup-qemu-action
      - name: Set up QEMU emulation to build for multiple architectures
        uses: docker/setup-qemu-action@v2

      # https://github.com/docker/login-action
      - name: Login to DockerHub
        # Only login if not a PR, as PRs only trigger a Docker build and not a push
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_ACCESS_TOKEN }}

      # Get Metadata for docker_build_postgres step below
      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-postgres-pgcrypto' image
        id: meta_build_postgres
        uses: docker/metadata-action@v4
        with:
          images: dspace/dspace-postgres-pgcrypto
          tags: ${{ env.IMAGE_TAGS }}
          flavor: ${{ env.TAGS_FLAVOR }}

      - name: Build and push 'dspace-postgres-pgcrypto' image
        id: docker_build_postgres
        uses: docker/build-push-action@v4
        with:
          # Must build out of subdirectory to have access to install script for pgcrypto
          context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/
          dockerfile: Dockerfile
          platforms: ${{ env.PLATFORMS }}
          # For pull requests, we run the Docker build (to ensure no PR changes break the build),
          # but we ONLY do an image push to DockerHub if it's NOT a PR
          push: ${{ github.event_name != 'pull_request' }}
          # Use tags / labels provided by 'docker/metadata-action' above
          tags: ${{ steps.meta_build_postgres.outputs.tags }}
          labels: ${{ steps.meta_build_postgres.outputs.labels }}

  ########################################################################
  # Build/Push the 'dspace/dspace-postgres-pgcrypto' image (-loadsql tag)
  ########################################################################
  dspace-postgres-pgcrypto-loadsql:
    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
    if: github.repository == 'dspace/dspace'
    runs-on: ubuntu-latest

    steps:
      # https://github.com/actions/checkout
      - name: Checkout codebase
        uses: actions/checkout@v3

      # https://github.com/docker/setup-buildx-action
      - name: Setup Docker Buildx
        uses: docker/setup-buildx-action@v2

      # https://github.com/docker/setup-qemu-action
      - name: Set up QEMU emulation to build for multiple architectures
        uses: docker/setup-qemu-action@v2

      # https://github.com/docker/login-action
      - name: Login to DockerHub
        # Only login if not a PR, as PRs only trigger a Docker build and not a push
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_ACCESS_TOKEN }}

      # Get Metadata for docker_build_postgres_loadsql step below
      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-postgres-pgcrypto-loadsql' image
        id: meta_build_postgres_loadsql
        uses: docker/metadata-action@v4
        with:
          images: dspace/dspace-postgres-pgcrypto
          tags: ${{ env.IMAGE_TAGS }}
          # Suffix all tags with "-loadsql". Otherwise, it uses the same
          # tagging logic as the primary 'dspace/dspace-postgres-pgcrypto' image above.
          flavor: ${{ env.TAGS_FLAVOR }}
            suffix=-loadsql

      - name: Build and push 'dspace-postgres-pgcrypto-loadsql' image
        id: docker_build_postgres_loadsql
        uses: docker/build-push-action@v4
        with:
          # Must build out of subdirectory to have access to install script for pgcrypto
          context: ./dspace/src/main/docker/dspace-postgres-pgcrypto-curl/
          dockerfile: Dockerfile
          platforms: ${{ env.PLATFORMS }}
          # For pull requests, we run the Docker build (to ensure no PR changes break the build),
          # but we ONLY do an image push to DockerHub if it's NOT a PR
          push: ${{ github.event_name != 'pull_request' }}
          # Use tags / labels provided by 'docker/metadata-action' above
          tags: ${{ steps.meta_build_postgres_loadsql.outputs.tags }}
          labels: ${{ steps.meta_build_postgres_loadsql.outputs.labels }}
.github/workflows/issue_opened.yml (2 changes, vendored)

@@ -16,7 +16,7 @@ jobs:
      # Only add to project board if issue is flagged as "needs triage" or has no labels
      # NOTE: By default we flag new issues as "needs triage" in our issue template
      if: (contains(github.event.issue.labels.*.name, 'needs triage') || join(github.event.issue.labels.*.name) == '')
      uses: actions/add-to-project@v0.3.0
      uses: actions/add-to-project@v0.5.0
      # Note, the authentication token below is an ORG level Secret.
      # It must be created/recreated manually via a personal access token with admin:org, project, public_repo permissions
      # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token#permissions-for-the-github_token
.github/workflows/label_merge_conflicts.yml (11 changes, vendored)

@@ -1,11 +1,12 @@
# This workflow checks open PRs for merge conflicts and labels them when conflicts are found
name: Check for merge conflicts

# Run whenever the "main" branch is updated
# NOTE: This means merge conflicts are only checked for when a PR is merged to main.
# Run this for all pushes (i.e. merges) to 'main' or maintenance branches
on:
  push:
    branches: [ main ]
    branches:
      - main
      - 'dspace-**'
  # So that the `conflict_label_name` is removed if conflicts are resolved,
  # we allow this to run for `pull_request_target` so that github secrets are available.
  pull_request_target:

@@ -23,7 +24,9 @@ jobs:
    steps:
      # See: https://github.com/prince-chrismc/label-merge-conflicts-action
      - name: Auto-label PRs with merge conflicts
        uses: prince-chrismc/label-merge-conflicts-action@v2
        uses: prince-chrismc/label-merge-conflicts-action@v3
        # Ignore any failures -- may occur (randomly?) for older, outdated PRs.
        continue-on-error: true
        # Add "merge conflict" label if a merge conflict is detected. Remove it when resolved.
        # Note, the authentication token is created automatically
        # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token
.github/workflows/port_merged_pull_request.yml (new file, 44 lines, vendored)

@@ -0,0 +1,44 @@
# This workflow will attempt to port a merged pull request to
# the branch specified in a "port to" label (if exists)
name: Port merged Pull Request

# Only run for merged PRs against the "main" or maintenance branches
# We allow this to run for `pull_request_target` so that github secrets are available
# (This is required when the PR comes from a forked repo)
on:
  pull_request_target:
    types: [ closed ]
    branches:
      - main
      - 'dspace-**'

permissions:
  contents: write # so action can add comments
  pull-requests: write # so action can create pull requests

jobs:
  port_pr:
    runs-on: ubuntu-latest
    # Don't run on closed *unmerged* pull requests
    if: github.event.pull_request.merged
    steps:
      # Checkout code
      - uses: actions/checkout@v3
      # Port PR to other branch (ONLY if labeled with "port to")
      # See https://github.com/korthout/backport-action
      - name: Create backport pull requests
        uses: korthout/backport-action@v1
        with:
          # Trigger based on a "port to [branch]" label on PR
          # (This label must specify the branch name to port to)
          label_pattern: '^port to ([^ ]+)$'
          # Title to add to the (newly created) port PR
          pull_title: '[Port ${target_branch}] ${pull_title}'
          # Description to add to the (newly created) port PR
          pull_description: 'Port of #${pull_number} by @${pull_author} to `${target_branch}`.'
          # Copy all labels from original PR to (newly created) port PR
          # NOTE: The labels matching 'label_pattern' are automatically excluded
          copy_labels_pattern: '.*'
          # Use a personal access token (PAT) to create PR as 'dspace-bot' user.
          # A PAT is required in order for the new PR to trigger its own actions (for CI checks)
          github_token: ${{ secrets.PR_PORT_TOKEN }}
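Illustration only (the PR number, author and title below are hypothetical): with the templates above, labeling a merged PR with `port to dspace-7_x` yields a backport PR along these lines.

# label on merged PR #1234 by @example-user:  port to dspace-7_x
# title of the generated PR:                  [Port dspace-7_x] <original PR title>
# description of the generated PR:            Port of #1234 by @example-user to `dspace-7_x`.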
.github/workflows/pull_request_opened.yml (new file, 24 lines, vendored)

@@ -0,0 +1,24 @@
# This workflow runs whenever a new pull request is created
name: Pull Request opened

# Only run for newly opened PRs against the "main" or maintenance branches
# We allow this to run for `pull_request_target` so that github secrets are available
# (This is required to assign a PR back to the creator when the PR comes from a forked repo)
on:
  pull_request_target:
    types: [ opened ]
    branches:
      - main
      - 'dspace-**'

permissions:
  pull-requests: write

jobs:
  automation:
    runs-on: ubuntu-latest
    steps:
      # Assign the PR to whomever created it. This is useful for visualizing assignments on project boards
      # See https://github.com/toshimaru/auto-author-assign
      - name: Assign PR to creator
        uses: toshimaru/auto-author-assign@v1.6.2
CONTRIBUTING.md (new file, 45 lines)

@@ -0,0 +1,45 @@
# How to Contribute

DSpace is a community built and supported project. We do not have a centralized development or support team, but have a dedicated group of volunteers who help us improve the software, documentation, resources, etc.

* [Contribute new code via a Pull Request](#contribute-new-code-via-a-pull-request)
* [Contribute documentation](#contribute-documentation)
* [Help others on mailing lists or Slack](#help-others-on-mailing-lists-or-slack)
* [Join a working or interest group](#join-a-working-or-interest-group)

## Contribute new code via a Pull Request

We accept [GitHub Pull Requests (PRs)](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork) at any time from anyone.
Contributors to each release are recognized in our [Release Notes](https://wiki.lyrasis.org/display/DSDOC7x/Release+Notes).

Code Contribution Checklist
- [ ] PRs _should_ be smaller in size (ideally less than 1,000 lines of code, not including comments & tests)
- [ ] PRs **must** pass Checkstyle validation based on our [Code Style Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Style+Guide).
- [ ] PRs **must** include Javadoc for _all new/modified public methods and classes_. Larger private methods should also have Javadoc
- [ ] PRs **must** pass all automated tests and include new/updated Unit or Integration tests based on our [Code Testing Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Testing+Guide).
- [ ] If a PR includes new libraries/dependencies (in any `pom.xml`), then their software licenses **must** align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation.
- [ ] Basic technical documentation _should_ be provided for any new features or changes to the REST API. REST API changes should be documented in our [Rest Contract](https://github.com/DSpace/RestContract).
- [ ] If a PR fixes an issue ticket, please [link them together](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue).

Additional details on the code contribution process can be found in our [Code Contribution Guidelines](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines)

## Contribute documentation

DSpace Documentation is a collaborative effort in a shared Wiki. The latest documentation is at https://wiki.lyrasis.org/display/DSDOC7x

If you find areas of the DSpace Documentation which you wish to improve, please request a Wiki account by emailing wikihelp@lyrasis.org.
Once you have an account setup, contact @tdonohue (via [Slack](https://wiki.lyrasis.org/display/DSPACE/Slack) or email) for access to edit our Documentation.

## Help others on mailing lists or Slack

DSpace has our own [Slack](https://wiki.lyrasis.org/display/DSPACE/Slack) community and [Mailing Lists](https://wiki.lyrasis.org/display/DSPACE/Mailing+Lists) where discussions take place and questions are answered.
Anyone is welcome to join and help others. We just ask you to follow our [Code of Conduct](https://www.lyrasis.org/about/Pages/Code-of-Conduct.aspx) (adopted via LYRASIS).

## Join a working or interest group

Most of the work in building/improving DSpace comes via [Working Groups](https://wiki.lyrasis.org/display/DSPACE/DSpace+Working+Groups) or [Interest Groups](https://wiki.lyrasis.org/display/DSPACE/DSpace+Interest+Groups).

All working/interest groups are open to anyone to join and participate. A few key groups to be aware of include:

* [DSpace 7 Working Group](https://wiki.lyrasis.org/display/DSPACE/DSpace+7+Working+Group) - This is the main (mostly volunteer) development team. We meet weekly to review our current development [project board](https://github.com/orgs/DSpace/projects), assigning tickets and/or PRs.
* [DSpace Community Advisory Team (DCAT)](https://wiki.lyrasis.org/display/cmtygp/DSpace+Community+Advisory+Team) - This is an interest group for repository managers/administrators. We meet monthly to discuss DSpace, share tips & provide feedback back to developers.
@@ -1,14 +1,15 @@
# This image will be published as dspace/dspace
# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
#
# - note: default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x
# - note: default tag for branch: dspace/dspace: dspace/dspace:latest

# This Dockerfile uses JDK11 by default, but has also been tested with JDK17.
# To build with JDK17, use "--build-arg JDK_VERSION=17"
ARG JDK_VERSION=11
ARG DSPACE_VERSION=latest

# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-7_x as build
FROM dspace/dspace-dependencies:${DSPACE_VERSION} as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-installer directory will be written to /install

@@ -31,7 +32,7 @@ ARG TARGET_DIR=dspace-installer
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.12
ENV ANT_VERSION 1.10.13
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
# Need wget to install ant
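Illustration only (not part of this commit): the new DSPACE_VERSION build arg, like JDK_VERSION, can be overridden from a docker/build-push-action step; the values shown here are examples.

      - name: Build 'dspace' image with explicit build args
        uses: docker/build-push-action@v4
        with:
          context: .
          file: ./Dockerfile
          # Both args are optional; these are example overrides of the Dockerfile defaults
          build-args: |
            JDK_VERSION=17
            DSPACE_VERSION=latest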
@@ -1,14 +1,15 @@
# This image will be published as dspace/dspace-cli
# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
#
# - note: default tag for branch: dspace/dspace-cli: dspace/dspace-cli:dspace-7_x
# - note: default tag for branch: dspace/dspace-cli: dspace/dspace-cli:latest

# This Dockerfile uses JDK11 by default, but has also been tested with JDK17.
# To build with JDK17, use "--build-arg JDK_VERSION=17"
ARG JDK_VERSION=11
ARG DSPACE_VERSION=latest

# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-7_x as build
FROM dspace/dspace-dependencies:${DSPACE_VERSION} as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-installer directory will be written to /install

@@ -30,12 +31,12 @@ ARG TARGET_DIR=dspace-installer
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.12
ENV ANT_VERSION 1.10.13
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
# Need wget to install ant
# Need wget to install ant, and unzip for managing AIPs
RUN apt-get update \
    && apt-get install -y --no-install-recommends wget \
    && apt-get install -y --no-install-recommends wget unzip \
    && apt-get purge -y --auto-remove \
    && rm -rf /var/lib/apt/lists/*
# Download and install 'ant'
@@ -1,16 +1,17 @@
# This image will be published as dspace/dspace
# See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
#
# - note: default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x-test
# - note: default tag for branch: dspace/dspace: dspace/dspace:latest-test
#
# This image is meant for TESTING/DEVELOPMENT ONLY as it deploys the old v6 REST API under HTTP (not HTTPS)

# This Dockerfile uses JDK11 by default, but has also been tested with JDK17.
# To build with JDK17, use "--build-arg JDK_VERSION=17"
ARG JDK_VERSION=11
ARG DSPACE_VERSION=latest

# Step 1 - Run Maven Build
FROM dspace/dspace-dependencies:dspace-7_x as build
FROM dspace/dspace-dependencies:${DSPACE_VERSION} as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-installer directory will be written to /install
@@ -26,20 +26,20 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* AWS Java SDK for Amazon S3 (com.amazonaws:aws-java-sdk-s3:1.12.261 - https://aws.amazon.com/sdkforjava)
|
||||
* JMES Path Query library (com.amazonaws:jmespath-java:1.12.261 - https://aws.amazon.com/sdkforjava)
|
||||
* HPPC Collections (com.carrotsearch:hppc:0.8.1 - http://labs.carrotsearch.com/hppc.html/hppc)
|
||||
* com.drewnoakes:metadata-extractor (com.drewnoakes:metadata-extractor:2.16.0 - https://drewnoakes.com/code/exif/)
|
||||
* com.drewnoakes:metadata-extractor (com.drewnoakes:metadata-extractor:2.18.0 - https://drewnoakes.com/code/exif/)
|
||||
* parso (com.epam:parso:2.0.14 - https://github.com/epam/parso)
|
||||
* Esri Geometry API for Java (com.esri.geometry:esri-geometry-api:2.2.0 - https://github.com/Esri/geometry-api-java)
|
||||
* ClassMate (com.fasterxml:classmate:1.3.0 - http://github.com/cowtowncoder/java-classmate)
|
||||
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.12.6 - http://github.com/FasterXML/jackson)
|
||||
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.12.6 - https://github.com/FasterXML/jackson-core)
|
||||
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.12.6.1 - http://github.com/FasterXML/jackson)
|
||||
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.13.4 - http://github.com/FasterXML/jackson)
|
||||
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.13.4 - https://github.com/FasterXML/jackson-core)
|
||||
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.13.4.2 - http://github.com/FasterXML/jackson)
|
||||
* Jackson dataformat: CBOR (com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.12.6 - http://github.com/FasterXML/jackson-dataformats-binary)
|
||||
* Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.12.3 - http://github.com/FasterXML/jackson-dataformats-binary)
|
||||
* Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.13.3 - http://github.com/FasterXML/jackson-dataformats-binary)
|
||||
* Jackson-dataformat-YAML (com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1 - https://github.com/FasterXML/jackson-dataformats-text)
|
||||
* Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8)
|
||||
* Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.13.5 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8)
|
||||
* Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.11.1 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310)
|
||||
* Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310)
|
||||
* Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names)
|
||||
* Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.13.5 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310)
|
||||
* Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.13.5 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names)
|
||||
* Java UUID Generator (com.fasterxml.uuid:java-uuid-generator:4.0.1 - https://github.com/cowtowncoder/java-uuid-generator)
|
||||
* Woodstox (com.fasterxml.woodstox:woodstox-core:6.2.4 - https://github.com/FasterXML/woodstox)
|
||||
* zjsonpatch (com.flipkart.zjsonpatch:zjsonpatch:0.4.6 - https://github.com/flipkart-incubator/zjsonpatch/)
|
||||
@@ -56,19 +56,19 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* Google Analytics API v3-rev145-1.23.0 (com.google.apis:google-api-services-analytics:v3-rev145-1.23.0 - http://nexus.sonatype.org/oss-repository-hosting.html/google-api-services-analytics)
|
||||
* FindBugs-jsr305 (com.google.code.findbugs:jsr305:3.0.1 - http://findbugs.sourceforge.net/)
|
||||
* Gson (com.google.code.gson:gson:2.9.0 - https://github.com/google/gson/gson)
|
||||
* error-prone annotations (com.google.errorprone:error_prone_annotations:2.7.1 - http://nexus.sonatype.org/oss-repository-hosting.html/error_prone_parent/error_prone_annotations)
|
||||
* error-prone annotations (com.google.errorprone:error_prone_annotations:2.18.0 - https://errorprone.info/error_prone_annotations)
|
||||
* Guava InternalFutureFailureAccess and InternalFutures (com.google.guava:failureaccess:1.0.1 - https://github.com/google/guava/failureaccess)
|
||||
* Guava: Google Core Libraries for Java (com.google.guava:guava:31.0.1-jre - https://github.com/google/guava)
|
||||
* Guava: Google Core Libraries for Java (com.google.guava:guava:32.0.0-jre - https://github.com/google/guava)
|
||||
* Guava: Google Core Libraries for Java (JDK5 Backport) (com.google.guava:guava-jdk5:17.0 - http://code.google.com/p/guava-libraries/guava-jdk5)
|
||||
* Guava ListenableFuture only (com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava - https://github.com/google/guava/listenablefuture)
|
||||
* Google HTTP Client Library for Java (com.google.http-client:google-http-client:1.23.0 - https://github.com/google/google-http-java-client/google-http-client)
|
||||
* GSON extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-gson:1.41.7 - https://github.com/googleapis/google-http-java-client/google-http-client-gson)
|
||||
* Jackson 2 extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-jackson2:1.23.0 - https://github.com/google/google-http-java-client/google-http-client-jackson2)
|
||||
* J2ObjC Annotations (com.google.j2objc:j2objc-annotations:1.3 - https://github.com/google/j2objc/)
|
||||
* J2ObjC Annotations (com.google.j2objc:j2objc-annotations:2.8 - https://github.com/google/j2objc/)
|
||||
* Google OAuth Client Library for Java (com.google.oauth-client:google-oauth-client:1.33.3 - https://github.com/googleapis/google-oauth-java-client/google-oauth-client)
|
||||
* ConcurrentLinkedHashMap (com.googlecode.concurrentlinkedhashmap:concurrentlinkedhashmap-lru:1.4.2 - http://code.google.com/p/concurrentlinkedhashmap)
|
||||
* libphonenumber (com.googlecode.libphonenumber:libphonenumber:8.11.1 - https://github.com/google/libphonenumber/)
|
||||
* Jackcess (com.healthmarketscience.jackcess:jackcess:4.0.1 - https://jackcess.sourceforge.io)
|
||||
* Jackcess (com.healthmarketscience.jackcess:jackcess:4.0.2 - https://jackcess.sourceforge.io)
|
||||
* Jackcess Encrypt (com.healthmarketscience.jackcess:jackcess-encrypt:4.0.1 - http://jackcessencrypt.sf.net)
|
||||
* project ':json-path' (com.jayway.jsonpath:json-path:2.6.0 - https://github.com/jayway/JsonPath)
|
||||
* project ':json-path-assert' (com.jayway.jsonpath:json-path-assert:2.6.0 - https://github.com/jayway/JsonPath)
|
||||
@@ -79,11 +79,20 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* Nimbus JOSE+JWT (com.nimbusds:nimbus-jose-jwt:7.9 - https://bitbucket.org/connect2id/nimbus-jose-jwt)
|
||||
* opencsv (com.opencsv:opencsv:5.6 - http://opencsv.sf.net)
|
||||
* java-libpst (com.pff:java-libpst:0.9.3 - https://github.com/rjohnsondev/java-libpst)
|
||||
* rome (com.rometools:rome:1.18.0 - http://rometools.com/rome)
|
||||
* rome-modules (com.rometools:rome-modules:1.18.0 - http://rometools.com/rome-modules)
|
||||
* rome-utils (com.rometools:rome-utils:1.18.0 - http://rometools.com/rome-utils)
|
||||
* rome (com.rometools:rome:1.19.0 - http://rometools.com/rome)
|
||||
* rome-modules (com.rometools:rome-modules:1.19.0 - http://rometools.com/rome-modules)
|
||||
* rome-utils (com.rometools:rome-utils:1.19.0 - http://rometools.com/rome-utils)
|
||||
* fastinfoset (com.sun.xml.fastinfoset:FastInfoset:1.2.15 - http://fi.java.net)
|
||||
* T-Digest (com.tdunning:t-digest:3.1 - https://github.com/tdunning/t-digest)
|
||||
* config (com.typesafe:config:1.3.3 - https://github.com/lightbend/config)
|
||||
* ssl-config-core (com.typesafe:ssl-config-core_2.13:0.3.8 - https://github.com/lightbend/ssl-config)
|
||||
* akka-actor (com.typesafe.akka:akka-actor_2.13:2.5.31 - https://akka.io/)
|
||||
* akka-http-core (com.typesafe.akka:akka-http-core_2.13:10.1.12 - https://akka.io)
|
||||
* akka-http (com.typesafe.akka:akka-http_2.13:10.1.12 - https://akka.io)
|
||||
* akka-parsing (com.typesafe.akka:akka-parsing_2.13:10.1.12 - https://akka.io)
|
||||
* akka-protobuf (com.typesafe.akka:akka-protobuf_2.13:2.5.31 - https://akka.io/)
|
||||
* akka-stream (com.typesafe.akka:akka-stream_2.13:2.5.31 - https://akka.io/)
|
||||
* scala-logging (com.typesafe.scala-logging:scala-logging_2.13:3.9.2 - https://github.com/lightbend/scala-logging)
|
||||
* JSON library from Android SDK (com.vaadin.external.google:android-json:0.0.20131108.vaadin1 - http://developer.android.com/sdk)
|
||||
* SparseBitSet (com.zaxxer:SparseBitSet:1.2 - https://github.com/brettwooldridge/SparseBitSet)
|
||||
* Apache Commons BeanUtils (commons-beanutils:commons-beanutils:1.9.4 - https://commons.apache.org/proper/commons-beanutils/)
|
||||
@@ -91,20 +100,19 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* Apache Commons Codec (commons-codec:commons-codec:1.10 - http://commons.apache.org/proper/commons-codec/)
|
||||
* Apache Commons Collections (commons-collections:commons-collections:3.2.2 - http://commons.apache.org/collections/)
|
||||
* Commons Digester (commons-digester:commons-digester:1.8.1 - http://commons.apache.org/digester/)
|
||||
* Apache Commons FileUpload (commons-fileupload:commons-fileupload:1.3.3 - http://commons.apache.org/proper/commons-fileupload/)
|
||||
* Apache Commons FileUpload (commons-fileupload:commons-fileupload:1.5 - https://commons.apache.org/proper/commons-fileupload/)
|
||||
* Apache Commons IO (commons-io:commons-io:2.7 - https://commons.apache.org/proper/commons-io/)
|
||||
* Commons Lang (commons-lang:commons-lang:2.6 - http://commons.apache.org/lang/)
|
||||
* Apache Commons Logging (commons-logging:commons-logging:1.2 - http://commons.apache.org/proper/commons-logging/)
|
||||
* Apache Commons Validator (commons-validator:commons-validator:1.5.0 - http://commons.apache.org/proper/commons-validator/)
|
||||
* GeoJson POJOs for Jackson (de.grundid.opendatalab:geojson-jackson:1.14 - https://github.com/opendatalab-de/geojson-jackson)
|
||||
* Boilerpipe -- Boilerplate Removal and Fulltext Extraction from HTML pages (de.l3s.boilerpipe:boilerpipe:1.1.0 - http://code.google.com/p/boilerpipe/)
|
||||
* OpenAIRE Funders Model (eu.openaire:funders-model:2.0.0 - https://api.openaire.eu)
|
||||
* Metrics Core (io.dropwizard.metrics:metrics-core:4.1.5 - https://metrics.dropwizard.io/metrics-core)
|
||||
* Graphite Integration for Metrics (io.dropwizard.metrics:metrics-graphite:4.1.5 - https://metrics.dropwizard.io/metrics-graphite)
|
||||
* Metrics Integration for Jetty 9.3 and higher (io.dropwizard.metrics:metrics-jetty9:4.1.5 - https://metrics.dropwizard.io/metrics-jetty9)
|
||||
* Metrics Integration with JMX (io.dropwizard.metrics:metrics-jmx:4.1.5 - https://metrics.dropwizard.io/metrics-jmx)
|
||||
* JVM Integration for Metrics (io.dropwizard.metrics:metrics-jvm:4.1.5 - https://metrics.dropwizard.io/metrics-jvm)
|
||||
* micrometer-core (io.micrometer:micrometer-core:1.8.6 - https://github.com/micrometer-metrics/micrometer)
|
||||
* micrometer-core (io.micrometer:micrometer-core:1.9.11 - https://github.com/micrometer-metrics/micrometer)
|
||||
* Netty/Buffer (io.netty:netty-buffer:4.1.68.Final - https://netty.io/netty-buffer/)
|
||||
* Netty/Codec (io.netty:netty-codec:4.1.68.Final - https://netty.io/netty-codec/)
|
||||
* Netty/Codec/HTTP (io.netty:netty-codec-http:4.1.53.Final - https://netty.io/netty-codec-http/)
|
||||
@@ -151,7 +159,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* I18N Libraries (org.apache.abdera:abdera-i18n:1.1.3 - http://abdera.apache.org)
|
||||
* Apache Ant Core (org.apache.ant:ant:1.10.11 - https://ant.apache.org/)
|
||||
* Apache Ant Launcher (org.apache.ant:ant-launcher:1.10.11 - https://ant.apache.org/)
|
||||
* Apache Commons BCEL (org.apache.bcel:bcel:6.4.0 - https://commons.apache.org/proper/commons-bcel)
|
||||
* Apache Commons BCEL (org.apache.bcel:bcel:6.6.0 - https://commons.apache.org/proper/commons-bcel)
|
||||
* Calcite Core (org.apache.calcite:calcite-core:1.27.0 - https://calcite.apache.org)
|
||||
* Calcite Linq4j (org.apache.calcite:calcite-linq4j:1.27.0 - https://calcite.apache.org)
|
||||
* Apache Calcite Avatica (org.apache.calcite.avatica:avatica-core:1.18.0 - https://calcite.apache.org/avatica)
|
||||
@@ -159,12 +167,12 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* Apache Commons Compress (org.apache.commons:commons-compress:1.21 - https://commons.apache.org/proper/commons-compress/)
|
||||
* Apache Commons Configuration (org.apache.commons:commons-configuration2:2.8.0 - https://commons.apache.org/proper/commons-configuration/)
|
||||
* Apache Commons CSV (org.apache.commons:commons-csv:1.9.0 - https://commons.apache.org/proper/commons-csv/)
|
||||
* Apache Commons DBCP (org.apache.commons:commons-dbcp2:2.8.0 - https://commons.apache.org/dbcp/)
|
||||
* Apache Commons DBCP (org.apache.commons:commons-dbcp2:2.9.0 - https://commons.apache.org/dbcp/)
|
||||
* Apache Commons Exec (org.apache.commons:commons-exec:1.3 - http://commons.apache.org/proper/commons-exec/)
|
||||
* Apache Commons Lang (org.apache.commons:commons-lang3:3.12.0 - https://commons.apache.org/proper/commons-lang/)
|
||||
* Apache Commons Math (org.apache.commons:commons-math3:3.6.1 - http://commons.apache.org/proper/commons-math/)
|
||||
* Apache Commons Pool (org.apache.commons:commons-pool2:2.9.0 - https://commons.apache.org/proper/commons-pool/)
|
||||
* Apache Commons Text (org.apache.commons:commons-text:1.9 - https://commons.apache.org/proper/commons-text)
|
||||
* Apache Commons Pool (org.apache.commons:commons-pool2:2.11.1 - https://commons.apache.org/proper/commons-pool/)
|
||||
* Apache Commons Text (org.apache.commons:commons-text:1.10.0 - https://commons.apache.org/proper/commons-text)
|
||||
* Curator Client (org.apache.curator:curator-client:2.13.0 - http://curator.apache.org/curator-client)
|
||||
* Curator Framework (org.apache.curator:curator-framework:2.13.0 - http://curator.apache.org/curator-framework)
|
||||
* Curator Recipes (org.apache.curator:curator-recipes:2.13.0 - http://curator.apache.org/curator-recipes)
|
||||
@@ -188,88 +196,87 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* Kerby-kerb Util (org.apache.kerby:kerb-util:1.0.1 - http://directory.apache.org/kerby/kerby-kerb/kerb-util)
|
||||
* Kerby ASN1 Project (org.apache.kerby:kerby-asn1:1.0.1 - http://directory.apache.org/kerby/kerby-common/kerby-asn1)
|
||||
* Kerby PKIX Project (org.apache.kerby:kerby-pkix:1.0.1 - http://directory.apache.org/kerby/kerby-pkix)
|
||||
* Apache Log4j 1.x Compatibility API (org.apache.logging.log4j:log4j-1.2-api:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-1.2-api/)
|
||||
* Apache Log4j API (org.apache.logging.log4j:log4j-api:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-api/)
|
||||
* Apache Log4j Core (org.apache.logging.log4j:log4j-core:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-core/)
|
||||
* Apache Log4j JUL Adapter (org.apache.logging.log4j:log4j-jul:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-jul/)
|
||||
* Apache Log4j Layout for JSON template (org.apache.logging.log4j:log4j-layout-template-json:2.16.0 - https://logging.apache.org/log4j/2.x/log4j-layout-template-json/)
|
||||
* Apache Log4j SLF4J Binding (org.apache.logging.log4j:log4j-slf4j-impl:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-slf4j-impl/)
|
||||
* Apache Log4j Web (org.apache.logging.log4j:log4j-web:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-web/)
|
||||
* Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-common)
|
||||
* Lucene ICU Analysis Components (org.apache.lucene:lucene-analyzers-icu:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-icu)
|
||||
* Lucene Kuromoji Japanese Morphological Analyzer (org.apache.lucene:lucene-analyzers-kuromoji:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-kuromoji)
|
||||
* Lucene Nori Korean Morphological Analyzer (org.apache.lucene:lucene-analyzers-nori:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-nori)
|
||||
* Lucene Phonetic Filters (org.apache.lucene:lucene-analyzers-phonetic:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-phonetic)
|
||||
* Lucene Smart Chinese Analyzer (org.apache.lucene:lucene-analyzers-smartcn:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-smartcn)
|
||||
* Lucene Stempel Analyzer (org.apache.lucene:lucene-analyzers-stempel:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-stempel)
|
||||
* Lucene Memory (org.apache.lucene:lucene-backward-codecs:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-backward-codecs)
|
||||
* Lucene Classification (org.apache.lucene:lucene-classification:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-classification)
|
||||
* Lucene codecs (org.apache.lucene:lucene-codecs:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-codecs)
|
||||
* Lucene Core (org.apache.lucene:lucene-core:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-core)
|
||||
* Lucene Expressions (org.apache.lucene:lucene-expressions:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-expressions)
|
||||
* Lucene Grouping (org.apache.lucene:lucene-grouping:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-grouping)
|
||||
* Lucene Highlighter (org.apache.lucene:lucene-highlighter:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-highlighter)
|
||||
* Lucene Join (org.apache.lucene:lucene-join:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-join)
|
||||
* Lucene Memory (org.apache.lucene:lucene-memory:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-memory)
|
||||
* Lucene Miscellaneous (org.apache.lucene:lucene-misc:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-misc)
|
||||
* Lucene Queries (org.apache.lucene:lucene-queries:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-queries)
|
||||
* Lucene QueryParsers (org.apache.lucene:lucene-queryparser:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-queryparser)
|
||||
* Lucene Sandbox (org.apache.lucene:lucene-sandbox:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-sandbox)
|
||||
* Lucene Spatial Extras (org.apache.lucene:lucene-spatial-extras:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-spatial-extras)
|
||||
* Lucene Spatial 3D (org.apache.lucene:lucene-spatial3d:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-spatial3d)
|
||||
* Lucene Suggest (org.apache.lucene:lucene-suggest:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-suggest)
|
||||
* Apache FontBox (org.apache.pdfbox:fontbox:2.0.24 - http://pdfbox.apache.org/)
|
||||
* PDFBox JBIG2 ImageIO plugin (org.apache.pdfbox:jbig2-imageio:3.0.3 - https://www.apache.org/jbig2-imageio/)
|
||||
* Apache JempBox (org.apache.pdfbox:jempbox:1.8.16 - http://www.apache.org/pdfbox-parent/jempbox/)
|
||||
* Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.24 - https://www.apache.org/pdfbox-parent/pdfbox/)
|
||||
* Apache PDFBox Debugger (org.apache.pdfbox:pdfbox-debugger:2.0.25 - https://www.apache.org/pdfbox-parent/pdfbox-debugger/)
|
||||
* Apache PDFBox tools (org.apache.pdfbox:pdfbox-tools:2.0.25 - https://www.apache.org/pdfbox-parent/pdfbox-tools/)
|
||||
* Apache XmpBox (org.apache.pdfbox:xmpbox:2.0.25 - https://www.apache.org/pdfbox-parent/xmpbox/)
|
||||
* Apache POI - Common (org.apache.poi:poi:5.2.0 - https://poi.apache.org/)
|
||||
* Apache POI - API based on OPC and OOXML schemas (org.apache.poi:poi-ooxml:5.2.0 - https://poi.apache.org/)
|
||||
* Apache POI (org.apache.poi:poi-ooxml-lite:5.2.0 - https://poi.apache.org/)
|
||||
* Apache POI (org.apache.poi:poi-scratchpad:5.2.0 - https://poi.apache.org/)
|
||||
* Apache Solr Core (org.apache.solr:solr-core:8.11.1 - https://lucene.apache.org/solr-parent/solr-core)
|
||||
* Apache Solr Solrj (org.apache.solr:solr-solrj:8.11.1 - https://lucene.apache.org/solr-parent/solr-solrj)
|
||||
* Apache Log4j 1.x Compatibility API (org.apache.logging.log4j:log4j-1.2-api:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-1.2-api/)
|
||||
* Apache Log4j API (org.apache.logging.log4j:log4j-api:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-api/)
|
||||
* Apache Log4j Core (org.apache.logging.log4j:log4j-core:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-core/)
|
||||
* Apache Log4j JUL Adapter (org.apache.logging.log4j:log4j-jul:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-jul/)
|
||||
* Apache Log4j Layout for JSON template (org.apache.logging.log4j:log4j-layout-template-json:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-layout-template-json/)
|
||||
* Apache Log4j SLF4J Binding (org.apache.logging.log4j:log4j-slf4j-impl:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-slf4j-impl/)
|
||||
* Apache Log4j Web (org.apache.logging.log4j:log4j-web:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-web/)
|
||||
* Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-common)
|
||||
* Lucene ICU Analysis Components (org.apache.lucene:lucene-analyzers-icu:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-icu)
|
||||
* Lucene Kuromoji Japanese Morphological Analyzer (org.apache.lucene:lucene-analyzers-kuromoji:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-kuromoji)
|
||||
* Lucene Nori Korean Morphological Analyzer (org.apache.lucene:lucene-analyzers-nori:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-nori)
|
||||
* Lucene Phonetic Filters (org.apache.lucene:lucene-analyzers-phonetic:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-phonetic)
|
||||
* Lucene Smart Chinese Analyzer (org.apache.lucene:lucene-analyzers-smartcn:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-smartcn)
|
||||
* Lucene Stempel Analyzer (org.apache.lucene:lucene-analyzers-stempel:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-stempel)
|
||||
* Lucene Memory (org.apache.lucene:lucene-backward-codecs:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-backward-codecs)
|
||||
* Lucene Classification (org.apache.lucene:lucene-classification:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-classification)
|
||||
* Lucene codecs (org.apache.lucene:lucene-codecs:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-codecs)
|
||||
* Lucene Core (org.apache.lucene:lucene-core:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-core)
|
||||
* Lucene Expressions (org.apache.lucene:lucene-expressions:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-expressions)
|
||||
* Lucene Grouping (org.apache.lucene:lucene-grouping:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-grouping)
|
||||
* Lucene Highlighter (org.apache.lucene:lucene-highlighter:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-highlighter)
|
||||
* Lucene Join (org.apache.lucene:lucene-join:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-join)
|
||||
* Lucene Memory (org.apache.lucene:lucene-memory:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-memory)
|
||||
* Lucene Miscellaneous (org.apache.lucene:lucene-misc:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-misc)
|
||||
* Lucene Queries (org.apache.lucene:lucene-queries:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-queries)
|
||||
* Lucene QueryParsers (org.apache.lucene:lucene-queryparser:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-queryparser)
|
||||
* Lucene Sandbox (org.apache.lucene:lucene-sandbox:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-sandbox)
|
||||
* Lucene Spatial Extras (org.apache.lucene:lucene-spatial-extras:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-spatial-extras)
|
||||
* Lucene Spatial 3D (org.apache.lucene:lucene-spatial3d:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-spatial3d)
|
||||
* Lucene Suggest (org.apache.lucene:lucene-suggest:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-suggest)
|
||||
* Apache FontBox (org.apache.pdfbox:fontbox:2.0.28 - http://pdfbox.apache.org/)
|
||||
* PDFBox JBIG2 ImageIO plugin (org.apache.pdfbox:jbig2-imageio:3.0.4 - https://www.apache.org/jbig2-imageio/)
|
||||
* Apache JempBox (org.apache.pdfbox:jempbox:1.8.17 - http://www.apache.org/pdfbox-parent/jempbox/)
|
||||
* Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.28 - https://www.apache.org/pdfbox-parent/pdfbox/)
|
||||
* Apache PDFBox tools (org.apache.pdfbox:pdfbox-tools:2.0.27 - https://www.apache.org/pdfbox-parent/pdfbox-tools/)
|
||||
* Apache XmpBox (org.apache.pdfbox:xmpbox:2.0.27 - https://www.apache.org/pdfbox-parent/xmpbox/)
|
||||
* Apache POI - Common (org.apache.poi:poi:5.2.3 - https://poi.apache.org/)
|
||||
* Apache POI - API based on OPC and OOXML schemas (org.apache.poi:poi-ooxml:5.2.3 - https://poi.apache.org/)
|
||||
* Apache POI (org.apache.poi:poi-ooxml-lite:5.2.3 - https://poi.apache.org/)
|
||||
* Apache POI (org.apache.poi:poi-scratchpad:5.2.3 - https://poi.apache.org/)
|
||||
* Apache Solr Core (org.apache.solr:solr-core:8.11.2 - https://lucene.apache.org/solr-parent/solr-core)
|
||||
* Apache Solr Solrj (org.apache.solr:solr-solrj:8.11.2 - https://lucene.apache.org/solr-parent/solr-solrj)
|
||||
* Apache Standard Taglib Implementation (org.apache.taglibs:taglibs-standard-impl:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-impl)
|
||||
* Apache Standard Taglib Specification API (org.apache.taglibs:taglibs-standard-spec:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-spec)
|
||||
* Apache Thrift (org.apache.thrift:libthrift:0.9.2 - http://thrift.apache.org)
|
||||
* Apache Tika core (org.apache.tika:tika-core:2.3.0 - https://tika.apache.org/)
|
||||
* Apache Tika Apple parser module (org.apache.tika:tika-parser-apple-module:2.3.0 - https://tika.apache.org/tika-parser-apple-module/)
|
||||
* Apache Tika audiovideo parser module (org.apache.tika:tika-parser-audiovideo-module:2.3.0 - https://tika.apache.org/tika-parser-audiovideo-module/)
|
||||
* Apache Tika cad parser module (org.apache.tika:tika-parser-cad-module:2.3.0 - https://tika.apache.org/tika-parser-cad-module/)
|
||||
* Apache Tika code parser module (org.apache.tika:tika-parser-code-module:2.3.0 - https://tika.apache.org/tika-parser-code-module/)
|
||||
* Apache Tika crypto parser module (org.apache.tika:tika-parser-crypto-module:2.3.0 - https://tika.apache.org/tika-parser-crypto-module/)
|
||||
* Apache Tika digest commons (org.apache.tika:tika-parser-digest-commons:2.3.0 - https://tika.apache.org/tika-parser-digest-commons/)
|
||||
* Apache Tika font parser module (org.apache.tika:tika-parser-font-module:2.3.0 - https://tika.apache.org/tika-parser-font-module/)
|
||||
* Apache Tika html commons (org.apache.tika:tika-parser-html-commons:2.3.0 - https://tika.apache.org/tika-parser-html-commons/)
|
||||
* Apache Tika html parser module (org.apache.tika:tika-parser-html-module:2.3.0 - https://tika.apache.org/tika-parser-html-module/)
|
||||
* Apache Tika image parser module (org.apache.tika:tika-parser-image-module:2.3.0 - https://tika.apache.org/tika-parser-image-module/)
|
||||
* Apache Tika mail commons (org.apache.tika:tika-parser-mail-commons:2.3.0 - https://tika.apache.org/tika-parser-mail-commons/)
|
||||
* Apache Tika mail parser module (org.apache.tika:tika-parser-mail-module:2.3.0 - https://tika.apache.org/tika-parser-mail-module/)
|
||||
* Apache Tika Microsoft parser module (org.apache.tika:tika-parser-microsoft-module:2.3.0 - https://tika.apache.org/tika-parser-microsoft-module/)
|
||||
* Apache Tika miscellaneous office format parser module (org.apache.tika:tika-parser-miscoffice-module:2.3.0 - https://tika.apache.org/tika-parser-miscoffice-module/)
|
||||
* Apache Tika news parser module (org.apache.tika:tika-parser-news-module:2.3.0 - https://tika.apache.org/tika-parser-news-module/)
|
||||
* Apache Tika OCR parser module (org.apache.tika:tika-parser-ocr-module:2.3.0 - https://tika.apache.org/tika-parser-ocr-module/)
|
||||
* Apache Tika PDF parser module (org.apache.tika:tika-parser-pdf-module:2.3.0 - https://tika.apache.org/tika-parser-pdf-module/)
|
||||
* Apache Tika package parser module (org.apache.tika:tika-parser-pkg-module:2.3.0 - https://tika.apache.org/tika-parser-pkg-module/)
|
||||
* Apache Tika text parser module (org.apache.tika:tika-parser-text-module:2.3.0 - https://tika.apache.org/tika-parser-text-module/)
|
||||
* Apache Tika XML parser module (org.apache.tika:tika-parser-xml-module:2.3.0 - https://tika.apache.org/tika-parser-xml-module/)
|
||||
* Apache Tika XMP commons (org.apache.tika:tika-parser-xmp-commons:2.3.0 - https://tika.apache.org/tika-parser-xmp-commons/)
|
||||
* Apache Tika ZIP commons (org.apache.tika:tika-parser-zip-commons:2.3.0 - https://tika.apache.org/tika-parser-zip-commons/)
|
||||
* Apache Tika standard parser package (org.apache.tika:tika-parsers-standard-package:2.3.0 - https://tika.apache.org/tika-parsers/tika-parsers-standard/tika-parsers-standard-package/)
|
||||
* tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:9.0.63 - https://tomcat.apache.org/)
|
||||
* tomcat-embed-el (org.apache.tomcat.embed:tomcat-embed-el:9.0.63 - https://tomcat.apache.org/)
|
||||
* tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:9.0.63 - https://tomcat.apache.org/)
|
||||
* Apache Tika core (org.apache.tika:tika-core:2.5.0 - https://tika.apache.org/)
|
||||
* Apache Tika Apple parser module (org.apache.tika:tika-parser-apple-module:2.5.0 - https://tika.apache.org/tika-parser-apple-module/)
|
||||
* Apache Tika audiovideo parser module (org.apache.tika:tika-parser-audiovideo-module:2.5.0 - https://tika.apache.org/tika-parser-audiovideo-module/)
|
||||
* Apache Tika cad parser module (org.apache.tika:tika-parser-cad-module:2.5.0 - https://tika.apache.org/tika-parser-cad-module/)
|
||||
* Apache Tika code parser module (org.apache.tika:tika-parser-code-module:2.5.0 - https://tika.apache.org/tika-parser-code-module/)
|
||||
* Apache Tika crypto parser module (org.apache.tika:tika-parser-crypto-module:2.5.0 - https://tika.apache.org/tika-parser-crypto-module/)
|
||||
* Apache Tika digest commons (org.apache.tika:tika-parser-digest-commons:2.5.0 - https://tika.apache.org/tika-parser-digest-commons/)
|
||||
* Apache Tika font parser module (org.apache.tika:tika-parser-font-module:2.5.0 - https://tika.apache.org/tika-parser-font-module/)
|
||||
* Apache Tika html parser module (org.apache.tika:tika-parser-html-module:2.5.0 - https://tika.apache.org/tika-parser-html-module/)
|
||||
* Apache Tika image parser module (org.apache.tika:tika-parser-image-module:2.5.0 - https://tika.apache.org/tika-parser-image-module/)
|
||||
* Apache Tika mail commons (org.apache.tika:tika-parser-mail-commons:2.5.0 - https://tika.apache.org/tika-parser-mail-commons/)
|
||||
* Apache Tika mail parser module (org.apache.tika:tika-parser-mail-module:2.5.0 - https://tika.apache.org/tika-parser-mail-module/)
|
||||
* Apache Tika Microsoft parser module (org.apache.tika:tika-parser-microsoft-module:2.5.0 - https://tika.apache.org/tika-parser-microsoft-module/)
|
||||
* Apache Tika miscellaneous office format parser module (org.apache.tika:tika-parser-miscoffice-module:2.5.0 - https://tika.apache.org/tika-parser-miscoffice-module/)
|
||||
* Apache Tika news parser module (org.apache.tika:tika-parser-news-module:2.5.0 - https://tika.apache.org/tika-parser-news-module/)
|
||||
* Apache Tika OCR parser module (org.apache.tika:tika-parser-ocr-module:2.5.0 - https://tika.apache.org/tika-parser-ocr-module/)
|
||||
* Apache Tika PDF parser module (org.apache.tika:tika-parser-pdf-module:2.5.0 - https://tika.apache.org/tika-parser-pdf-module/)
|
||||
* Apache Tika package parser module (org.apache.tika:tika-parser-pkg-module:2.5.0 - https://tika.apache.org/tika-parser-pkg-module/)
|
||||
* Apache Tika text parser module (org.apache.tika:tika-parser-text-module:2.5.0 - https://tika.apache.org/tika-parser-text-module/)
|
||||
* Apache Tika WARC parser module (org.apache.tika:tika-parser-webarchive-module:2.5.0 - https://tika.apache.org/tika-parser-webarchive-module/)
|
||||
* Apache Tika XML parser module (org.apache.tika:tika-parser-xml-module:2.5.0 - https://tika.apache.org/tika-parser-xml-module/)
|
||||
* Apache Tika XMP commons (org.apache.tika:tika-parser-xmp-commons:2.5.0 - https://tika.apache.org/tika-parser-xmp-commons/)
|
||||
* Apache Tika ZIP commons (org.apache.tika:tika-parser-zip-commons:2.5.0 - https://tika.apache.org/tika-parser-zip-commons/)
|
||||
* Apache Tika standard parser package (org.apache.tika:tika-parsers-standard-package:2.5.0 - https://tika.apache.org/tika-parsers/tika-parsers-standard/tika-parsers-standard-package/)
|
||||
* tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:9.0.75 - https://tomcat.apache.org/)
|
||||
* tomcat-embed-el (org.apache.tomcat.embed:tomcat-embed-el:9.0.75 - https://tomcat.apache.org/)
|
||||
* tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:9.0.75 - https://tomcat.apache.org/)
|
||||
* Apache Velocity - Engine (org.apache.velocity:velocity-engine-core:2.3 - http://velocity.apache.org/engine/devel/velocity-engine-core/)
|
||||
* Apache Velocity - JSR 223 Scripting (org.apache.velocity:velocity-engine-scripting:2.2 - http://velocity.apache.org/engine/devel/velocity-engine-scripting/)
|
||||
* Axiom API (org.apache.ws.commons.axiom:axiom-api:1.2.22 - http://ws.apache.org/axiom/)
|
||||
* Abdera Model (FOM) Implementation (org.apache.ws.commons.axiom:fom-impl:1.2.22 - http://ws.apache.org/axiom/implementations/fom-impl/)
|
||||
* XmlBeans (org.apache.xmlbeans:xmlbeans:5.0.3 - https://xmlbeans.apache.org/)
|
||||
* XmlBeans (org.apache.xmlbeans:xmlbeans:5.1.1 - https://xmlbeans.apache.org/)
|
||||
* Apache ZooKeeper - Server (org.apache.zookeeper:zookeeper:3.6.2 - http://zookeeper.apache.org/zookeeper)
|
||||
* Apache ZooKeeper - Jute (org.apache.zookeeper:zookeeper-jute:3.6.2 - http://zookeeper.apache.org/zookeeper-jute)
|
||||
* org.apiguardian:apiguardian-api (org.apiguardian:apiguardian-api:1.1.0 - https://github.com/apiguardian-team/apiguardian)
|
||||
* AssertJ fluent assertions (org.assertj:assertj-core:3.21.0 - https://assertj.github.io/doc/assertj-core/)
|
||||
* AssertJ fluent assertions (org.assertj:assertj-core:3.22.0 - https://assertj.github.io/doc/assertj-core/)
|
||||
* Evo Inflector (org.atteo:evo-inflector:1.3 - http://atteo.org/static/evo-inflector)
|
||||
* jose4j (org.bitbucket.b_c:jose4j:0.6.5 - https://bitbucket.org/b_c/jose4j/)
|
||||
* TagSoup (org.ccil.cowan.tagsoup:tagsoup:1.2.1 - http://home.ccil.org/~cowan/XML/tagsoup/)
|
||||
@@ -279,34 +286,34 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - http://tomcat.apache.org/taglibs/standard/)
|
||||
* Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client)
|
||||
* Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client)
|
||||
* Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server)
|
||||
* Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server)
|
||||
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
|
||||
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
|
||||
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
|
||||
* Jetty :: Servlet Annotations (org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty)
|
||||
* Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-client)
|
||||
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-continuation)
|
||||
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-continuation)
|
||||
* Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-deploy)
|
||||
* Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-http)
|
||||
* Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-io)
|
||||
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-continuation)
|
||||
* Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-deploy)
|
||||
* Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-http)
|
||||
* Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-io)
|
||||
* Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-jmx)
|
||||
* Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty)
|
||||
* Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty)
|
||||
* Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-rewrite)
|
||||
* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-security)
|
||||
* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-security)
|
||||
* Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-server)
|
||||
* Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlet)
|
||||
* Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlets)
|
||||
* Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util)
|
||||
* Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util-ajax)
|
||||
* Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-webapp)
|
||||
* Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-xml)
|
||||
* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-security)
|
||||
* Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-server)
|
||||
* Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlet)
|
||||
* Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlets)
|
||||
* Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-util)
|
||||
* Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-util-ajax)
|
||||
* Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-webapp)
|
||||
* Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-xml)
|
||||
* Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-client)
|
||||
* Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-common)
|
||||
* Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-common)
|
||||
* Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-hpack)
|
||||
* Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport)
|
||||
* Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-server)
|
||||
* Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-server)
|
||||
* Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas)
|
||||
* Ehcache (org.ehcache:ehcache:3.4.0 - http://ehcache.org)
|
||||
* flyway-core (org.flywaydb:flyway-core:8.4.4 - https://flywaydb.org/flyway-core)
|
||||
@@ -315,8 +322,10 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
|
||||
* jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
|
||||
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
|
||||
* Hibernate Validator Engine (org.hibernate.validator:hibernate-validator:6.0.23.Final - http://hibernate.org/validator/hibernate-validator)
|
||||
* Hibernate Validator Portable Extension (org.hibernate.validator:hibernate-validator-cdi:6.0.23.Final - http://hibernate.org/validator/hibernate-validator-cdi)
|
||||
* Hibernate Validator Engine (org.hibernate.validator:hibernate-validator:6.2.5.Final - http://hibernate.org/validator/hibernate-validator)
|
||||
* Hibernate Validator Portable Extension (org.hibernate.validator:hibernate-validator-cdi:6.2.5.Final - http://hibernate.org/validator/hibernate-validator-cdi)
|
||||
* leveldb (org.iq80.leveldb:leveldb:0.12 - http://github.com/dain/leveldb/leveldb)
|
||||
* leveldb-api (org.iq80.leveldb:leveldb-api:0.12 - http://github.com/dain/leveldb/leveldb-api)
|
||||
* Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/)
|
||||
* Java Annotation Indexer (org.jboss:jandex:2.4.2.Final - http://www.jboss.org/jandex)
|
||||
* JBoss Logging 3 (org.jboss.logging:jboss-logging:3.4.3.Final - http://www.jboss.org)
|
||||
@@ -337,59 +346,67 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* Jetty Servlet Tester (org.mortbay.jetty:jetty-servlet-tester:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-servlet-tester)
|
||||
* Jetty Utilities (org.mortbay.jetty:jetty-util:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-util)
|
||||
* Servlet Specification API (org.mortbay.jetty:servlet-api:2.5-20081211 - http://jetty.mortbay.org/servlet-api)
|
||||
* jwarc (org.netpreserve:jwarc:0.19.0 - https://github.com/iipc/jwarc)
|
||||
* Objenesis (org.objenesis:objenesis:3.2 - http://objenesis.org/objenesis)
|
||||
* parboiled-core (org.parboiled:parboiled-core:1.3.1 - http://parboiled.org)
|
||||
* parboiled-java (org.parboiled:parboiled-java:1.3.1 - http://parboiled.org)
|
||||
* RRD4J (org.rrd4j:rrd4j:3.5 - https://github.com/rrd4j/rrd4j/)
|
||||
* JSONassert (org.skyscreamer:jsonassert:1.5.0 - https://github.com/skyscreamer/JSONassert)
|
||||
* Spring AOP (org.springframework:spring-aop:5.3.20 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring Beans (org.springframework:spring-beans:5.3.20 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring Context (org.springframework:spring-context:5.3.20 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring Context Support (org.springframework:spring-context-support:5.3.20 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring Core (org.springframework:spring-core:5.3.20 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring Expression Language (SpEL) (org.springframework:spring-expression:5.3.20 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring Commons Logging Bridge (org.springframework:spring-jcl:5.3.20 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring JDBC (org.springframework:spring-jdbc:5.3.20 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring Object/Relational Mapping (org.springframework:spring-orm:5.3.20 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring TestContext Framework (org.springframework:spring-test:5.3.20 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring Transaction (org.springframework:spring-tx:5.3.20 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring Web (org.springframework:spring-web:5.3.20 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring Web MVC (org.springframework:spring-webmvc:5.3.20 - https://github.com/spring-projects/spring-framework)
|
||||
* spring-boot (org.springframework.boot:spring-boot:2.6.8 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-actuator (org.springframework.boot:spring-boot-actuator:2.6.8 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-actuator-autoconfigure (org.springframework.boot:spring-boot-actuator-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-autoconfigure (org.springframework.boot:spring-boot-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot)
|
||||
* Scala Library (org.scala-lang:scala-library:2.13.9 - https://www.scala-lang.org/)
|
||||
* Scala Compiler (org.scala-lang:scala-reflect:2.13.0 - https://www.scala-lang.org/)
|
||||
* scala-collection-compat (org.scala-lang.modules:scala-collection-compat_2.13:2.1.6 - http://www.scala-lang.org/)
|
||||
* scala-java8-compat (org.scala-lang.modules:scala-java8-compat_2.13:0.9.0 - http://www.scala-lang.org/)
|
||||
* scala-parser-combinators (org.scala-lang.modules:scala-parser-combinators_2.13:1.1.2 - http://www.scala-lang.org/)
|
||||
* scala-xml (org.scala-lang.modules:scala-xml_2.13:1.3.0 - http://www.scala-lang.org/)
|
||||
* JSONassert (org.skyscreamer:jsonassert:1.5.1 - https://github.com/skyscreamer/JSONassert)
|
||||
* JCL 1.2 implemented over SLF4J (org.slf4j:jcl-over-slf4j:1.7.36 - http://www.slf4j.org)
|
||||
* Spring AOP (org.springframework:spring-aop:5.3.27 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring Beans (org.springframework:spring-beans:5.3.27 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring Context (org.springframework:spring-context:5.3.27 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring Context Support (org.springframework:spring-context-support:5.3.27 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring Core (org.springframework:spring-core:5.3.27 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring Expression Language (SpEL) (org.springframework:spring-expression:5.3.27 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring Commons Logging Bridge (org.springframework:spring-jcl:5.3.27 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring JDBC (org.springframework:spring-jdbc:5.3.27 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring Object/Relational Mapping (org.springframework:spring-orm:5.3.27 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring TestContext Framework (org.springframework:spring-test:5.3.27 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring Transaction (org.springframework:spring-tx:5.3.27 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring Web (org.springframework:spring-web:5.3.27 - https://github.com/spring-projects/spring-framework)
|
||||
* Spring Web MVC (org.springframework:spring-webmvc:5.3.27 - https://github.com/spring-projects/spring-framework)
|
||||
* spring-boot (org.springframework.boot:spring-boot:2.7.12 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-actuator (org.springframework.boot:spring-boot-actuator:2.7.12 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-actuator-autoconfigure (org.springframework.boot:spring-boot-actuator-autoconfigure:2.7.12 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-autoconfigure (org.springframework.boot:spring-boot-autoconfigure:2.7.12 - https://spring.io/projects/spring-boot)
|
||||
* Spring Boot Configuration Processor (org.springframework.boot:spring-boot-configuration-processor:2.0.0.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-tools/spring-boot-configuration-processor)
|
||||
* spring-boot-starter (org.springframework.boot:spring-boot-starter:2.6.8 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-starter-actuator (org.springframework.boot:spring-boot-starter-actuator:2.6.8 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-starter-aop (org.springframework.boot:spring-boot-starter-aop:2.6.8 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-starter-cache (org.springframework.boot:spring-boot-starter-cache:2.6.8 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-starter-data-rest (org.springframework.boot:spring-boot-starter-data-rest:2.6.8 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-starter-json (org.springframework.boot:spring-boot-starter-json:2.6.8 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-starter-log4j2 (org.springframework.boot:spring-boot-starter-log4j2:2.6.8 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-starter-security (org.springframework.boot:spring-boot-starter-security:2.6.8 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-starter-test (org.springframework.boot:spring-boot-starter-test:2.6.8 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-starter-tomcat (org.springframework.boot:spring-boot-starter-tomcat:2.6.8 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-starter-web (org.springframework.boot:spring-boot-starter-web:2.6.8 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-test (org.springframework.boot:spring-boot-test:2.6.8 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-test-autoconfigure (org.springframework.boot:spring-boot-test-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot)
|
||||
* Spring Data Core (org.springframework.data:spring-data-commons:2.6.4 - https://www.spring.io/spring-data/spring-data-commons)
|
||||
* Spring Data REST - Core (org.springframework.data:spring-data-rest-core:3.6.4 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-core)
|
||||
* Spring Data REST - WebMVC (org.springframework.data:spring-data-rest-webmvc:3.6.4 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-webmvc)
|
||||
* Spring HATEOAS (org.springframework.hateoas:spring-hateoas:1.4.2 - https://github.com/spring-projects/spring-hateoas)
|
||||
* spring-boot-starter (org.springframework.boot:spring-boot-starter:2.7.12 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-starter-actuator (org.springframework.boot:spring-boot-starter-actuator:2.7.12 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-starter-aop (org.springframework.boot:spring-boot-starter-aop:2.7.12 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-starter-cache (org.springframework.boot:spring-boot-starter-cache:2.7.12 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-starter-data-rest (org.springframework.boot:spring-boot-starter-data-rest:2.7.12 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-starter-json (org.springframework.boot:spring-boot-starter-json:2.7.12 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-starter-log4j2 (org.springframework.boot:spring-boot-starter-log4j2:2.7.12 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-starter-security (org.springframework.boot:spring-boot-starter-security:2.7.12 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-starter-test (org.springframework.boot:spring-boot-starter-test:2.7.12 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-starter-tomcat (org.springframework.boot:spring-boot-starter-tomcat:2.7.12 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-starter-web (org.springframework.boot:spring-boot-starter-web:2.7.12 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-test (org.springframework.boot:spring-boot-test:2.7.12 - https://spring.io/projects/spring-boot)
|
||||
* spring-boot-test-autoconfigure (org.springframework.boot:spring-boot-test-autoconfigure:2.7.12 - https://spring.io/projects/spring-boot)
|
||||
* Spring Data Core (org.springframework.data:spring-data-commons:2.7.12 - https://www.spring.io/spring-data/spring-data-commons)
|
||||
* Spring Data REST - Core (org.springframework.data:spring-data-rest-core:3.7.12 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-core)
|
||||
* Spring Data REST - WebMVC (org.springframework.data:spring-data-rest-webmvc:3.7.12 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-webmvc)
|
||||
* Spring HATEOAS (org.springframework.hateoas:spring-hateoas:1.5.4 - https://github.com/spring-projects/spring-hateoas)
|
||||
* Spring Plugin - Core (org.springframework.plugin:spring-plugin-core:2.0.0.RELEASE - https://github.com/spring-projects/spring-plugin/spring-plugin-core)
|
||||
* spring-security-config (org.springframework.security:spring-security-config:5.6.5 - https://spring.io/projects/spring-security)
|
||||
* spring-security-core (org.springframework.security:spring-security-core:5.6.5 - https://spring.io/projects/spring-security)
|
||||
* spring-security-crypto (org.springframework.security:spring-security-crypto:5.6.5 - https://spring.io/projects/spring-security)
|
||||
* spring-security-test (org.springframework.security:spring-security-test:5.6.5 - https://spring.io/projects/spring-security)
|
||||
* spring-security-web (org.springframework.security:spring-security-web:5.6.5 - https://spring.io/projects/spring-security)
|
||||
* spring-security-config (org.springframework.security:spring-security-config:5.7.8 - https://spring.io/projects/spring-security)
|
||||
* spring-security-core (org.springframework.security:spring-security-core:5.7.8 - https://spring.io/projects/spring-security)
|
||||
* spring-security-crypto (org.springframework.security:spring-security-crypto:5.7.8 - https://spring.io/projects/spring-security)
|
||||
* spring-security-test (org.springframework.security:spring-security-test:5.7.8 - https://spring.io/projects/spring-security)
|
||||
* spring-security-web (org.springframework.security:spring-security-web:5.7.8 - https://spring.io/projects/spring-security)
|
||||
* SWORD v2 :: Common Server Library (org.swordapp:sword2-server:1.0 - http://www.swordapp.org/)
|
||||
* snappy-java (org.xerial.snappy:snappy-java:1.1.7.6 - https://github.com/xerial/snappy-java)
|
||||
* xml-matchers (org.xmlmatchers:xml-matchers:0.10 - http://code.google.com/p/xml-matchers/)
|
||||
* org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.8.0 - https://www.xmlunit.org/)
|
||||
* org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.8.4 - https://www.xmlunit.org/)
|
||||
* org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.9.1 - https://www.xmlunit.org/)
|
||||
* org.xmlunit:xmlunit-placeholders (org.xmlunit:xmlunit-placeholders:2.8.0 - https://www.xmlunit.org/xmlunit-placeholders/)
|
||||
* SnakeYAML (org.yaml:snakeyaml:1.29 - http://www.snakeyaml.org)
|
||||
* SnakeYAML (org.yaml:snakeyaml:1.30 - https://bitbucket.org/snakeyaml/snakeyaml)
|
||||
* software.amazon.ion:ion-java (software.amazon.ion:ion-java:1.0.2 - https://github.com/amznlabs/ion-java/)
|
||||
* Xalan Java Serializer (xalan:serializer:2.7.2 - http://xml.apache.org/xalan-j/)
|
||||
* xalan (xalan:xalan:2.7.0 - no url defined)
|
||||
@@ -404,7 +421,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* coverity-escapers (com.coverity.security:coverity-escapers:1.1.1 - http://coverity.com/security)
|
||||
* Java Advanced Imaging Image I/O Tools API core (standalone) (com.github.jai-imageio:jai-imageio-core:1.4.0 - https://github.com/jai-imageio/jai-imageio-core)
|
||||
* JSONLD Java :: Core (com.github.jsonld-java:jsonld-java:0.5.1 - http://github.com/jsonld-java/jsonld-java/jsonld-java/)
|
||||
* curvesapi (com.github.virtuald:curvesapi:1.06 - https://github.com/virtuald/curvesapi)
|
||||
* curvesapi (com.github.virtuald:curvesapi:1.07 - https://github.com/virtuald/curvesapi)
|
||||
* Protocol Buffers [Core] (com.google.protobuf:protobuf-java:3.11.0 - https://developers.google.com/protocol-buffers/protobuf-java/)
|
||||
* JZlib (com.jcraft:jzlib:1.1.3 - http://www.jcraft.com/jzlib/)
|
||||
* dnsjava (dnsjava:dnsjava:2.1.7 - http://www.dnsjava.org)
|
||||
@@ -426,11 +443,15 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* asm-commons (org.ow2.asm:asm-commons:8.0.1 - http://asm.ow2.io/)
|
||||
* asm-tree (org.ow2.asm:asm-tree:7.1 - http://asm.ow2.org/)
|
||||
* asm-util (org.ow2.asm:asm-util:7.1 - http://asm.ow2.org/)
|
||||
* PostgreSQL JDBC Driver (org.postgresql:postgresql:42.4.1 - https://jdbc.postgresql.org)
|
||||
* PostgreSQL JDBC Driver (org.postgresql:postgresql:42.6.0 - https://jdbc.postgresql.org)
|
||||
* Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections)
|
||||
* JMatIO (org.tallison:jmatio:1.5 - https://github.com/tballison/jmatio)
|
||||
* XMLUnit for Java (xmlunit:xmlunit:1.3 - http://xmlunit.sourceforge.net/)
|
||||
|
||||
CC0:

* reactive-streams (org.reactivestreams:reactive-streams:1.0.2 - http://www.reactive-streams.org/)

Common Development and Distribution License (CDDL):

* istack common utility code runtime (com.sun.istack:istack-commons-runtime:3.0.7 - http://java.net/istack-commons/istack-commons-runtime/)
@@ -446,7 +467,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Java Servlet API (javax.servlet:javax.servlet-api:3.1.0 - http://servlet-spec.java.net)
* javax.transaction API (javax.transaction:javax.transaction-api:1.3 - http://jta-spec.java.net)
* jaxb-api (javax.xml.bind:jaxb-api:2.3.1 - https://github.com/javaee/jaxb-spec/jaxb-api)
* JHighlight (org.codelibs:jhighlight:1.0.3 - https://github.com/codelibs/jhighlight)
* JHighlight (org.codelibs:jhighlight:1.1.0 - https://github.com/codelibs/jhighlight)
* HK2 API module (org.glassfish.hk2:hk2-api:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api)
* ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator)
* HK2 Implementation Utilities (org.glassfish.hk2:hk2-utils:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-utils)
@@ -489,34 +510,34 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
||||
* Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - http://tomcat.apache.org/taglibs/standard/)
|
||||
* Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client)
|
||||
* Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client)
|
||||
* Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server)
|
||||
* Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server)
|
||||
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
|
||||
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
|
||||
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
|
||||
* Jetty :: Servlet Annotations (org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty)
|
||||
* Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-client)
|
||||
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-continuation)
|
||||
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-continuation)
|
||||
* Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-deploy)
|
||||
* Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-http)
|
||||
* Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-io)
|
||||
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-continuation)
|
||||
* Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-deploy)
|
||||
* Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-http)
|
||||
* Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-io)
|
||||
* Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-jmx)
|
||||
* Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty)
|
||||
* Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty)
|
||||
* Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-rewrite)
|
||||
* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-security)
|
||||
* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-security)
|
||||
* Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-server)
|
||||
* Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlet)
|
||||
* Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlets)
|
||||
* Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util)
|
||||
* Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util-ajax)
|
||||
* Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-webapp)
|
||||
* Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-xml)
|
||||
* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-security)
|
||||
* Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-server)
|
||||
* Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlet)
|
||||
* Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlets)
|
||||
* Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-util)
|
||||
* Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-util-ajax)
|
||||
* Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-webapp)
|
||||
* Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-xml)
|
||||
* Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-client)
|
||||
* Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-common)
|
||||
* Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-common)
|
||||
* Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-hpack)
|
||||
* Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport)
|
||||
* Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-server)
|
||||
* Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-server)
|
||||
* Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas)
|
||||
* HK2 API module (org.glassfish.hk2:hk2-api:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api)
|
||||
* ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator)
|
||||
@@ -542,10 +563,10 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* msg-simple (com.github.java-json-tools:msg-simple:1.2 - https://github.com/java-json-tools/msg-simple)
* uri-template (com.github.java-json-tools:uri-template:0.10 - https://github.com/java-json-tools/uri-template)
* FindBugs-Annotations (com.google.code.findbugs:annotations:3.0.1u2 - http://findbugs.sourceforge.net/)
* JHighlight (org.codelibs:jhighlight:1.0.3 - https://github.com/codelibs/jhighlight)
* Hibernate ORM - hibernate-core (org.hibernate:hibernate-core:5.6.5.Final - https://hibernate.org/orm)
* Hibernate ORM - hibernate-jcache (org.hibernate:hibernate-jcache:5.6.5.Final - https://hibernate.org/orm)
* Hibernate ORM - hibernate-jpamodelgen (org.hibernate:hibernate-jpamodelgen:5.6.5.Final - https://hibernate.org/orm)
* JHighlight (org.codelibs:jhighlight:1.1.0 - https://github.com/codelibs/jhighlight)
* Hibernate ORM - hibernate-core (org.hibernate:hibernate-core:5.6.15.Final - https://hibernate.org/orm)
* Hibernate ORM - hibernate-jcache (org.hibernate:hibernate-jcache:5.6.15.Final - https://hibernate.org/orm)
* Hibernate ORM - hibernate-jpamodelgen (org.hibernate:hibernate-jpamodelgen:5.6.15.Final - https://hibernate.org/orm)
* Hibernate Commons Annotations (org.hibernate.common:hibernate-commons-annotations:5.1.2.Final - http://hibernate.org)
* im4java (org.im4java:im4java:1.4.0 - http://sourceforge.net/projects/im4java/)
* Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/)
@@ -562,9 +583,11 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines

MIT License:

* better-files (com.github.pathikrit:better-files_2.13:3.9.1 - https://github.com/pathikrit/better-files)
* Java SemVer (com.github.zafarkhaja:java-semver:0.9.0 - https://github.com/zafarkhaja/jsemver)
* dd-plist (com.googlecode.plist:dd-plist:1.23 - http://www.github.com/3breadt/dd-plist)
* dd-plist (com.googlecode.plist:dd-plist:1.25 - http://www.github.com/3breadt/dd-plist)
* DigitalCollections: IIIF API Library (de.digitalcollections.iiif:iiif-apis:0.3.9 - https://github.com/dbmdz/iiif-apis)
* s3mock (io.findify:s3mock_2.13:0.2.6 - https://github.com/findify/s3mock)
* JOpt Simple (net.sf.jopt-simple:jopt-simple:5.0.4 - http://jopt-simple.github.io/jopt-simple)
* Bouncy Castle S/MIME API (org.bouncycastle:bcmail-jdk15on:1.70 - https://www.bouncycastle.org/java.html)
* Bouncy Castle PKIX, CMS, EAC, TSP, PKCS, OCSP, CMP, and CRMF APIs (org.bouncycastle:bcpkix-jdk15on:1.70 - https://www.bouncycastle.org/java.html)
@@ -572,15 +595,14 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Bouncy Castle ASN.1 Extension and Utility APIs (org.bouncycastle:bcutil-jdk15on:1.70 - https://www.bouncycastle.org/java.html)
* org.brotli:dec (org.brotli:dec:0.1.2 - http://brotli.org/dec)
* Checker Qual (org.checkerframework:checker-qual:3.10.0 - https://checkerframework.org)
* Checker Qual (org.checkerframework:checker-qual:3.5.0 - https://checkerframework.org)
* Checker Qual (org.checkerframework:checker-qual:3.31.0 - https://checkerframework.org)
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* mockito-core (org.mockito:mockito-core:3.12.4 - https://github.com/mockito/mockito)
* mockito-inline (org.mockito:mockito-inline:3.12.4 - https://github.com/mockito/mockito)
* ORCID - Model (org.orcid:orcid-model:3.0.2 - http://github.com/ORCID/orcid-model)
* JCL 1.2 implemented over SLF4J (org.slf4j:jcl-over-slf4j:1.7.25 - http://www.slf4j.org)
* JUL to SLF4J bridge (org.slf4j:jul-to-slf4j:1.7.25 - http://www.slf4j.org)
* SLF4J API Module (org.slf4j:slf4j-api:1.7.25 - http://www.slf4j.org)
* JUL to SLF4J bridge (org.slf4j:jul-to-slf4j:1.7.36 - http://www.slf4j.org)
* SLF4J API Module (org.slf4j:slf4j-api:1.7.36 - http://www.slf4j.org)
* SLF4J Extensions Module (org.slf4j:slf4j-ext:1.7.28 - http://www.slf4j.org)
* HAL Browser (org.webjars:hal-browser:ad9b865 - http://webjars.org)
* toastr (org.webjars.bowergithub.codeseven:toastr:2.1.4 - http://webjars.org)
@@ -589,7 +611,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* jquery (org.webjars.bowergithub.jquery:jquery-dist:3.6.0 - https://www.webjars.org)
* urijs (org.webjars.bowergithub.medialize:uri.js:1.19.10 - https://www.webjars.org)
* bootstrap (org.webjars.bowergithub.twbs:bootstrap:4.6.1 - https://www.webjars.org)
* core-js (org.webjars.npm:core-js:3.25.2 - https://www.webjars.org)
* core-js (org.webjars.npm:core-js:3.30.1 - https://www.webjars.org)
* @json-editor/json-editor (org.webjars.npm:json-editor__json-editor:2.6.1 - https://www.webjars.org)

Mozilla Public License:
@@ -606,17 +628,14 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* HdrHistogram (org.hdrhistogram:HdrHistogram:2.1.12 - http://hdrhistogram.github.io/HdrHistogram/)
* JSON in Java (org.json:json:20230227 - https://github.com/douglascrockford/JSON-java)
* LatencyUtils (org.latencyutils:LatencyUtils:2.0.3 - http://latencyutils.github.io/LatencyUtils/)
* Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections)
* XZ for Java (org.tukaani:xz:1.9 - https://tukaani.org/xz/java.html)

The JSON License:

* JSON in Java (org.json:json:20180130 - https://github.com/douglascrockford/JSON-java)

UnRar License:

* Java Unrar (com.github.junrar:junrar:7.4.1 - https://github.com/junrar/junrar)
* Java Unrar (com.github.junrar:junrar:7.5.3 - https://github.com/junrar/junrar)

Unicode/ICU License:
13
README.md
@@ -48,18 +48,7 @@ See [Running DSpace 7 with Docker Compose](dspace/src/main/docker-compose/README

## Contributing

DSpace is a community built and supported project. We do not have a centralized development or support team,
but have a dedicated group of volunteers who help us improve the software, documentation, resources, etc.

We welcome contributions of any type. Here's a few basic guides that provide suggestions for contributing to DSpace:
* [How to Contribute to DSpace](https://wiki.lyrasis.org/display/DSPACE/How+to+Contribute+to+DSpace): How to contribute in general (via code, documentation, bug reports, expertise, etc)
* [Code Contribution Guidelines](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines): How to give back code or contribute features, bug fixes, etc.
* [DSpace Community Advisory Team (DCAT)](https://wiki.lyrasis.org/display/cmtygp/DSpace+Community+Advisory+Team): If you are not a developer, we also have an interest group specifically for repository managers. The DCAT group meets virtually, once a month, and sends open invitations to join their meetings via the [DCAT mailing list](https://groups.google.com/d/forum/DSpaceCommunityAdvisoryTeam).

We also encourage GitHub Pull Requests (PRs) at any time. Please see our [Development with Git](https://wiki.lyrasis.org/display/DSPACE/Development+with+Git) guide for more info.

In addition, a listing of all known contributors to DSpace software can be
found online at: https://wiki.lyrasis.org/display/DSPACE/DSpaceContributors
See [Contributing documentation](CONTRIBUTING.md)

## Getting Help

@@ -92,9 +92,7 @@ For more information on CheckStyle configurations below, see: http://checkstyle.
|
||||
<!-- Requirements for Javadocs for methods -->
|
||||
<module name="JavadocMethod">
|
||||
<!-- All public methods MUST HAVE Javadocs -->
|
||||
<!-- <property name="scope" value="public"/> -->
|
||||
<!-- TODO: Above rule has been disabled because of large amount of missing public method Javadocs -->
|
||||
<property name="scope" value="nothing"/>
|
||||
<property name="scope" value="public"/>
|
||||
<!-- Allow params, throws and return tags to be optional -->
|
||||
<property name="allowMissingParamTags" value="true"/>
|
||||
<property name="allowMissingReturnTag" value="true"/>
|
||||
|
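For illustration, here is a minimal sketch (not taken from the DSpace codebase; the class and method names are made up) of a public method that satisfies the JavadocMethod rule as re-enabled above: the Javadoc description is required, while @param and @return tags may still be omitted because allowMissingParamTags and allowMissingReturnTag remain true.

```java
public class HandleUtilExample {
    /**
     * Normalize a handle string for comparison. This description alone keeps
     * the re-enabled public-scope JavadocMethod check happy; the @param and
     * @return tags are optional under the configuration shown above.
     */
    public String normalizeHandle(String handle) {
        return handle == null ? null : handle.trim().toLowerCase();
    }
}
```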
@@ -2,7 +2,7 @@ version: "3.7"
|
||||
|
||||
services:
|
||||
dspace-cli:
|
||||
image: "${DOCKER_OWNER:-dspace}/dspace-cli:${DSPACE_VER:-dspace-7_x}"
|
||||
image: "${DOCKER_OWNER:-dspace}/dspace-cli:${DSPACE_VER:-latest}"
|
||||
container_name: dspace-cli
|
||||
build:
|
||||
context: .
|
||||
|
@@ -28,7 +28,7 @@ services:
|
||||
# proxies.trusted.ipranges: This setting is required for a REST API running in Docker to trust requests
|
||||
# from the host machine. This IP range MUST correspond to the 'dspacenet' subnet defined above.
|
||||
proxies__P__trusted__P__ipranges: '172.23.0'
|
||||
image: "${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-dspace-7_x-test}"
|
||||
image: "${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-latest-test}"
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile.test
|
||||
@@ -62,13 +62,17 @@ services:
|
||||
while (!</dev/tcp/dspacedb/5432) > /dev/null 2>&1; do sleep 1; done;
|
||||
/dspace/bin/dspace database migrate
|
||||
catalina.sh run
|
||||
# DSpace database container
|
||||
# DSpace PostgreSQL database container
|
||||
dspacedb:
|
||||
container_name: dspacedb
|
||||
# Uses a custom Postgres image with pgcrypto installed
|
||||
image: "${DOCKER_OWNER:-dspace}/dspace-postgres-pgcrypto:${DSPACE_VER:-latest}"
|
||||
build:
|
||||
# Must build out of subdirectory to have access to install script for pgcrypto
|
||||
context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/
|
||||
environment:
|
||||
PGDATA: /pgdata
|
||||
# Uses a custom Postgres image with pgcrypto installed
|
||||
image: dspace/dspace-postgres-pgcrypto
|
||||
POSTGRES_PASSWORD: dspace
|
||||
networks:
|
||||
dspacenet:
|
||||
ports:
|
||||
@@ -77,12 +81,17 @@ services:
|
||||
stdin_open: true
|
||||
tty: true
|
||||
volumes:
|
||||
# Keep Postgres data directory between reboots
|
||||
- pgdata:/pgdata
|
||||
# DSpace Solr container
|
||||
dspacesolr:
|
||||
container_name: dspacesolr
|
||||
# Uses official Solr image at https://hub.docker.com/_/solr/
|
||||
image: solr:8.11-slim
|
||||
image: "${DOCKER_OWNER:-dspace}/dspace-solr:${DSPACE_VER:-latest}"
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./dspace/src/main/docker/dspace-solr/Dockerfile
|
||||
args:
|
||||
SOLR_VERSION: "${SOLR_VER:-8.11}"
|
||||
networks:
|
||||
dspacenet:
|
||||
ports:
|
||||
@@ -92,30 +101,25 @@ services:
|
||||
tty: true
|
||||
working_dir: /var/solr/data
|
||||
volumes:
|
||||
# Mount our local Solr core configs so that they are available as Solr configsets on container
|
||||
- ./dspace/solr/authority:/opt/solr/server/solr/configsets/authority
|
||||
- ./dspace/solr/oai:/opt/solr/server/solr/configsets/oai
|
||||
- ./dspace/solr/search:/opt/solr/server/solr/configsets/search
|
||||
- ./dspace/solr/statistics:/opt/solr/server/solr/configsets/statistics
|
||||
# Keep Solr data directory between reboots
|
||||
- solr_data:/var/solr/data
|
||||
# Initialize all DSpace Solr cores using the mounted local configsets (see above), then start Solr
|
||||
# Initialize all DSpace Solr cores then start Solr:
|
||||
# * First, run precreate-core to create the core (if it doesn't yet exist). If exists already, this is a no-op
|
||||
# * Second, copy updated configs from mounted configsets to this core. If it already existed, this updates core
|
||||
# to the latest configs. If it's a newly created core, this is a no-op.
|
||||
# * Second, copy configsets to this core:
|
||||
# Updates to Solr configs require the container to be rebuilt/restarted: `docker compose -p d7 up -d --build dspacesolr`
|
||||
entrypoint:
|
||||
- /bin/bash
|
||||
- '-c'
|
||||
- |
|
||||
init-var-solr
|
||||
precreate-core authority /opt/solr/server/solr/configsets/authority
|
||||
cp -r -u /opt/solr/server/solr/configsets/authority/* authority
|
||||
cp -r /opt/solr/server/solr/configsets/authority/* authority
|
||||
precreate-core oai /opt/solr/server/solr/configsets/oai
|
||||
cp -r -u /opt/solr/server/solr/configsets/oai/* oai
|
||||
cp -r /opt/solr/server/solr/configsets/oai/* oai
|
||||
precreate-core search /opt/solr/server/solr/configsets/search
|
||||
cp -r -u /opt/solr/server/solr/configsets/search/* search
|
||||
cp -r /opt/solr/server/solr/configsets/search/* search
|
||||
precreate-core statistics /opt/solr/server/solr/configsets/statistics
|
||||
cp -r -u /opt/solr/server/solr/configsets/statistics/* statistics
|
||||
cp -r /opt/solr/server/solr/configsets/statistics/* statistics
|
||||
exec solr -f
|
||||
volumes:
|
||||
assetstore:
|
||||
|
@@ -12,7 +12,7 @@
|
||||
<parent>
|
||||
<groupId>org.dspace</groupId>
|
||||
<artifactId>dspace-parent</artifactId>
|
||||
<version>7.5-SNAPSHOT</version>
|
||||
<version>8.0-SNAPSHOT</version>
|
||||
<relativePath>..</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -492,12 +492,6 @@
|
||||
<dependency>
|
||||
<groupId>jaxen</groupId>
|
||||
<artifactId>jaxen</artifactId>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<artifactId>xom</artifactId>
|
||||
<groupId>xom</groupId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.jdom</groupId>
|
||||
@@ -632,7 +626,7 @@
|
||||
<dependency>
|
||||
<groupId>dnsjava</groupId>
|
||||
<artifactId>dnsjava</artifactId>
|
||||
<version>2.1.7</version>
|
||||
<version>2.1.9</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
@@ -668,7 +662,7 @@
|
||||
<dependency>
|
||||
<groupId>org.flywaydb</groupId>
|
||||
<artifactId>flyway-core</artifactId>
|
||||
<version>8.4.4</version>
|
||||
<version>8.5.13</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Google Analytics -->
|
||||
@@ -776,7 +770,7 @@
|
||||
<dependency>
|
||||
<groupId>org.json</groupId>
|
||||
<artifactId>json</artifactId>
|
||||
<version>20180130</version>
|
||||
<version>20230227</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Useful for testing command-line tools -->
|
||||
@@ -791,7 +785,7 @@
|
||||
<dependency>
|
||||
<groupId>com.opencsv</groupId>
|
||||
<artifactId>opencsv</artifactId>
|
||||
<version>5.6</version>
|
||||
<version>5.7.1</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Email templating -->
|
||||
@@ -809,7 +803,8 @@
|
||||
<dependency>
|
||||
<groupId>org.apache.bcel</groupId>
|
||||
<artifactId>bcel</artifactId>
|
||||
<version>6.4.0</version>
|
||||
<version>6.7.0</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<!-- required for openaire api integration -->
|
||||
@@ -817,6 +812,13 @@
|
||||
<groupId>eu.openaire</groupId>
|
||||
<artifactId>funders-model</artifactId>
|
||||
<version>2.0.0</version>
|
||||
<exclusions>
|
||||
<!-- Newer version pulled in via Jersey below -->
|
||||
<exclusion>
|
||||
<groupId>org.javassist</groupId>
|
||||
<artifactId>javassist</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
@@ -838,37 +840,60 @@
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.findify</groupId>
|
||||
<artifactId>s3mock_2.13</artifactId>
|
||||
<version>0.2.6</version>
|
||||
<scope>test</scope>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.amazonawsl</groupId>
|
||||
<artifactId>aws-java-sdk-s3</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>com.amazonaws</groupId>
|
||||
<artifactId>aws-java-sdk-s3</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
<dependencyManagement>
|
||||
<dependencies>
|
||||
<!-- for mockserver -->
|
||||
<!-- Solve dependency convergence issues related to
|
||||
<!-- Solve dependency convergence issues related to Solr and
|
||||
'mockserver-junit-rule' by selecting the versions we want to use. -->
|
||||
<dependency>
|
||||
<groupId>io.netty</groupId>
|
||||
<artifactId>netty-buffer</artifactId>
|
||||
<version>4.1.68.Final</version>
|
||||
<version>4.1.94.Final</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.netty</groupId>
|
||||
<artifactId>netty-transport</artifactId>
|
||||
<version>4.1.68.Final</version>
|
||||
<version>4.1.94.Final</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.netty</groupId>
|
||||
<artifactId>netty-transport-native-unix-common</artifactId>
|
||||
<version>4.1.94.Final</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.netty</groupId>
|
||||
<artifactId>netty-common</artifactId>
|
||||
<version>4.1.68.Final</version>
|
||||
<version>4.1.94.Final</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.netty</groupId>
|
||||
<artifactId>netty-handler</artifactId>
|
||||
<version>4.1.68.Final</version>
|
||||
<version>4.1.94.Final</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.netty</groupId>
|
||||
<artifactId>netty-codec</artifactId>
|
||||
<version>4.1.68.Final</version>
|
||||
<version>4.1.94.Final</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.velocity</groupId>
|
||||
@@ -901,6 +926,12 @@
|
||||
<artifactId>swagger-core</artifactId>
|
||||
<version>1.6.2</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.scala-lang</groupId>
|
||||
<artifactId>scala-library</artifactId>
|
||||
<version>2.13.11</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</dependencyManagement>
|
||||
|
||||
|
@@ -7,33 +7,16 @@
|
||||
*/
|
||||
package org.dspace.administer;
|
||||
|
||||
import java.sql.SQLException;
|
||||
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.dspace.authorize.service.AuthorizeService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
* The {@link ScriptConfiguration} for the {@link ProcessCleaner} script.
|
||||
*/
|
||||
public class ProcessCleanerConfiguration<T extends ProcessCleaner> extends ScriptConfiguration<T> {
|
||||
|
||||
@Autowired
|
||||
private AuthorizeService authorizeService;
|
||||
|
||||
private Class<T> dspaceRunnableClass;
|
||||
|
||||
@Override
|
||||
public boolean isAllowedToExecute(Context context) {
|
||||
try {
|
||||
return authorizeService.isAdmin(context);
|
||||
} catch (SQLException e) {
|
||||
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Options getOptions() {
|
||||
if (options == null) {
|
||||
|
@@ -0,0 +1,54 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.alerts;
|
||||
|
||||
/**
|
||||
* Enum representing the options for allowing sessions:
|
||||
* ALLOW_ALL_SESSIONS - Will allow all users to log in and continue their sessions
|
||||
* ALLOW_CURRENT_SESSIONS_ONLY - Will prevent non-admin users from logging in; however, logged-in users
|
||||
* will remain logged in
|
||||
* ALLOW_ADMIN_SESSIONS_ONLY - Only admin users can log in; non-admin sessions will be interrupted
|
||||
*
|
||||
* NOTE: These values can be stored in the database, but there is currently no support for interrupting or preventing
* sessions.
|
||||
*/
|
||||
public enum AllowSessionsEnum {
|
||||
ALLOW_ALL_SESSIONS("all"),
|
||||
ALLOW_CURRENT_SESSIONS_ONLY("current"),
|
||||
ALLOW_ADMIN_SESSIONS_ONLY("admin");
|
||||
|
||||
private String allowSessionsType;
|
||||
|
||||
AllowSessionsEnum(String allowSessionsType) {
|
||||
this.allowSessionsType = allowSessionsType;
|
||||
}
|
||||
|
||||
public String getValue() {
|
||||
return allowSessionsType;
|
||||
}
|
||||
|
||||
public static AllowSessionsEnum fromString(String alertAllowSessionType) {
|
||||
if (alertAllowSessionType == null) {
|
||||
return AllowSessionsEnum.ALLOW_ALL_SESSIONS;
|
||||
}
|
||||
|
||||
switch (alertAllowSessionType) {
|
||||
case "all":
|
||||
return AllowSessionsEnum.ALLOW_ALL_SESSIONS;
|
||||
case "current":
|
||||
return AllowSessionsEnum.ALLOW_CURRENT_SESSIONS_ONLY;
|
||||
case "admin" :
|
||||
return AllowSessionsEnum.ALLOW_ADMIN_SESSIONS_ONLY;
|
||||
default:
|
||||
throw new IllegalArgumentException("No corresponding enum value for provided string: "
|
||||
+ alertAllowSessionType);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
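A short usage sketch (not part of the commit; the demo class name is made up) showing how the enum above maps between its stored string value and the enum constants:

```java
import org.dspace.alerts.AllowSessionsEnum;

public class AllowSessionsEnumDemo {
    public static void main(String[] args) {
        // "current" resolves to ALLOW_CURRENT_SESSIONS_ONLY
        AllowSessionsEnum allow = AllowSessionsEnum.fromString("current");
        System.out.println(allow);            // ALLOW_CURRENT_SESSIONS_ONLY
        System.out.println(allow.getValue()); // current

        // null falls back to ALLOW_ALL_SESSIONS; any other unknown string
        // makes fromString throw an IllegalArgumentException
        System.out.println(AllowSessionsEnum.fromString(null)); // ALLOW_ALL_SESSIONS
    }
}
```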
179 dspace-api/src/main/java/org/dspace/alerts/SystemWideAlert.java Normal file
@@ -0,0 +1,179 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.alerts;
|
||||
|
||||
import java.util.Date;
|
||||
import javax.persistence.Cacheable;
|
||||
import javax.persistence.Column;
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.GeneratedValue;
|
||||
import javax.persistence.GenerationType;
|
||||
import javax.persistence.Id;
|
||||
import javax.persistence.SequenceGenerator;
|
||||
import javax.persistence.Table;
|
||||
import javax.persistence.Temporal;
|
||||
import javax.persistence.TemporalType;
|
||||
|
||||
import org.apache.commons.lang3.builder.EqualsBuilder;
|
||||
import org.apache.commons.lang3.builder.HashCodeBuilder;
|
||||
import org.dspace.core.ReloadableEntity;
|
||||
import org.hibernate.annotations.CacheConcurrencyStrategy;
|
||||
|
||||
/**
|
||||
* Database object representing system-wide alerts
|
||||
*/
|
||||
@Entity
|
||||
@Cacheable
|
||||
@org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, include = "non-lazy")
|
||||
@Table(name = "systemwidealert")
|
||||
public class SystemWideAlert implements ReloadableEntity<Integer> {
|
||||
|
||||
@Id
|
||||
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "alert_id_seq")
|
||||
@SequenceGenerator(name = "alert_id_seq", sequenceName = "alert_id_seq", allocationSize = 1)
|
||||
@Column(name = "alert_id", unique = true, nullable = false)
|
||||
private Integer alertId;
|
||||
|
||||
@Column(name = "message", nullable = false)
|
||||
private String message;
|
||||
|
||||
@Column(name = "allow_sessions")
|
||||
private String allowSessions;
|
||||
|
||||
@Column(name = "countdown_to")
|
||||
@Temporal(TemporalType.TIMESTAMP)
|
||||
private Date countdownTo;
|
||||
|
||||
@Column(name = "active")
|
||||
private boolean active;
|
||||
|
||||
protected SystemWideAlert() {
|
||||
}
|
||||
|
||||
/**
|
||||
* This method returns the ID that the system-wide alert holds within the database
|
||||
*
|
||||
* @return The ID that the system-wide alert holds within the database
|
||||
*/
|
||||
@Override
|
||||
public Integer getID() {
|
||||
return alertId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the ID for the system-wide alert
|
||||
*
|
||||
* @param alertID The ID to set
|
||||
*/
|
||||
public void setID(final Integer alertID) {
|
||||
this.alertId = alertID;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve the message of the system-wide alert
|
||||
*
|
||||
* @return the message of the system-wide alert
|
||||
*/
|
||||
public String getMessage() {
|
||||
return message;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the message of the system-wide alert
|
||||
*
|
||||
* @param message The message to set
|
||||
*/
|
||||
public void setMessage(final String message) {
|
||||
this.message = message;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve what kind of sessions are allowed while the system-wide alert is active
|
||||
*
|
||||
* @return what kind of sessions are allowed while the system-wide alert is active
|
||||
*/
|
||||
public AllowSessionsEnum getAllowSessions() {
|
||||
return AllowSessionsEnum.fromString(allowSessions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set what kind of sessions are allowed while the system-wide alert is active
|
||||
*
|
||||
* @param allowSessions The kind of sessions that are allowed while the alert is active
|
||||
*/
|
||||
public void setAllowSessions(AllowSessionsEnum allowSessions) {
|
||||
this.allowSessions = allowSessions.getValue();
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve the date that is counted down to while the system-wide alert is active
*
* @return the date that is counted down to while the system-wide alert is active
|
||||
*/
|
||||
public Date getCountdownTo() {
|
||||
return countdownTo;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the date that is counted down to while the system-wide alert is active
*
* @param countdownTo The date to count down to
|
||||
*/
|
||||
public void setCountdownTo(final Date countdownTo) {
|
||||
this.countdownTo = countdownTo;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve whether the system-wide alert is active
|
||||
*
|
||||
* @return whether the system-wide alert is active
|
||||
*/
|
||||
public boolean isActive() {
|
||||
return active;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set whether the system-wide alert is active
|
||||
*
|
||||
* @param active Whether the system-wide alert is active
|
||||
*/
|
||||
public void setActive(final boolean active) {
|
||||
this.active = active;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return <code>true</code> if <code>other</code> is the same SystemWideAlert
|
||||
* as this object, <code>false</code> otherwise
|
||||
*
|
||||
* @param other object to compare to
|
||||
* @return <code>true</code> if object passed in represents the same
|
||||
* system-wide alert as this object
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
return (other instanceof SystemWideAlert &&
|
||||
new EqualsBuilder().append(this.getID(), ((SystemWideAlert) other).getID())
|
||||
.append(this.getMessage(), ((SystemWideAlert) other).getMessage())
|
||||
.append(this.getAllowSessions(), ((SystemWideAlert) other).getAllowSessions())
|
||||
.append(this.getCountdownTo(), ((SystemWideAlert) other).getCountdownTo())
|
||||
.append(this.isActive(), ((SystemWideAlert) other).isActive())
|
||||
.isEquals());
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return new HashCodeBuilder(17, 37)
|
||||
.append(this.getID())
|
||||
.append(this.getMessage())
|
||||
.append(this.getAllowSessions())
|
||||
.append(this.getCountdownTo())
|
||||
.append(this.isActive())
|
||||
.toHashCode();
|
||||
}
|
||||
|
||||
}
|
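To show how the entity above bridges the persisted allow_sessions string and the enum accessors, here is a minimal sketch (hypothetical demo class, placed in the same package only so the protected constructor is reachable; real code creates alerts through the service layer):

```java
package org.dspace.alerts;

import java.util.Date;

public class SystemWideAlertDemo {
    public static void main(String[] args) {
        // Protected constructor is visible because this demo shares the package
        SystemWideAlert alert = new SystemWideAlert();
        alert.setMessage("Maintenance window starts at 22:00 UTC");
        alert.setAllowSessions(AllowSessionsEnum.ALLOW_ADMIN_SESSIONS_ONLY);
        alert.setCountdownTo(new Date());
        alert.setActive(true);

        // setAllowSessions stores the enum's string value; getAllowSessions
        // parses it back into the enum on read
        System.out.println(alert.getAllowSessions()); // ALLOW_ADMIN_SESSIONS_ONLY
    }
}
```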
@@ -0,0 +1,129 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.alerts;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.alerts.dao.SystemWideAlertDAO;
|
||||
import org.dspace.alerts.service.SystemWideAlertService;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.authorize.service.AuthorizeService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.LogHelper;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
* The implementation for the {@link SystemWideAlertService} class
|
||||
*/
|
||||
public class SystemWideAlertServiceImpl implements SystemWideAlertService {
|
||||
|
||||
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SystemWideAlertService.class);
|
||||
|
||||
|
||||
@Autowired
|
||||
private SystemWideAlertDAO systemWideAlertDAO;
|
||||
|
||||
@Autowired
|
||||
private AuthorizeService authorizeService;
|
||||
|
||||
@Override
|
||||
public SystemWideAlert create(final Context context, final String message,
|
||||
final AllowSessionsEnum allowSessionsType,
|
||||
final Date countdownTo, final boolean active) throws SQLException,
|
||||
AuthorizeException {
|
||||
if (!authorizeService.isAdmin(context)) {
|
||||
throw new AuthorizeException(
|
||||
"Only administrators can create a system-wide alert");
|
||||
}
|
||||
SystemWideAlert systemWideAlert = new SystemWideAlert();
|
||||
systemWideAlert.setMessage(message);
|
||||
systemWideAlert.setAllowSessions(allowSessionsType);
|
||||
systemWideAlert.setCountdownTo(countdownTo);
|
||||
systemWideAlert.setActive(active);
|
||||
|
||||
SystemWideAlert createdAlert = systemWideAlertDAO.create(context, systemWideAlert);
|
||||
log.info(LogHelper.getHeader(context, "system_wide_alert_create",
|
||||
"System Wide Alert has been created with message: '" + message + "' and ID "
|
||||
+ createdAlert.getID() + " and allowSessionsType " + allowSessionsType +
|
||||
" and active set to " + active));
|
||||
|
||||
|
||||
return createdAlert;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SystemWideAlert find(final Context context, final int alertId) throws SQLException {
|
||||
return systemWideAlertDAO.findByID(context, SystemWideAlert.class, alertId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<SystemWideAlert> findAll(final Context context) throws SQLException {
|
||||
return systemWideAlertDAO.findAll(context, SystemWideAlert.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<SystemWideAlert> findAll(final Context context, final int limit, final int offset) throws SQLException {
|
||||
return systemWideAlertDAO.findAll(context, limit, offset);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<SystemWideAlert> findAllActive(final Context context, final int limit, final int offset)
|
||||
throws SQLException {
|
||||
return systemWideAlertDAO.findAllActive(context, limit, offset);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void delete(final Context context, final SystemWideAlert systemWideAlert)
|
||||
throws SQLException, IOException, AuthorizeException {
|
||||
if (!authorizeService.isAdmin(context)) {
|
||||
throw new AuthorizeException(
|
||||
"Only administrators can create a system-wide alert");
|
||||
}
|
||||
systemWideAlertDAO.delete(context, systemWideAlert);
|
||||
log.info(LogHelper.getHeader(context, "system_wide_alert_create",
|
||||
"System Wide Alert with ID " + systemWideAlert.getID() + " has been deleted"));
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void update(final Context context, final SystemWideAlert systemWideAlert)
|
||||
throws SQLException, AuthorizeException {
|
||||
if (!authorizeService.isAdmin(context)) {
|
||||
throw new AuthorizeException(
|
||||
"Only administrators can create a system-wide alert");
|
||||
}
|
||||
systemWideAlertDAO.save(context, systemWideAlert);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean canNonAdminUserLogin(Context context) throws SQLException {
|
||||
List<SystemWideAlert> active = findAllActive(context, 1, 0);
|
||||
if (active == null || active.isEmpty()) {
|
||||
return true;
|
||||
}
|
||||
return active.get(0).getAllowSessions() == AllowSessionsEnum.ALLOW_ALL_SESSIONS;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean canUserMaintainSession(Context context, EPerson ePerson) throws SQLException {
|
||||
if (authorizeService.isAdmin(context, ePerson)) {
|
||||
return true;
|
||||
}
|
||||
List<SystemWideAlert> active = findAllActive(context, 1, 0);
|
||||
if (active == null || active.isEmpty()) {
|
||||
return true;
|
||||
}
|
||||
return active.get(0).getAllowSessions() != AllowSessionsEnum.ALLOW_ADMIN_SESSIONS_ONLY;
|
||||
}
|
||||
}
|
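A sketch (assumed wiring, hypothetical class name) of how a login path might combine the two checks implemented above; in practice the service would be injected by Spring:

```java
import java.sql.SQLException;

import org.dspace.alerts.service.SystemWideAlertService;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;

public class AlertAwareLoginCheck {

    private final SystemWideAlertService alertService;

    public AlertAwareLoginCheck(SystemWideAlertService alertService) {
        this.alertService = alertService;
    }

    /**
     * Decide whether a login attempt or an existing session may proceed.
     * With no active alert both checks return true; administrators always
     * pass canUserMaintainSession.
     */
    public boolean mayProceed(Context context, EPerson ePerson) throws SQLException {
        if (ePerson == null) {
            // Login attempt: allowed only when the active alert (if any)
            // permits all sessions
            return alertService.canNonAdminUserLogin(context);
        }
        // Existing session: interrupted only when the alert is admin-only
        return alertService.canUserMaintainSession(context, ePerson);
    }
}
```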
@@ -0,0 +1,45 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.alerts.dao;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.alerts.SystemWideAlert;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.GenericDAO;
|
||||
|
||||
/**
|
||||
* This is the Data Access Object for the {@link SystemWideAlert} object
|
||||
*/
|
||||
public interface SystemWideAlertDAO extends GenericDAO<SystemWideAlert> {
|
||||
|
||||
/**
|
||||
* Returns a list of all SystemWideAlert objects in the database
|
||||
*
|
||||
* @param context The relevant DSpace context
|
||||
* @param limit The limit for the amount of SystemWideAlerts returned
|
||||
* @param offset The offset for the Processes to be returned
|
||||
* @return The list of all SystemWideAlert objects in the Database
|
||||
* @throws SQLException If something goes wrong
|
||||
*/
|
||||
List<SystemWideAlert> findAll(Context context, int limit, int offset) throws SQLException;
|
||||
|
||||
/**
|
||||
* Returns a list of all active SystemWideAlert objects in the database
|
||||
*
|
||||
* @param context The relevant DSpace context
|
||||
* @param limit The limit for the amount of SystemWideAlerts returned
|
||||
* @param offset The offset for the SystemWideAlerts to be returned
* @return The list of all active SystemWideAlert objects in the database
|
||||
* @throws SQLException If something goes wrong
|
||||
*/
|
||||
List<SystemWideAlert> findAllActive(Context context, int limit, int offset) throws SQLException;
|
||||
|
||||
|
||||
}
|
@@ -0,0 +1,48 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.alerts.dao.impl;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
import javax.persistence.criteria.CriteriaBuilder;
|
||||
import javax.persistence.criteria.CriteriaQuery;
|
||||
import javax.persistence.criteria.Root;
|
||||
|
||||
import org.dspace.alerts.SystemWideAlert;
|
||||
import org.dspace.alerts.SystemWideAlert_;
|
||||
import org.dspace.alerts.dao.SystemWideAlertDAO;
|
||||
import org.dspace.core.AbstractHibernateDAO;
|
||||
import org.dspace.core.Context;
|
||||
|
||||
/**
|
||||
* Implementation class for the {@link SystemWideAlertDAO}
|
||||
*/
|
||||
public class SystemWideAlertDAOImpl extends AbstractHibernateDAO<SystemWideAlert> implements SystemWideAlertDAO {
|
||||
|
||||
public List<SystemWideAlert> findAll(final Context context, final int limit, final int offset) throws SQLException {
|
||||
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
|
||||
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, SystemWideAlert.class);
|
||||
Root<SystemWideAlert> alertRoot = criteriaQuery.from(SystemWideAlert.class);
|
||||
criteriaQuery.select(alertRoot);
|
||||
|
||||
return list(context, criteriaQuery, false, SystemWideAlert.class, limit, offset);
|
||||
}
|
||||
|
||||
public List<SystemWideAlert> findAllActive(final Context context, final int limit, final int offset)
|
||||
throws SQLException {
|
||||
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
|
||||
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, SystemWideAlert.class);
|
||||
Root<SystemWideAlert> alertRoot = criteriaQuery.from(SystemWideAlert.class);
|
||||
criteriaQuery.select(alertRoot);
|
||||
criteriaQuery.where(criteriaBuilder.equal(alertRoot.get(SystemWideAlert_.active), true));
|
||||
|
||||
return list(context, criteriaQuery, false, SystemWideAlert.class, limit, offset);
|
||||
}
|
||||
|
||||
|
||||
}
|
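As a further illustration (a sketch, not part of the commit), the same criteria pattern could be extended inside SystemWideAlertDAOImpl above, reusing only the helpers already visible there (getCriteriaBuilder, getCriteriaQuery, list) and the generated SystemWideAlert_ metamodel, for instance to fetch only inactive alerts:

```java
// Hypothetical companion method, mirroring findAllActive above
public List<SystemWideAlert> findAllInactive(final Context context, final int limit, final int offset)
    throws SQLException {
    CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
    CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, SystemWideAlert.class);
    Root<SystemWideAlert> alertRoot = criteriaQuery.from(SystemWideAlert.class);
    criteriaQuery.select(alertRoot);
    // Only difference from findAllActive: filter on active == false
    criteriaQuery.where(criteriaBuilder.equal(alertRoot.get(SystemWideAlert_.active), false));

    return list(context, criteriaQuery, false, SystemWideAlert.class, limit, offset);
}
```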
@@ -0,0 +1,118 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.alerts.service;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.alerts.AllowSessionsEnum;
|
||||
import org.dspace.alerts.SystemWideAlert;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.eperson.EPerson;
|
||||
|
||||
/**
|
||||
* An interface for the SystemWideAlertService with methods regarding the SystemWideAlert workload
|
||||
*/
|
||||
public interface SystemWideAlertService {
|
||||
|
||||
/**
|
||||
* This method will create a SystemWideAlert object in the database
|
||||
*
|
||||
* @param context The relevant DSpace context
|
||||
* @param message The message of the system-wide alert
|
||||
* @param allowSessionsType Which sessions need to be allowed for the system-wide alert
|
||||
* @param countdownTo The date to count down to while the system-wide alert is active
* @param active Whether the system-wide alert is active
|
||||
* @return The created SystemWideAlert object
|
||||
* @throws SQLException If something goes wrong
|
||||
*/
|
||||
SystemWideAlert create(Context context, String message, AllowSessionsEnum allowSessionsType,
|
||||
Date countdownTo, boolean active
|
||||
) throws SQLException, AuthorizeException;
|
||||
|
||||
/**
|
||||
* This method will retrieve a SystemWideAlert object from the Database with the given ID
|
||||
*
|
||||
* @param context The relevant DSpace context
|
||||
* @param alertId The alert id to search for in the database
|
||||
* @return The system-wide alert that holds the given alert id
|
||||
* @throws SQLException If something goes wrong
|
||||
*/
|
||||
SystemWideAlert find(Context context, int alertId) throws SQLException;
|
||||
|
||||
/**
|
||||
* Returns a list of all SystemWideAlert objects in the database
|
||||
*
|
||||
* @param context The relevant DSpace context
|
||||
* @return The list of all SystemWideAlert objects in the Database
|
||||
* @throws SQLException If something goes wrong
|
||||
*/
|
||||
List<SystemWideAlert> findAll(Context context) throws SQLException;
|
||||
|
||||
/**
|
||||
* Returns a list of all SystemWideAlert objects in the database
|
||||
*
|
||||
* @param context The relevant DSpace context
|
||||
* @param limit The limit for the amount of system-wide alerts returned
|
||||
* @param offset The offset for the system-wide alerts to be returned
|
||||
* @return The list of all SystemWideAlert objects in the Database
|
||||
* @throws SQLException If something goes wrong
|
||||
*/
|
||||
List<SystemWideAlert> findAll(Context context, int limit, int offset) throws SQLException;
|
||||
|
||||
|
||||
/**
|
||||
* Returns a list of all active SystemWideAlert objects in the database
|
||||
*
|
||||
* @param context The relevant DSpace context
* @param limit The limit for the amount of system-wide alerts returned
* @param offset The offset for the system-wide alerts to be returned
* @return The list of all active SystemWideAlert objects in the database
|
||||
* @throws SQLException If something goes wrong
|
||||
*/
|
||||
List<SystemWideAlert> findAllActive(Context context, int limit, int offset) throws SQLException;
|
||||
|
||||
/**
|
||||
* This method will delete the given SystemWideAlert object from the database
|
||||
*
|
||||
* @param context The relevant DSpace context
|
||||
* @param systemWideAlert The SystemWideAlert object to be deleted
|
||||
* @throws SQLException If something goes wrong
|
||||
*/
|
||||
void delete(Context context, SystemWideAlert systemWideAlert)
|
||||
throws SQLException, IOException, AuthorizeException;
|
||||
|
||||
|
||||
/**
|
||||
* This method will be used to update the given SystemWideAlert object in the database
|
||||
*
|
||||
* @param context The relevant DSpace context
|
||||
* @param systemWideAlert The SystemWideAlert object to be updated
|
||||
* @throws SQLException If something goes wrong
|
||||
*/
|
||||
void update(Context context, SystemWideAlert systemWideAlert) throws SQLException, AuthorizeException;
|
||||
|
||||
|
||||
/**
|
||||
* Verifies if the user connected to the current context can retain its session
|
||||
*
|
||||
* @param context The relevant DSpace context
* @param ePerson The EPerson whose session is being checked
* @return true if the user connected to the current context can retain their session
|
||||
*/
|
||||
boolean canUserMaintainSession(Context context, EPerson ePerson) throws SQLException;
|
||||
|
||||
|
||||
/**
|
||||
* Verifies if a non-admin user can log in
|
||||
*
|
||||
* @param context The relevant DSpace context
|
||||
* @return true if a non-admin user can log in
|
||||
*/
|
||||
boolean canNonAdminUserLogin(Context context) throws SQLException;
|
||||
}
|
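A usage sketch of the interface above (hypothetical class name; the service instance is assumed to be injected, and the caller's context must belong to an administrator or create() will throw AuthorizeException):

```java
import java.sql.SQLException;
import java.util.Date;

import org.dspace.alerts.AllowSessionsEnum;
import org.dspace.alerts.SystemWideAlert;
import org.dspace.alerts.service.SystemWideAlertService;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.Context;

public class MaintenanceAlertExample {

    public SystemWideAlert announceMaintenance(Context context, SystemWideAlertService alertService)
        throws SQLException, AuthorizeException {
        // Count down to one hour from now while keeping current sessions alive
        Date downtime = new Date(System.currentTimeMillis() + 60L * 60 * 1000);
        SystemWideAlert alert = alertService.create(context,
            "The repository will enter read-only maintenance shortly.",
            AllowSessionsEnum.ALLOW_CURRENT_SESSIONS_ONLY,
            downtime,
            true);

        // Once maintenance is over, deactivate rather than delete the alert
        alert.setActive(false);
        alertService.update(context, alert);
        return alert;
    }
}
```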
@@ -0,0 +1,689 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.bulkaccesscontrol;
|
||||
|
||||
import static org.apache.commons.collections4.CollectionUtils.isEmpty;
|
||||
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
|
||||
import static org.dspace.authorize.ResourcePolicy.TYPE_CUSTOM;
|
||||
import static org.dspace.authorize.ResourcePolicy.TYPE_INHERITED;
|
||||
import static org.dspace.core.Constants.CONTENT_BUNDLE_NAME;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.sql.SQLException;
|
||||
import java.text.DateFormat;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
import java.util.TimeZone;
|
||||
import java.util.UUID;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.dspace.app.bulkaccesscontrol.exception.BulkAccessControlException;
|
||||
import org.dspace.app.bulkaccesscontrol.model.AccessCondition;
|
||||
import org.dspace.app.bulkaccesscontrol.model.AccessConditionBitstream;
|
||||
import org.dspace.app.bulkaccesscontrol.model.AccessConditionItem;
|
||||
import org.dspace.app.bulkaccesscontrol.model.BulkAccessConditionConfiguration;
|
||||
import org.dspace.app.bulkaccesscontrol.model.BulkAccessControlInput;
|
||||
import org.dspace.app.bulkaccesscontrol.service.BulkAccessConditionConfigurationService;
|
||||
import org.dspace.app.mediafilter.factory.MediaFilterServiceFactory;
|
||||
import org.dspace.app.mediafilter.service.MediaFilterService;
|
||||
import org.dspace.app.util.DSpaceObjectUtilsImpl;
|
||||
import org.dspace.app.util.service.DSpaceObjectUtils;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.authorize.factory.AuthorizeServiceFactory;
|
||||
import org.dspace.authorize.service.ResourcePolicyService;
|
||||
import org.dspace.content.Bitstream;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.core.Constants;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.discovery.DiscoverQuery;
|
||||
import org.dspace.discovery.SearchService;
|
||||
import org.dspace.discovery.SearchServiceException;
|
||||
import org.dspace.discovery.SearchUtils;
|
||||
import org.dspace.discovery.indexobject.IndexableItem;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.eperson.factory.EPersonServiceFactory;
|
||||
import org.dspace.eperson.service.EPersonService;
|
||||
import org.dspace.scripts.DSpaceRunnable;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.dspace.submit.model.AccessConditionOption;
|
||||
import org.dspace.utils.DSpace;
|
||||
|
||||
/**
|
||||
* Implementation of {@link DSpaceRunnable} to perform a bulk access control via json file.
|
||||
*
|
||||
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
|
||||
*
|
||||
*/
|
||||
public class BulkAccessControl extends DSpaceRunnable<BulkAccessControlScriptConfiguration<BulkAccessControl>> {
|
||||
|
||||
private DSpaceObjectUtils dSpaceObjectUtils;
|
||||
|
||||
private SearchService searchService;
|
||||
|
||||
private ItemService itemService;
|
||||
|
||||
private String filename;
|
||||
|
||||
private List<String> uuids;
|
||||
|
||||
private Context context;
|
||||
|
||||
private BulkAccessConditionConfigurationService bulkAccessConditionConfigurationService;
|
||||
|
||||
private ResourcePolicyService resourcePolicyService;
|
||||
|
||||
protected EPersonService epersonService;
|
||||
|
||||
private ConfigurationService configurationService;
|
||||
|
||||
private MediaFilterService mediaFilterService;
|
||||
|
||||
private Map<String, AccessConditionOption> itemAccessConditions;
|
||||
|
||||
private Map<String, AccessConditionOption> uploadAccessConditions;
|
||||
|
||||
private final String ADD_MODE = "add";
|
||||
|
||||
private final String REPLACE_MODE = "replace";
|
||||
|
||||
private boolean help = false;
|
||||
|
||||
protected String eperson = null;
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public void setup() throws ParseException {
|
||||
|
||||
this.searchService = SearchUtils.getSearchService();
|
||||
this.itemService = ContentServiceFactory.getInstance().getItemService();
|
||||
this.resourcePolicyService = AuthorizeServiceFactory.getInstance().getResourcePolicyService();
|
||||
this.epersonService = EPersonServiceFactory.getInstance().getEPersonService();
|
||||
this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
mediaFilterService = MediaFilterServiceFactory.getInstance().getMediaFilterService();
|
||||
mediaFilterService.setLogHandler(handler);
|
||||
this.bulkAccessConditionConfigurationService = new DSpace().getServiceManager().getServiceByName(
|
||||
"bulkAccessConditionConfigurationService", BulkAccessConditionConfigurationService.class);
|
||||
this.dSpaceObjectUtils = new DSpace().getServiceManager().getServiceByName(
|
||||
DSpaceObjectUtilsImpl.class.getName(), DSpaceObjectUtilsImpl.class);
|
||||
|
||||
BulkAccessConditionConfiguration bulkAccessConditionConfiguration =
|
||||
bulkAccessConditionConfigurationService.getBulkAccessConditionConfiguration("default");
|
||||
|
||||
itemAccessConditions = bulkAccessConditionConfiguration
|
||||
.getItemAccessConditionOptions()
|
||||
.stream()
|
||||
.collect(Collectors.toMap(AccessConditionOption::getName, Function.identity()));
|
||||
|
||||
uploadAccessConditions = bulkAccessConditionConfiguration
|
||||
.getBitstreamAccessConditionOptions()
|
||||
.stream()
|
||||
.collect(Collectors.toMap(AccessConditionOption::getName, Function.identity()));
|
||||
|
||||
help = commandLine.hasOption('h');
|
||||
filename = commandLine.getOptionValue('f');
|
||||
uuids = commandLine.hasOption('u') ? Arrays.asList(commandLine.getOptionValues('u')) : null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void internalRun() throws Exception {
|
||||
|
||||
if (help) {
|
||||
printHelp();
|
||||
return;
|
||||
}
|
||||
|
||||
ObjectMapper mapper = new ObjectMapper();
|
||||
mapper.setTimeZone(TimeZone.getTimeZone("UTC"));
|
||||
BulkAccessControlInput accessControl;
|
||||
context = new Context(Context.Mode.BATCH_EDIT);
|
||||
setEPerson(context);
|
||||
|
||||
if (!isAuthorized(context)) {
|
||||
handler.logError("Current user is not eligible to execute script bulk-access-control");
|
||||
throw new AuthorizeException("Current user is not eligible to execute script bulk-access-control");
|
||||
}
|
||||
|
||||
if (uuids == null || uuids.size() == 0) {
|
||||
handler.logError("A target uuid must be provided with at least on uuid (run with -h flag for details)");
|
||||
throw new IllegalArgumentException("At least one target uuid must be provided");
|
||||
}
|
||||
|
||||
InputStream inputStream = handler.getFileStream(context, filename)
|
||||
.orElseThrow(() -> new IllegalArgumentException("Error reading file, the file couldn't be "
|
||||
+ "found for filename: " + filename));
|
||||
|
||||
try {
|
||||
accessControl = mapper.readValue(inputStream, BulkAccessControlInput.class);
|
||||
} catch (IOException e) {
|
||||
handler.logError("Error parsing json file " + e.getMessage());
|
||||
throw new IllegalArgumentException("Error parsing json file", e);
|
||||
}
|
||||
try {
|
||||
validate(accessControl);
|
||||
updateItemsAndBitstreamsPolices(accessControl);
|
||||
context.complete();
|
||||
} catch (Exception e) {
|
||||
handler.handleException(e);
|
||||
context.abort();
|
||||
}
|
||||
}
|
||||
|
||||
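// Illustrative only (not part of the commit): a JSON payload of the shape
// that validate() and updateItemsAndBitstreamsPolices() below expect. The
// property names follow the model classes imported above; the uuid and the
// access condition names/dates are made up and must match what is actually
// configured.
//
//   {
//     "item": {
//       "mode": "replace",
//       "accessConditions": [ { "name": "embargo", "startDate": "2024-01-01" } ]
//     },
//     "bitstream": {
//       "constraints": { "uuid": [ "<bitstream-uuid>" ] },
//       "mode": "add",
//       "accessConditions": [ { "name": "openaccess" } ]
//     }
//   }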
/**
|
||||
* Validate the mapped JSON data: it must provide item or bitstream
* information (or both). The item node is validated if provided,
* and the bitstream node is validated if provided.
|
||||
*
|
||||
* @param accessControl mapped json data
|
||||
* @throws SQLException if something goes wrong in the database
|
||||
* @throws BulkAccessControlException if accessControl is invalid
|
||||
*/
|
||||
private void validate(BulkAccessControlInput accessControl) throws SQLException {
|
||||
|
||||
AccessConditionItem item = accessControl.getItem();
|
||||
AccessConditionBitstream bitstream = accessControl.getBitstream();
|
||||
|
||||
if (Objects.isNull(item) && Objects.isNull(bitstream)) {
|
||||
handler.logError("item or bitstream node must be provided");
|
||||
throw new BulkAccessControlException("item or bitstream node must be provided");
|
||||
}
|
||||
|
||||
if (Objects.nonNull(item)) {
|
||||
validateItemNode(item);
|
||||
}
|
||||
|
||||
if (Objects.nonNull(bitstream)) {
|
||||
validateBitstreamNode(bitstream);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate the item node: the item mode must be provided with the value
* 'add' or 'replace'. If the mode equals 'add', the accessCondition
* information must be provided, and each access condition is also
* checked for validity.
|
||||
*
|
||||
* @param item the item node
|
||||
* @throws BulkAccessControlException if item node is invalid
|
||||
*/
|
||||
private void validateItemNode(AccessConditionItem item) {
|
||||
String mode = item.getMode();
|
||||
List<AccessCondition> accessConditions = item.getAccessConditions();
|
||||
|
||||
if (StringUtils.isEmpty(mode)) {
|
||||
handler.logError("item mode node must be provided");
|
||||
throw new BulkAccessControlException("item mode node must be provided");
|
||||
} else if (!(StringUtils.equalsAny(mode, ADD_MODE, REPLACE_MODE))) {
|
||||
handler.logError("wrong value for item mode<" + mode + ">");
|
||||
throw new BulkAccessControlException("wrong value for item mode<" + mode + ">");
|
||||
} else if (ADD_MODE.equals(mode) && isEmpty(accessConditions)) {
|
||||
handler.logError("accessConditions of item must be provided with mode<" + ADD_MODE + ">");
|
||||
throw new BulkAccessControlException(
|
||||
"accessConditions of item must be provided with mode<" + ADD_MODE + ">");
|
||||
}
|
||||
|
||||
for (AccessCondition accessCondition : accessConditions) {
|
||||
validateAccessCondition(accessCondition);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate the bitstream node: the bitstream mode must be provided with the
* value 'add' or 'replace'. If the mode equals 'add', the accessConditions
* information must be provided. The constraint information and each access
* condition are also checked for validity.
|
||||
*
|
||||
* @param bitstream the bitstream node
|
||||
* @throws SQLException if something goes wrong in the database
|
||||
* @throws BulkAccessControlException if bitstream node is invalid
|
||||
*/
|
||||
private void validateBitstreamNode(AccessConditionBitstream bitstream) throws SQLException {
|
||||
String mode = bitstream.getMode();
|
||||
List<AccessCondition> accessConditions = bitstream.getAccessConditions();
|
||||
|
||||
if (StringUtils.isEmpty(mode)) {
|
||||
handler.logError("bitstream mode node must be provided");
|
||||
throw new BulkAccessControlException("bitstream mode node must be provided");
|
||||
} else if (!(StringUtils.equalsAny(mode, ADD_MODE, REPLACE_MODE))) {
|
||||
handler.logError("wrong value for bitstream mode<" + mode + ">");
|
||||
throw new BulkAccessControlException("wrong value for bitstream mode<" + mode + ">");
|
||||
} else if (ADD_MODE.equals(mode) && isEmpty(accessConditions)) {
|
||||
handler.logError("accessConditions of bitstream must be provided with mode<" + ADD_MODE + ">");
|
||||
throw new BulkAccessControlException(
|
||||
"accessConditions of bitstream must be provided with mode<" + ADD_MODE + ">");
|
||||
}
|
||||
|
||||
validateConstraint(bitstream);
|
||||
|
||||
for (AccessCondition accessCondition : bitstream.getAccessConditions()) {
|
||||
validateAccessCondition(accessCondition);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate the constraint node if provided. A constraint isn't supported
* when multiple uuids are provided, or when the uuid isn't an Item.
|
||||
*
|
||||
* @param bitstream the bitstream node
|
||||
* @throws SQLException if something goes wrong in the database
|
||||
* @throws BulkAccessControlException if constraint node is invalid
|
||||
*/
|
||||
private void validateConstraint(AccessConditionBitstream bitstream) throws SQLException {
|
||||
if (uuids.size() > 1 && containsConstraints(bitstream)) {
|
||||
handler.logError("constraint isn't supported when multiple uuids are provided");
|
||||
throw new BulkAccessControlException("constraint isn't supported when multiple uuids are provided");
|
||||
} else if (uuids.size() == 1 && containsConstraints(bitstream)) {
|
||||
DSpaceObject dso =
|
||||
dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(uuids.get(0)));
|
||||
|
||||
if (Objects.nonNull(dso) && dso.getType() != Constants.ITEM) {
|
||||
handler.logError("constraint is not supported when uuid isn't an Item");
|
||||
throw new BulkAccessControlException("constraint is not supported when uuid isn't an Item");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate the access condition: its name must match one of the configured
* access conditions, then {@link AccessConditionOption#validateResourcePolicy(
* Context, String, Date, Date)} is called; if an exception occurs, the
* access condition is invalid.
|
||||
*
|
||||
* @param accessCondition the accessCondition
|
||||
* @throws BulkAccessControlException if the accessCondition is invalid
|
||||
*/
|
||||
private void validateAccessCondition(AccessCondition accessCondition) {
|
||||
|
||||
if (!itemAccessConditions.containsKey(accessCondition.getName())) {
|
||||
handler.logError("wrong access condition <" + accessCondition.getName() + ">");
|
||||
throw new BulkAccessControlException("wrong access condition <" + accessCondition.getName() + ">");
|
||||
}
|
||||
|
||||
try {
|
||||
itemAccessConditions.get(accessCondition.getName()).validateResourcePolicy(
|
||||
context, accessCondition.getName(), accessCondition.getStartDate(), accessCondition.getEndDate());
|
||||
} catch (Exception e) {
|
||||
handler.logError("invalid access condition, " + e.getMessage());
|
||||
handler.handleException(e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* find all items of provided {@link #uuids} from solr,
|
||||
* then update the resource policies of items
|
||||
* or bitstreams of items (only bitstreams of ORIGINAL bundles)
|
||||
* and derivative bitstreams, or both of them.
|
||||
*
|
||||
* @param accessControl the access control input
|
||||
* @throws SQLException if something goes wrong in the database
|
||||
* @throws SearchServiceException if a search error occurs
|
||||
* @throws AuthorizeException if an authorization error occurs
|
||||
*/
|
||||
private void updateItemsAndBitstreamsPolices(BulkAccessControlInput accessControl)
|
||||
throws SQLException, SearchServiceException, AuthorizeException {
|
||||
|
||||
int counter = 0;
|
||||
int start = 0;
|
||||
int limit = 20;
|
||||
|
||||
String query = buildSolrQuery(uuids);
|
||||
|
||||
Iterator<Item> itemIterator = findItems(query, start, limit);
|
||||
|
||||
while (itemIterator.hasNext()) {
|
||||
|
||||
Item item = context.reloadEntity(itemIterator.next());
|
||||
|
||||
if (Objects.nonNull(accessControl.getItem())) {
|
||||
updateItemPolicies(item, accessControl);
|
||||
}
|
||||
|
||||
if (Objects.nonNull(accessControl.getBitstream())) {
|
||||
updateBitstreamsPolicies(item, accessControl);
|
||||
}
|
||||
|
||||
context.commit();
|
||||
context.uncacheEntity(item);
|
||||
counter++;
|
||||
|
||||
if (counter == limit) {
|
||||
counter = 0;
|
||||
start += limit;
|
||||
itemIterator = findItems(query, start, limit);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private String buildSolrQuery(List<String> uuids) throws SQLException {
|
||||
String [] query = new String[uuids.size()];
|
||||
|
||||
for (int i = 0 ; i < query.length ; i++) {
|
||||
DSpaceObject dso = dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(uuids.get(i)));
|
||||
|
||||
if (dso.getType() == Constants.COMMUNITY) {
|
||||
query[i] = "location.comm:" + dso.getID();
|
||||
} else if (dso.getType() == Constants.COLLECTION) {
|
||||
query[i] = "location.coll:" + dso.getID();
|
||||
} else if (dso.getType() == Constants.ITEM) {
|
||||
query[i] = "search.resourceid:" + dso.getID();
|
||||
}
|
||||
}
|
||||
return StringUtils.joinWith(" OR ", query);
|
||||
}
|
||||
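// Example (illustrative only, not part of the commit): given one community,
// one collection and one item uuid, buildSolrQuery above yields a query like
//
//   location.comm:<community-uuid> OR location.coll:<collection-uuid> OR search.resourceid:<item-uuid>
//
// i.e. each uuid is translated into the Solr field matching its object type
// and the clauses are OR-ed together.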
|
||||
private Iterator<Item> findItems(String query, int start, int limit)
|
||||
throws SearchServiceException {
|
||||
|
||||
DiscoverQuery discoverQuery = buildDiscoveryQuery(query, start, limit);
|
||||
|
||||
return searchService.search(context, discoverQuery)
|
||||
.getIndexableObjects()
|
||||
.stream()
|
||||
.map(indexableObject ->
|
||||
((IndexableItem) indexableObject).getIndexedObject())
|
||||
.collect(Collectors.toList())
|
||||
.iterator();
|
||||
}
|
||||
|
||||
private DiscoverQuery buildDiscoveryQuery(String query, int start, int limit) {
|
||||
DiscoverQuery discoverQuery = new DiscoverQuery();
|
||||
discoverQuery.setDSpaceObjectFilter(IndexableItem.TYPE);
|
||||
discoverQuery.setQuery(query);
|
||||
discoverQuery.setStart(start);
|
||||
discoverQuery.setMaxResults(limit);
|
||||
|
||||
return discoverQuery;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the item resource policies. When the mode equals 'replace', all
* current resource policies of types 'TYPE_CUSTOM' and 'TYPE_INHERITED'
* are removed first; then the new resource policies are set.
|
||||
*
|
||||
* @param item the item
|
||||
* @param accessControl the access control input
|
||||
* @throws SQLException if something goes wrong in the database
|
||||
* @throws AuthorizeException if an authorization error occurs
|
||||
*/
|
||||
private void updateItemPolicies(Item item, BulkAccessControlInput accessControl)
|
||||
throws SQLException, AuthorizeException {
|
||||
|
||||
AccessConditionItem acItem = accessControl.getItem();
|
||||
|
||||
if (REPLACE_MODE.equals(acItem.getMode())) {
|
||||
removeReadPolicies(item, TYPE_CUSTOM);
|
||||
removeReadPolicies(item, TYPE_INHERITED);
|
||||
}
|
||||
|
||||
setItemPolicies(item, accessControl);
|
||||
logInfo(acItem.getAccessConditions(), acItem.getMode(), item);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the new resource policies of the item, then call
* {@link ItemService#adjustItemPolicies(
* Context, Item, Collection)} to adjust the item's default policies.
|
||||
*
|
||||
* @param item the item
|
||||
* @param accessControl the access control input
|
||||
* @throws SQLException if something goes wrong in the database
|
||||
* @throws AuthorizeException if an authorization error occurs
|
||||
*/
|
||||
private void setItemPolicies(Item item, BulkAccessControlInput accessControl)
|
||||
throws SQLException, AuthorizeException {
|
||||
|
||||
accessControl
|
||||
.getItem()
|
||||
.getAccessConditions()
|
||||
.forEach(accessCondition -> createResourcePolicy(item, accessCondition,
|
||||
itemAccessConditions.get(accessCondition.getName())));
|
||||
|
||||
itemService.adjustItemPolicies(context, item, item.getOwningCollection());
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the resource policies of all of the item's bitstreams, or of the
* bitstreams specified in the constraint node, plus their derivative
* bitstreams.
|
||||
*
|
||||
* <strong>NOTE:</strong> only bitstreams of ORIGINAL bundles
|
||||
*
|
||||
* @param item the item contains bitstreams
|
||||
* @param accessControl the access control input
|
||||
*/
|
||||
private void updateBitstreamsPolicies(Item item, BulkAccessControlInput accessControl) {
|
||||
AccessConditionBitstream.Constraint constraints = accessControl.getBitstream().getConstraints();
|
||||
|
||||
// look over all the bundles and force initialization of bitstreams collection
|
||||
// to avoid lazy initialization exception
|
||||
long count = item.getBundles()
|
||||
.stream()
|
||||
.flatMap(bundle ->
|
||||
bundle.getBitstreams().stream())
|
||||
.count();
|
||||
|
||||
item.getBundles(CONTENT_BUNDLE_NAME).stream()
|
||||
.flatMap(bundle -> bundle.getBitstreams().stream())
|
||||
.filter(bitstream -> constraints == null ||
|
||||
constraints.getUuid() == null ||
|
||||
constraints.getUuid().size() == 0 ||
|
||||
constraints.getUuid().contains(bitstream.getID().toString()))
|
||||
.forEach(bitstream -> updateBitstreamPolicies(bitstream, item, accessControl));
|
||||
}
|
||||
|
||||
/**
|
||||
* Check that the bitstream node exists, contains a constraint node,
* and that the constraint contains uuids.
*
* @param bitstream the bitstream node
* @return true when the uuids of the bitstream's constraint are not empty,
*         otherwise false
|
||||
*/
|
||||
private boolean containsConstraints(AccessConditionBitstream bitstream) {
|
||||
return Objects.nonNull(bitstream) &&
|
||||
Objects.nonNull(bitstream.getConstraints()) &&
|
||||
isNotEmpty(bitstream.getConstraints().getUuid());
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the bitstream resource policies. When the mode equals 'replace', all
* current resource policies of types 'TYPE_CUSTOM' and 'TYPE_INHERITED'
* are removed first; then the new resource policies are set.
|
||||
*
|
||||
* @param bitstream the bitstream
|
||||
* @param item the item of bitstream
|
||||
* @param accessControl the access control input
|
||||
* @throws RuntimeException if something goes wrong in the database
|
||||
* or an authorization error occurs
|
||||
*/
|
||||
private void updateBitstreamPolicies(Bitstream bitstream, Item item, BulkAccessControlInput accessControl) {
|
||||
|
||||
AccessConditionBitstream acBitstream = accessControl.getBitstream();
|
||||
|
||||
if (REPLACE_MODE.equals(acBitstream.getMode())) {
|
||||
removeReadPolicies(bitstream, TYPE_CUSTOM);
|
||||
removeReadPolicies(bitstream, TYPE_INHERITED);
|
||||
}
|
||||
|
||||
try {
|
||||
setBitstreamPolicies(bitstream, item, accessControl);
|
||||
logInfo(acBitstream.getAccessConditions(), acBitstream.getMode(), bitstream);
|
||||
} catch (SQLException | AuthorizeException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
 * Remove the DSpace object's READ policies of the given type.
 *
 * @param dso the DSpace object
 * @param type the resource policy type
 * @throws BulkAccessControlException if something goes wrong
 * in the database or an authorization error occurs
 */
|
||||
private void removeReadPolicies(DSpaceObject dso, String type) {
|
||||
try {
|
||||
resourcePolicyService.removePolicies(context, dso, type, Constants.READ);
|
||||
} catch (SQLException | AuthorizeException e) {
|
||||
throw new BulkAccessControlException(e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Create the new resource policies of the bitstream,
 * then call {@link ItemService#adjustBitstreamPolicies(
 * Context, Item, Collection, Bitstream)} to adjust the bitstream's default policies,
 * and also update the resource policies of its derivative bitstreams.
 *
 * @param bitstream the bitstream
 * @param item the item of the bitstream
 * @param accessControl the access control input
 * @throws SQLException if something goes wrong in the database
 * @throws AuthorizeException if an authorization error occurs
 */
|
||||
private void setBitstreamPolicies(Bitstream bitstream, Item item, BulkAccessControlInput accessControl)
|
||||
throws SQLException, AuthorizeException {
|
||||
|
||||
accessControl.getBitstream()
|
||||
.getAccessConditions()
|
||||
.forEach(accessCondition -> createResourcePolicy(bitstream, accessCondition,
|
||||
uploadAccessConditions.get(accessCondition.getName())));
|
||||
|
||||
itemService.adjustBitstreamPolicies(context, item, item.getOwningCollection(), bitstream);
|
||||
mediaFilterService.updatePoliciesOfDerivativeBitstreams(context, item, bitstream);
|
||||
}
|
||||
|
||||
/**
 * Create the resource policy from the information
 * that comes from the access condition.
 *
 * @param obj the DSpace object
 * @param accessCondition the access condition
 * @param accessConditionOption the access condition option
 * @throws BulkAccessControlException if an exception occurs
 */
|
||||
private void createResourcePolicy(DSpaceObject obj, AccessCondition accessCondition,
|
||||
AccessConditionOption accessConditionOption) {
|
||||
|
||||
String name = accessCondition.getName();
|
||||
String description = accessCondition.getDescription();
|
||||
Date startDate = accessCondition.getStartDate();
|
||||
Date endDate = accessCondition.getEndDate();
|
||||
|
||||
try {
|
||||
accessConditionOption.createResourcePolicy(context, obj, name, description, startDate, endDate);
|
||||
} catch (Exception e) {
|
||||
throw new BulkAccessControlException(e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Set the EPerson in the context.
 *
 * @param context the context
 * @throws SQLException if a database error occurs
 */
|
||||
protected void setEPerson(Context context) throws SQLException {
|
||||
EPerson myEPerson = epersonService.find(context, this.getEpersonIdentifier());
|
||||
|
||||
if (myEPerson == null) {
|
||||
handler.logError("EPerson cannot be found: " + this.getEpersonIdentifier());
|
||||
throw new UnsupportedOperationException("EPerson cannot be found: " + this.getEpersonIdentifier());
|
||||
}
|
||||
|
||||
context.setCurrentUser(myEPerson);
|
||||
}
|
||||
|
||||
private void logInfo(List<AccessCondition> accessConditions, String mode, DSpaceObject dso) {
|
||||
String type = dso.getClass().getSimpleName();
|
||||
|
||||
if (REPLACE_MODE.equals(mode) && isEmpty(accessConditions)) {
|
||||
handler.logInfo("Cleaning " + type + " {" + dso.getID() + "} policies");
|
||||
handler.logInfo("Inheriting policies from owning Collection in " + type + " {" + dso.getID() + "}");
|
||||
return;
|
||||
}
|
||||
|
||||
StringBuilder message = new StringBuilder();
|
||||
message.append(mode.equals(ADD_MODE) ? "Adding " : "Replacing ")
|
||||
.append(type)
|
||||
.append(" {")
|
||||
.append(dso.getID())
|
||||
.append("} policy")
|
||||
.append(mode.equals(ADD_MODE) ? " with " : " to ")
|
||||
.append("access conditions:");
|
||||
|
||||
appendAccessConditionsInfo(message, accessConditions);
|
||||
|
||||
handler.logInfo(message.toString());
|
||||
|
||||
if (REPLACE_MODE.equals(mode) && isAppendModeEnabled()) {
|
||||
handler.logInfo("Inheriting policies from owning Collection in " + type + " {" + dso.getID() + "}");
|
||||
}
|
||||
}
|
||||
|
||||
private void appendAccessConditionsInfo(StringBuilder message, List<AccessCondition> accessConditions) {
|
||||
DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
|
||||
message.append("{");
|
||||
|
||||
for (int i = 0; i < accessConditions.size(); i++) {
|
||||
message.append(accessConditions.get(i).getName());
|
||||
|
||||
Optional.ofNullable(accessConditions.get(i).getStartDate())
|
||||
.ifPresent(date -> message.append(", start_date=" + dateFormat.format(date)));
|
||||
|
||||
Optional.ofNullable(accessConditions.get(i).getEndDate())
|
||||
.ifPresent(date -> message.append(", end_date=" + dateFormat.format(date)));
|
||||
|
||||
if (i != accessConditions.size() - 1) {
|
||||
message.append(", ");
|
||||
}
|
||||
}
|
||||
|
||||
message.append("}");
|
||||
}
|
||||
|
||||
private boolean isAppendModeEnabled() {
|
||||
return configurationService.getBooleanProperty("core.authorization.installitem.inheritance-read.append-mode");
|
||||
}
|
||||
|
||||
protected boolean isAuthorized(Context context) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public BulkAccessControlScriptConfiguration<BulkAccessControl> getScriptConfiguration() {
|
||||
return new DSpace().getServiceManager()
|
||||
.getServiceByName("bulk-access-control", BulkAccessControlScriptConfiguration.class);
|
||||
}
|
||||
|
||||
}
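A hypothetical sketch of resolving the script configuration outside of this class, using the same "bulk-access-control" service name as getScriptConfiguration() above; it assumes a running DSpace kernel/service manager and only lists the options the script exposes.

import org.apache.commons.cli.Options;
import org.dspace.app.bulkaccesscontrol.BulkAccessControlScriptConfiguration;
import org.dspace.utils.DSpace;

public class BulkAccessControlConfigLookupSketch {
    public static void main(String[] args) {
        BulkAccessControlScriptConfiguration<?> configuration = new DSpace().getServiceManager()
                .getServiceByName("bulk-access-control", BulkAccessControlScriptConfiguration.class);

        // Print the short option names and descriptions declared by the configuration.
        Options options = configuration.getOptions();
        options.getOptions().forEach(option ->
                System.out.println("-" + option.getOpt() + " : " + option.getDescription()));
    }
}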
|
@@ -0,0 +1,66 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.bulkaccesscontrol;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.Arrays;
|
||||
import java.util.UUID;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.scripts.DSpaceCommandLineParameter;
|
||||
|
||||
/**
|
||||
* Extension of {@link BulkAccessControl} for CLI.
|
||||
*
|
||||
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
|
||||
*
|
||||
*/
|
||||
public class BulkAccessControlCli extends BulkAccessControl {
|
||||
|
||||
@Override
|
||||
protected void setEPerson(Context context) throws SQLException {
|
||||
EPerson myEPerson;
|
||||
eperson = commandLine.getOptionValue('e');
|
||||
|
||||
if (eperson == null) {
|
||||
handler.logError("An eperson to do the the Bulk Access Control must be specified " +
|
||||
"(run with -h flag for details)");
|
||||
throw new UnsupportedOperationException("An eperson to do the Bulk Access Control must be specified");
|
||||
}
|
||||
|
||||
if (StringUtils.contains(eperson, '@')) {
|
||||
myEPerson = epersonService.findByEmail(context, eperson);
|
||||
} else {
|
||||
myEPerson = epersonService.find(context, UUID.fromString(eperson));
|
||||
}
|
||||
|
||||
if (myEPerson == null) {
|
||||
handler.logError("EPerson cannot be found: " + eperson + " (run with -h flag for details)");
|
||||
throw new UnsupportedOperationException("EPerson cannot be found: " + eperson);
|
||||
}
|
||||
|
||||
context.setCurrentUser(myEPerson);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean isAuthorized(Context context) {
|
||||
|
||||
if (context.getCurrentUser() == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return getScriptConfiguration().isAllowedToExecute(context,
|
||||
Arrays.stream(commandLine.getOptions())
|
||||
.map(option ->
|
||||
new DSpaceCommandLineParameter("-" + option.getOpt(), option.getValue()))
|
||||
.collect(Collectors.toList()));
|
||||
}
|
||||
}
|
@@ -0,0 +1,42 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.bulkaccesscontrol;
|
||||
|
||||
import java.io.InputStream;
|
||||
|
||||
import org.apache.commons.cli.Options;
|
||||
|
||||
/**
|
||||
* Extension of {@link BulkAccessControlScriptConfiguration} for CLI.
|
||||
*
|
||||
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
|
||||
*
|
||||
*/
|
||||
public class BulkAccessControlCliScriptConfiguration<T extends BulkAccessControlCli>
|
||||
extends BulkAccessControlScriptConfiguration<T> {
|
||||
|
||||
@Override
|
||||
public Options getOptions() {
|
||||
Options options = new Options();
|
||||
|
||||
options.addOption("u", "uuid", true, "target uuids of communities/collections/items");
|
||||
options.getOption("u").setType(String.class);
|
||||
options.getOption("u").setRequired(true);
|
||||
|
||||
options.addOption("f", "file", true, "source json file");
|
||||
options.getOption("f").setType(InputStream.class);
|
||||
options.getOption("f").setRequired(true);
|
||||
|
||||
options.addOption("e", "eperson", true, "email of EPerson used to perform actions");
|
||||
options.getOption("e").setRequired(true);
|
||||
|
||||
options.addOption("h", "help", false, "help");
|
||||
|
||||
return options;
|
||||
}
|
||||
}
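For illustration, a minimal standalone sketch (an assumption, not part of the DSpace script framework) of how commons-cli parses the options declared above; the flags are left non-mandatory here so the sketch can print its own help when arguments are missing.

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;

public class BulkAccessControlCliOptionsSketch {
    public static void main(String[] args) throws Exception {
        Options options = new Options();
        options.addOption("u", "uuid", true, "target uuids of communities/collections/items");
        options.addOption("f", "file", true, "source json file");
        options.addOption("e", "eperson", true, "email of EPerson used to perform actions");
        options.addOption("h", "help", false, "help");

        // e.g. args = -u <dso-uuid> -f data.json -e admin@example.org
        CommandLine commandLine = new DefaultParser().parse(options, args);
        if (commandLine.hasOption('h') || !commandLine.hasOption('u')) {
            new HelpFormatter().printHelp("bulk-access-control", options);
            return;
        }
        System.out.println("uuid(s): " + String.join(", ", commandLine.getOptionValues('u')));
        System.out.println("file:    " + commandLine.getOptionValue('f'));
        System.out.println("eperson: " + commandLine.getOptionValue('e'));
    }
}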
|
@@ -0,0 +1,110 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.bulkaccesscontrol;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.UUID;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.dspace.app.util.DSpaceObjectUtilsImpl;
|
||||
import org.dspace.app.util.service.DSpaceObjectUtils;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.scripts.DSpaceCommandLineParameter;
|
||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||
import org.dspace.utils.DSpace;
|
||||
|
||||
/**
|
||||
* Script configuration for {@link BulkAccessControl}.
|
||||
*
|
||||
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
|
||||
*
|
||||
* @param <T> the {@link BulkAccessControl} type
|
||||
*/
|
||||
public class BulkAccessControlScriptConfiguration<T extends BulkAccessControl> extends ScriptConfiguration<T> {
|
||||
|
||||
private Class<T> dspaceRunnableClass;
|
||||
|
||||
@Override
|
||||
public boolean isAllowedToExecute(Context context, List<DSpaceCommandLineParameter> commandLineParameters) {
|
||||
|
||||
try {
|
||||
if (Objects.isNull(commandLineParameters)) {
|
||||
return authorizeService.isAdmin(context) || authorizeService.isComColAdmin(context)
|
||||
|| authorizeService.isItemAdmin(context);
|
||||
} else {
|
||||
List<String> dspaceObjectIDs =
|
||||
commandLineParameters.stream()
|
||||
.filter(parameter -> "-u".equals(parameter.getName()))
|
||||
.map(DSpaceCommandLineParameter::getValue)
|
||||
.collect(Collectors.toList());
|
||||
|
||||
DSpaceObjectUtils dSpaceObjectUtils = new DSpace().getServiceManager().getServiceByName(
|
||||
DSpaceObjectUtilsImpl.class.getName(), DSpaceObjectUtilsImpl.class);
|
||||
|
||||
for (String dspaceObjectID : dspaceObjectIDs) {
|
||||
|
||||
DSpaceObject dso = dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(dspaceObjectID));
|
||||
|
||||
if (Objects.isNull(dso)) {
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
|
||||
if (!authorizeService.isAdmin(context, dso)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (SQLException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Options getOptions() {
|
||||
if (options == null) {
|
||||
Options options = new Options();
|
||||
|
||||
options.addOption("u", "uuid", true, "target uuids of communities/collections/items");
|
||||
options.getOption("u").setType(String.class);
|
||||
options.getOption("u").setRequired(true);
|
||||
|
||||
options.addOption("f", "file", true, "source json file");
|
||||
options.getOption("f").setType(InputStream.class);
|
||||
options.getOption("f").setRequired(true);
|
||||
|
||||
options.addOption("h", "help", false, "help");
|
||||
|
||||
super.options = options;
|
||||
}
|
||||
return options;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<T> getDspaceRunnableClass() {
|
||||
return dspaceRunnableClass;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generic setter for the dspaceRunnableClass
|
||||
*
|
||||
* @param dspaceRunnableClass The dspaceRunnableClass to be set on this
* BulkAccessControlScriptConfiguration
|
||||
*/
|
||||
@Override
|
||||
public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
|
||||
this.dspaceRunnableClass = dspaceRunnableClass;
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,48 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.bulkaccesscontrol.exception;
|
||||
|
||||
/**
|
||||
* Exception for errors that occur during the bulk access control
|
||||
*
|
||||
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
|
||||
*
|
||||
*/
|
||||
public class BulkAccessControlException extends RuntimeException {
|
||||
|
||||
private static final long serialVersionUID = -74730626862418515L;
|
||||
|
||||
/**
|
||||
* Constructor with error message and cause.
|
||||
*
|
||||
* @param message the error message
|
||||
* @param cause the error cause
|
||||
*/
|
||||
public BulkAccessControlException(String message, Throwable cause) {
|
||||
super(message, cause);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor with error message.
|
||||
*
|
||||
* @param message the error message
|
||||
*/
|
||||
public BulkAccessControlException(String message) {
|
||||
super(message);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor with error cause.
|
||||
*
|
||||
* @param cause the error cause
|
||||
*/
|
||||
public BulkAccessControlException(Throwable cause) {
|
||||
super(cause);
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,59 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.bulkaccesscontrol.model;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
|
||||
import org.dspace.app.bulkaccesscontrol.BulkAccessControl;
|
||||
import org.dspace.util.MultiFormatDateDeserializer;
|
||||
|
||||
/**
|
||||
* Class that models the values of an Access Condition as expressed in the {@link BulkAccessControl} input file
|
||||
*
|
||||
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
|
||||
*/
|
||||
public class AccessCondition {
|
||||
|
||||
private String name;
|
||||
|
||||
private String description;
|
||||
|
||||
@JsonDeserialize(using = MultiFormatDateDeserializer.class)
|
||||
private Date startDate;
|
||||
|
||||
@JsonDeserialize(using = MultiFormatDateDeserializer.class)
|
||||
private Date endDate;
|
||||
|
||||
public AccessCondition() {
|
||||
}
|
||||
|
||||
public AccessCondition(String name, String description, Date startDate, Date endDate) {
|
||||
this.name = name;
|
||||
this.description = description;
|
||||
this.startDate = startDate;
|
||||
this.endDate = endDate;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
public Date getStartDate() {
|
||||
return startDate;
|
||||
}
|
||||
|
||||
public Date getEndDate() {
|
||||
return endDate;
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,69 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.bulkaccesscontrol.model;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.app.bulkaccesscontrol.BulkAccessControl;
|
||||
|
||||
/**
|
||||
* Class that models the value of the bitstream node
* from the JSON input file of the {@link BulkAccessControl}
|
||||
*
|
||||
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
|
||||
*/
|
||||
public class AccessConditionBitstream {
|
||||
|
||||
private String mode;
|
||||
|
||||
private Constraint constraints;
|
||||
|
||||
private List<AccessCondition> accessConditions;
|
||||
|
||||
public String getMode() {
|
||||
return mode;
|
||||
}
|
||||
|
||||
public void setMode(String mode) {
|
||||
this.mode = mode;
|
||||
}
|
||||
|
||||
public Constraint getConstraints() {
|
||||
return constraints;
|
||||
}
|
||||
|
||||
public void setConstraints(Constraint constraints) {
|
||||
this.constraints = constraints;
|
||||
}
|
||||
|
||||
public List<AccessCondition> getAccessConditions() {
|
||||
if (accessConditions == null) {
|
||||
return new ArrayList<>();
|
||||
}
|
||||
return accessConditions;
|
||||
}
|
||||
|
||||
public void setAccessConditions(List<AccessCondition> accessConditions) {
|
||||
this.accessConditions = accessConditions;
|
||||
}
|
||||
|
||||
public class Constraint {
|
||||
|
||||
private List<String> uuid;
|
||||
|
||||
public List<String> getUuid() {
|
||||
return uuid;
|
||||
}
|
||||
|
||||
public void setUuid(List<String> uuid) {
|
||||
this.uuid = uuid;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,45 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.bulkaccesscontrol.model;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.app.bulkaccesscontrol.BulkAccessControl;
|
||||
|
||||
/**
|
||||
* Class that models the value of the item node
* from the JSON input file of the {@link BulkAccessControl}
|
||||
*
|
||||
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
|
||||
*/
|
||||
public class AccessConditionItem {
|
||||
|
||||
String mode;
|
||||
|
||||
List<AccessCondition> accessConditions;
|
||||
|
||||
public String getMode() {
|
||||
return mode;
|
||||
}
|
||||
|
||||
public void setMode(String mode) {
|
||||
this.mode = mode;
|
||||
}
|
||||
|
||||
public List<AccessCondition> getAccessConditions() {
|
||||
if (accessConditions == null) {
|
||||
return new ArrayList<>();
|
||||
}
|
||||
return accessConditions;
|
||||
}
|
||||
|
||||
public void setAccessConditions(List<AccessCondition> accessConditions) {
|
||||
this.accessConditions = accessConditions;
|
||||
}
|
||||
}
|
@@ -0,0 +1,50 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.bulkaccesscontrol.model;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.submit.model.AccessConditionOption;
|
||||
|
||||
/**
|
||||
* A named collection of access condition options used by the bulk access control.
|
||||
*
|
||||
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
|
||||
*/
|
||||
public class BulkAccessConditionConfiguration {
|
||||
|
||||
private String name;
|
||||
private List<AccessConditionOption> itemAccessConditionOptions;
|
||||
private List<AccessConditionOption> bitstreamAccessConditionOptions;
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public List<AccessConditionOption> getItemAccessConditionOptions() {
|
||||
return itemAccessConditionOptions;
|
||||
}
|
||||
|
||||
public void setItemAccessConditionOptions(
|
||||
List<AccessConditionOption> itemAccessConditionOptions) {
|
||||
this.itemAccessConditionOptions = itemAccessConditionOptions;
|
||||
}
|
||||
|
||||
public List<AccessConditionOption> getBitstreamAccessConditionOptions() {
|
||||
return bitstreamAccessConditionOptions;
|
||||
}
|
||||
|
||||
public void setBitstreamAccessConditionOptions(
|
||||
List<AccessConditionOption> bitstreamAccessConditionOptions) {
|
||||
this.bitstreamAccessConditionOptions = bitstreamAccessConditionOptions;
|
||||
}
|
||||
}
|
@@ -0,0 +1,72 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.bulkaccesscontrol.model;
|
||||
|
||||
import org.dspace.app.bulkaccesscontrol.BulkAccessControl;
|
||||
|
||||
/**
|
||||
* Class that models the content of the JSON file used as input for the {@link BulkAccessControl}
|
||||
*
|
||||
* <code> <br/>
|
||||
* { <br/>
|
||||
* item: { <br/>
|
||||
* mode: "replace", <br/>
|
||||
* accessConditions: [ <br/>
|
||||
* { <br/>
|
||||
* "name": "openaccess" <br/>
|
||||
* } <br/>
|
||||
* ] <br/>
|
||||
* }, <br/>
|
||||
* bitstream: { <br/>
|
||||
* constraints: { <br/>
|
||||
* uuid: [bit-uuid1, bit-uuid2, ..., bit-uuidN], <br/>
|
||||
* }, <br/>
|
||||
* mode: "add", <br/>
|
||||
* accessConditions: [ <br/>
|
||||
* { <br/>
|
||||
* "name": "embargo", <br/>
|
||||
* "startDate": "2024-06-24T23:59:59.999+0000" <br/>
|
||||
* } <br/>
|
||||
* ] <br/>
|
||||
* } <br/>
|
||||
* }
|
||||
* </code>
|
||||
*
|
||||
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
|
||||
*/
|
||||
public class BulkAccessControlInput {
|
||||
|
||||
AccessConditionItem item;
|
||||
|
||||
AccessConditionBitstream bitstream;
|
||||
|
||||
public BulkAccessControlInput() {
|
||||
}
|
||||
|
||||
public BulkAccessControlInput(AccessConditionItem item,
|
||||
AccessConditionBitstream bitstream) {
|
||||
this.item = item;
|
||||
this.bitstream = bitstream;
|
||||
}
|
||||
|
||||
public AccessConditionItem getItem() {
|
||||
return item;
|
||||
}
|
||||
|
||||
public void setItem(AccessConditionItem item) {
|
||||
this.item = item;
|
||||
}
|
||||
|
||||
public AccessConditionBitstream getBitstream() {
|
||||
return bitstream;
|
||||
}
|
||||
|
||||
public void setBitstream(AccessConditionBitstream bitstream) {
|
||||
this.bitstream = bitstream;
|
||||
}
|
||||
}
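As a usage illustration, a minimal sketch that builds the same structure programmatically with the model classes above, equivalent to an input file whose item node uses "replace" mode with a single "openaccess" access condition and no bitstream node.

import java.util.Arrays;

import org.dspace.app.bulkaccesscontrol.model.AccessCondition;
import org.dspace.app.bulkaccesscontrol.model.AccessConditionItem;
import org.dspace.app.bulkaccesscontrol.model.BulkAccessControlInput;

public class BulkAccessControlInputSketch {
    public static void main(String[] args) {
        AccessConditionItem item = new AccessConditionItem();
        item.setMode("replace");
        item.setAccessConditions(Arrays.asList(
                new AccessCondition("openaccess", null, null, null)));

        // No bitstream node: only the item policies would be affected.
        BulkAccessControlInput input = new BulkAccessControlInput(item, null);
        System.out.println("item mode: " + input.getItem().getMode());
        System.out.println("item access conditions: " + input.getItem().getAccessConditions().size());
    }
}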
|
@@ -0,0 +1,45 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.bulkaccesscontrol.service;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.collections4.CollectionUtils;
|
||||
import org.dspace.app.bulkaccesscontrol.model.BulkAccessConditionConfiguration;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
* Simple bean to manage different Bulk Access Condition configurations
|
||||
*
|
||||
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
|
||||
*/
|
||||
public class BulkAccessConditionConfigurationService {
|
||||
|
||||
@Autowired
|
||||
private List<BulkAccessConditionConfiguration> bulkAccessConditionConfigurations;
|
||||
|
||||
public List<BulkAccessConditionConfiguration> getBulkAccessConditionConfigurations() {
|
||||
if (CollectionUtils.isEmpty(bulkAccessConditionConfigurations)) {
|
||||
return new ArrayList<>();
|
||||
}
|
||||
return bulkAccessConditionConfigurations;
|
||||
}
|
||||
|
||||
public BulkAccessConditionConfiguration getBulkAccessConditionConfiguration(String name) {
|
||||
return getBulkAccessConditionConfigurations().stream()
|
||||
.filter(x -> name.equals(x.getName()))
|
||||
.findFirst()
|
||||
.orElse(null);
|
||||
}
|
||||
|
||||
public void setBulkAccessConditionConfigurations(
|
||||
List<BulkAccessConditionConfiguration> bulkAccessConditionConfigurations) {
|
||||
this.bulkAccessConditionConfigurations = bulkAccessConditionConfigurations;
|
||||
}
|
||||
}
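A hypothetical lookup sketch: the bean name "bulkAccessConditionConfigurationService" and the configuration name "default" are assumptions (the real names come from the Spring configuration), and a running DSpace service manager is required. It shows how a named configuration could be turned into a map of access condition options keyed by name.

import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

import org.dspace.app.bulkaccesscontrol.model.BulkAccessConditionConfiguration;
import org.dspace.app.bulkaccesscontrol.service.BulkAccessConditionConfigurationService;
import org.dspace.submit.model.AccessConditionOption;
import org.dspace.utils.DSpace;

public class BulkAccessConditionLookupSketch {
    public static void main(String[] args) {
        BulkAccessConditionConfigurationService service = new DSpace().getServiceManager()
                .getServiceByName("bulkAccessConditionConfigurationService",
                        BulkAccessConditionConfigurationService.class);

        BulkAccessConditionConfiguration configuration =
                service.getBulkAccessConditionConfiguration("default");

        // Index the configured item access conditions by name, as a caller might do
        // before resolving the names found in the JSON input.
        Map<String, AccessConditionOption> itemOptions = configuration
                .getItemAccessConditionOptions().stream()
                .collect(Collectors.toMap(AccessConditionOption::getName, Function.identity()));

        System.out.println("configured item access conditions: " + itemOptions.keySet());
    }
}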
|
@@ -7,33 +7,16 @@
|
||||
*/
|
||||
package org.dspace.app.bulkedit;
|
||||
|
||||
import java.sql.SQLException;
|
||||
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.dspace.authorize.service.AuthorizeService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
* The {@link ScriptConfiguration} for the {@link MetadataDeletion} script.
|
||||
*/
|
||||
public class MetadataDeletionScriptConfiguration<T extends MetadataDeletion> extends ScriptConfiguration<T> {
|
||||
|
||||
@Autowired
|
||||
private AuthorizeService authorizeService;
|
||||
|
||||
private Class<T> dspaceRunnableClass;
|
||||
|
||||
@Override
|
||||
public boolean isAllowedToExecute(Context context) {
|
||||
try {
|
||||
return authorizeService.isAdmin(context);
|
||||
} catch (SQLException e) {
|
||||
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Options getOptions() {
|
||||
if (options == null) {
|
||||
|
@@ -7,22 +7,14 @@
|
||||
*/
|
||||
package org.dspace.app.bulkedit;
|
||||
|
||||
import java.sql.SQLException;
|
||||
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.dspace.authorize.service.AuthorizeService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
* The {@link ScriptConfiguration} for the {@link MetadataExport} script
|
||||
*/
|
||||
public class MetadataExportScriptConfiguration<T extends MetadataExport> extends ScriptConfiguration<T> {
|
||||
|
||||
@Autowired
|
||||
private AuthorizeService authorizeService;
|
||||
|
||||
private Class<T> dspaceRunnableClass;
|
||||
|
||||
@Override
|
||||
@@ -39,15 +31,6 @@ public class MetadataExportScriptConfiguration<T extends MetadataExport> extends
|
||||
this.dspaceRunnableClass = dspaceRunnableClass;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isAllowedToExecute(Context context) {
|
||||
try {
|
||||
return authorizeService.isAdmin(context);
|
||||
} catch (SQLException e) {
|
||||
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Options getOptions() {
|
||||
if (options == null) {
|
||||
|
@@ -9,7 +9,6 @@
|
||||
package org.dspace.app.bulkedit;
|
||||
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||
|
||||
/**
|
||||
@@ -29,11 +28,6 @@ public class MetadataExportSearchScriptConfiguration<T extends MetadataExportSea
|
||||
this.dspaceRunnableclass = dspaceRunnableClass;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isAllowedToExecute(Context context) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Options getOptions() {
|
||||
if (options == null) {
|
||||
|
@@ -598,18 +598,19 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
|
||||
changes.add(whatHasChanged);
|
||||
}
|
||||
|
||||
if (change) {
|
||||
//only clear cache if changes have been made.
|
||||
c.uncacheEntity(wsItem);
|
||||
c.uncacheEntity(wfItem);
|
||||
c.uncacheEntity(item);
|
||||
if (change && (rowCount % configurationService.getIntProperty("bulkedit.change.commit.count", 100) == 0)) {
|
||||
c.commit();
|
||||
handler.logInfo(LogHelper.getHeader(c, "metadata_import_commit", "lineNumber=" + rowCount));
|
||||
}
|
||||
populateRefAndRowMap(line, item == null ? null : item.getID());
|
||||
// keep track of current rows processed
|
||||
rowCount++;
|
||||
}
|
||||
if (change) {
|
||||
c.commit();
|
||||
}
|
||||
|
||||
c.setMode(originalMode);
|
||||
c.setMode(Context.Mode.READ_ONLY);
|
||||
|
||||
|
||||
// Return the changes
|
||||
|
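A standalone sketch of the batching pattern introduced above: commit the context every N processed rows, where N comes from "bulkedit.change.commit.count" (default 100). The processRows method and its arguments are hypothetical; only the property name and the commit cadence mirror the change.

import java.sql.SQLException;

import org.dspace.core.Context;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;

public class BatchedCommitSketch {

    public static void processRows(Context context, int totalRows) throws SQLException {
        ConfigurationService configurationService =
                DSpaceServicesFactory.getInstance().getConfigurationService();
        int commitCount = configurationService.getIntProperty("bulkedit.change.commit.count", 100);

        for (int rowCount = 1; rowCount <= totalRows; rowCount++) {
            // ... apply the metadata change for this row ...
            if (rowCount % commitCount == 0) {
                context.commit();
            }
        }
        // flush whatever is left after the last full batch
        context.commit();
    }
}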
@@ -8,22 +8,15 @@
|
||||
package org.dspace.app.bulkedit;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.sql.SQLException;
|
||||
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.dspace.authorize.service.AuthorizeService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
* The {@link ScriptConfiguration} for the {@link MetadataImport} script
|
||||
*/
|
||||
public class MetadataImportScriptConfiguration<T extends MetadataImport> extends ScriptConfiguration<T> {
|
||||
|
||||
@Autowired
|
||||
private AuthorizeService authorizeService;
|
||||
|
||||
private Class<T> dspaceRunnableClass;
|
||||
|
||||
@Override
|
||||
@@ -40,15 +33,6 @@ public class MetadataImportScriptConfiguration<T extends MetadataImport> extends
|
||||
this.dspaceRunnableClass = dspaceRunnableClass;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isAllowedToExecute(Context context) {
|
||||
try {
|
||||
return authorizeService.isAdmin(context);
|
||||
} catch (SQLException e) {
|
||||
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Options getOptions() {
|
||||
if (options == null) {
|
||||
|
@@ -7,18 +7,11 @@
|
||||
*/
|
||||
package org.dspace.app.harvest;
|
||||
|
||||
import java.sql.SQLException;
|
||||
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.dspace.authorize.service.AuthorizeService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
|
||||
public class HarvestScriptConfiguration<T extends Harvest> extends ScriptConfiguration<T> {
|
||||
@Autowired
|
||||
private AuthorizeService authorizeService;
|
||||
|
||||
private Class<T> dspaceRunnableClass;
|
||||
|
||||
@@ -32,13 +25,6 @@ public class HarvestScriptConfiguration<T extends Harvest> extends ScriptConfigu
|
||||
this.dspaceRunnableClass = dspaceRunnableClass;
|
||||
}
|
||||
|
||||
public boolean isAllowedToExecute(final Context context) {
|
||||
try {
|
||||
return authorizeService.isAdmin(context);
|
||||
} catch (SQLException e) {
|
||||
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
|
||||
}
|
||||
}
|
||||
|
||||
public Options getOptions() {
|
||||
Options options = new Options();
|
||||
|
@@ -7,14 +7,9 @@
|
||||
*/
|
||||
package org.dspace.app.itemexport;
|
||||
|
||||
import java.sql.SQLException;
|
||||
|
||||
import org.apache.commons.cli.Option;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.dspace.authorize.service.AuthorizeService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
* The {@link ScriptConfiguration} for the {@link ItemExport} script
|
||||
@@ -23,9 +18,6 @@ import org.springframework.beans.factory.annotation.Autowired;
|
||||
*/
|
||||
public class ItemExportScriptConfiguration<T extends ItemExport> extends ScriptConfiguration<T> {
|
||||
|
||||
@Autowired
|
||||
private AuthorizeService authorizeService;
|
||||
|
||||
private Class<T> dspaceRunnableClass;
|
||||
|
||||
@Override
|
||||
@@ -38,15 +30,6 @@ public class ItemExportScriptConfiguration<T extends ItemExport> extends ScriptC
|
||||
this.dspaceRunnableClass = dspaceRunnableClass;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isAllowedToExecute(final Context context) {
|
||||
try {
|
||||
return authorizeService.isAdmin(context);
|
||||
} catch (SQLException e) {
|
||||
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Options getOptions() {
|
||||
Options options = new Options();
|
||||
|
@@ -11,6 +11,7 @@ import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Files;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
@@ -22,6 +23,7 @@ import java.util.UUID;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.tika.Tika;
|
||||
import org.dspace.app.itemimport.factory.ItemImportServiceFactory;
|
||||
import org.dspace.app.itemimport.service.ItemImportService;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
@@ -67,16 +69,19 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
|
||||
protected String eperson = null;
|
||||
protected String[] collections = null;
|
||||
protected boolean isTest = false;
|
||||
protected boolean isExcludeContent = false;
|
||||
protected boolean isResume = false;
|
||||
protected boolean useWorkflow = false;
|
||||
protected boolean useWorkflowSendEmail = false;
|
||||
protected boolean isQuiet = false;
|
||||
protected boolean commandLineCollections = false;
|
||||
protected boolean zip = false;
|
||||
protected boolean remoteUrl = false;
|
||||
protected String zipfilename = null;
|
||||
protected boolean zipvalid = false;
|
||||
protected boolean help = false;
|
||||
protected File workDir = null;
|
||||
private File workFile = null;
|
||||
protected File workFile = null;
|
||||
|
||||
protected static final CollectionService collectionService =
|
||||
ContentServiceFactory.getInstance().getCollectionService();
|
||||
@@ -119,6 +124,8 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
|
||||
handler.logInfo("**Test Run** - not actually importing items.");
|
||||
}
|
||||
|
||||
isExcludeContent = commandLine.hasOption('x');
|
||||
|
||||
if (commandLine.hasOption('p')) {
|
||||
template = true;
|
||||
}
|
||||
@@ -204,6 +211,7 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
|
||||
.getItemImportService();
|
||||
try {
|
||||
itemImportService.setTest(isTest);
|
||||
itemImportService.setExcludeContent(isExcludeContent);
|
||||
itemImportService.setResume(isResume);
|
||||
itemImportService.setUseWorkflow(useWorkflow);
|
||||
itemImportService.setUseWorkflowSendEmail(useWorkflowSendEmail);
|
||||
@@ -229,12 +237,23 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
|
||||
handler.logInfo("***End of Test Run***");
|
||||
}
|
||||
} finally {
|
||||
// clean work dir
|
||||
if (zip) {
|
||||
// if zip file was valid then clean sourcedir
|
||||
if (zipvalid && sourcedir != null && new File(sourcedir).exists()) {
|
||||
FileUtils.deleteDirectory(new File(sourcedir));
|
||||
}
|
||||
|
||||
// clean workdir
|
||||
if (workDir != null && workDir.exists()) {
|
||||
FileUtils.deleteDirectory(workDir);
|
||||
}
|
||||
|
||||
// conditionally clean workFile if import was done in the UI or via a URL and it still exists
|
||||
if (workFile != null && workFile.exists()) {
|
||||
workFile.delete();
|
||||
}
|
||||
}
|
||||
|
||||
Date endTime = new Date();
|
||||
handler.logInfo("Started: " + startTime.getTime());
|
||||
handler.logInfo("Ended: " + endTime.getTime());
|
||||
@@ -249,6 +268,17 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
|
||||
* @param context
|
||||
*/
|
||||
protected void validate(Context context) {
|
||||
// check zip type: uploaded file or remote url
|
||||
if (commandLine.hasOption('z')) {
|
||||
zipfilename = commandLine.getOptionValue('z');
|
||||
} else if (commandLine.hasOption('u')) {
|
||||
remoteUrl = true;
|
||||
zipfilename = commandLine.getOptionValue('u');
|
||||
}
|
||||
if (StringUtils.isBlank(zipfilename)) {
|
||||
throw new UnsupportedOperationException("Must run with either name of zip file or url of zip file");
|
||||
}
|
||||
|
||||
if (command == null) {
|
||||
handler.logError("Must run with either add, replace, or remove (run with -h flag for details)");
|
||||
throw new UnsupportedOperationException("Must run with either add, replace, or remove");
|
||||
@@ -291,7 +321,6 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
|
||||
handler.writeFilestream(context, MAPFILE_FILENAME, mapfileInputStream, MAPFILE_BITSTREAM_TYPE);
|
||||
} finally {
|
||||
mapFile.delete();
|
||||
workFile.delete();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -302,17 +331,55 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
|
||||
* @throws Exception
|
||||
*/
|
||||
protected void readZip(Context context, ItemImportService itemImportService) throws Exception {
|
||||
Optional<InputStream> optionalFileStream = handler.getFileStream(context, zipfilename);
|
||||
if (optionalFileStream.isPresent()) {
|
||||
Optional<InputStream> optionalFileStream = Optional.empty();
|
||||
Optional<InputStream> validationFileStream = Optional.empty();
|
||||
if (!remoteUrl) {
|
||||
// manage zip via upload
|
||||
optionalFileStream = handler.getFileStream(context, zipfilename);
|
||||
validationFileStream = handler.getFileStream(context, zipfilename);
|
||||
} else {
|
||||
// manage zip via remote url
|
||||
optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
|
||||
validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
|
||||
}
|
||||
|
||||
if (validationFileStream.isPresent()) {
|
||||
// validate zip file
|
||||
if (validationFileStream.isPresent()) {
|
||||
validateZip(validationFileStream.get());
|
||||
}
|
||||
|
||||
workFile = new File(itemImportService.getTempWorkDir() + File.separator
|
||||
+ zipfilename + "-" + context.getCurrentUser().getID());
|
||||
FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
|
||||
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR);
|
||||
sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
|
||||
} else {
|
||||
throw new IllegalArgumentException(
|
||||
"Error reading file, the file couldn't be found for filename: " + zipfilename);
|
||||
}
|
||||
|
||||
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
|
||||
+ File.separator + context.getCurrentUser().getID());
|
||||
sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
|
||||
}
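A self-contained sketch of the "remote url" branch above, assuming only a reachable URL (the address below is a placeholder): stream the zip into a local work file with FileUtils.copyInputStreamToFile before unzipping it.

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;

import org.apache.commons.io.FileUtils;

public class RemoteZipDownloadSketch {
    public static void main(String[] args) throws IOException {
        String zipUrl = "https://example.org/items.zip";   // hypothetical location
        File workFile = File.createTempFile("itemimport-", ".zip");

        try (InputStream remoteStream = new URL(zipUrl).openStream()) {
            FileUtils.copyInputStreamToFile(remoteStream, workFile);
        }
        System.out.println("downloaded " + workFile.length() + " bytes to " + workFile);
    }
}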
|
||||
|
||||
/**
|
||||
* Confirm that the zip file has the correct MIME type
|
||||
* @param inputStream
|
||||
*/
|
||||
protected void validateZip(InputStream inputStream) {
|
||||
Tika tika = new Tika();
|
||||
try {
|
||||
String mimeType = tika.detect(inputStream);
|
||||
if (mimeType.equals("application/zip")) {
|
||||
zipvalid = true;
|
||||
} else {
|
||||
handler.logError("A valid zip file must be supplied. The provided file has mimetype: " + mimeType);
|
||||
throw new UnsupportedOperationException("A valid zip file must be supplied");
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new IllegalArgumentException(
|
||||
"There was an error while reading the zip file: " + zipfilename);
|
||||
}
|
||||
}
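The same MIME check can be exercised on its own; a minimal sketch using Apache Tika directly against a local file path passed as the first argument.

import java.io.File;
import java.io.IOException;

import org.apache.tika.Tika;

public class ZipMimeTypeCheckSketch {
    public static void main(String[] args) throws IOException {
        File candidate = new File(args[0]);
        String mimeType = new Tika().detect(candidate);
        if ("application/zip".equals(mimeType)) {
            System.out.println(candidate + " looks like a valid zip archive");
        } else {
            System.out.println(candidate + " is not a zip archive (detected: " + mimeType + ")");
        }
    }
}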
|
||||
|
||||
/**
|
||||
@@ -352,7 +419,6 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
|
||||
*/
|
||||
protected void setZip() {
|
||||
zip = true;
|
||||
zipfilename = commandLine.getOptionValue('z');
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -8,10 +8,15 @@
|
||||
package org.dspace.app.itemimport;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.InputStream;
|
||||
import java.net.URL;
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.dspace.app.itemimport.service.ItemImportService;
|
||||
import org.dspace.content.Collection;
|
||||
@@ -62,7 +67,7 @@ public class ItemImportCLI extends ItemImport {
|
||||
handler.logError("Must run with either add, replace, or remove (run with -h flag for details)");
|
||||
throw new UnsupportedOperationException("Must run with either add, replace, or remove");
|
||||
} else if ("add".equals(command) || "replace".equals(command)) {
|
||||
if (sourcedir == null) {
|
||||
if (!remoteUrl && sourcedir == null) {
|
||||
handler.logError("A source directory containing items must be set (run with -h flag for details)");
|
||||
throw new UnsupportedOperationException("A source directory containing items must be set");
|
||||
}
|
||||
@@ -96,10 +101,43 @@ public class ItemImportCLI extends ItemImport {
|
||||
protected void readZip(Context context, ItemImportService itemImportService) throws Exception {
|
||||
// If this is a zip archive, unzip it first
|
||||
if (zip) {
|
||||
if (!remoteUrl) {
|
||||
// confirm zip file exists
|
||||
File myZipFile = new File(sourcedir + File.separator + zipfilename);
|
||||
if ((!myZipFile.exists()) || (!myZipFile.isFile())) {
|
||||
throw new IllegalArgumentException(
|
||||
"Error reading file, the file couldn't be found for filename: " + zipfilename);
|
||||
}
|
||||
|
||||
// validate zip file
|
||||
InputStream validationFileStream = new FileInputStream(myZipFile);
|
||||
validateZip(validationFileStream);
|
||||
|
||||
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
|
||||
+ File.separator + context.getCurrentUser().getID());
|
||||
sourcedir = itemImportService.unzip(
|
||||
new File(sourcedir + File.separator + zipfilename), workDir.getAbsolutePath());
|
||||
} else {
|
||||
// manage zip via remote url
|
||||
Optional<InputStream> optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
|
||||
if (optionalFileStream.isPresent()) {
|
||||
// validate zip file via url
|
||||
Optional<InputStream> validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
|
||||
if (validationFileStream.isPresent()) {
|
||||
validateZip(validationFileStream.get());
|
||||
}
|
||||
|
||||
workFile = new File(itemImportService.getTempWorkDir() + File.separator
|
||||
+ zipfilename + "-" + context.getCurrentUser().getID());
|
||||
FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
|
||||
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
|
||||
+ File.separator + context.getCurrentUser().getID());
|
||||
sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
|
||||
} else {
|
||||
throw new IllegalArgumentException(
|
||||
"Error reading file, the file couldn't be found for filename: " + zipfilename);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -120,6 +158,12 @@ public class ItemImportCLI extends ItemImport {
|
||||
zip = true;
|
||||
zipfilename = commandLine.getOptionValue('z');
|
||||
}
|
||||
|
||||
if (commandLine.hasOption('u')) { // remote url
|
||||
zip = true;
|
||||
remoteUrl = true;
|
||||
zipfilename = commandLine.getOptionValue('u');
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@@ -37,6 +37,9 @@ public class ItemImportCLIScriptConfiguration extends ItemImportScriptConfigurat
|
||||
options.addOption(Option.builder("z").longOpt("zip")
|
||||
.desc("name of zip file")
|
||||
.hasArg().required(false).build());
|
||||
options.addOption(Option.builder("u").longOpt("url")
|
||||
.desc("url of zip file")
|
||||
.hasArg().build());
|
||||
options.addOption(Option.builder("c").longOpt("collection")
|
||||
.desc("destination collection(s) Handle or database ID")
|
||||
.hasArg().required(false).build());
|
||||
@@ -55,6 +58,9 @@ public class ItemImportCLIScriptConfiguration extends ItemImportScriptConfigurat
|
||||
options.addOption(Option.builder("v").longOpt("validate")
|
||||
.desc("test run - do not actually import items")
|
||||
.hasArg(false).required(false).build());
|
||||
options.addOption(Option.builder("x").longOpt("exclude-bitstreams")
|
||||
.desc("do not load or expect content bitstreams")
|
||||
.hasArg(false).required(false).build());
|
||||
options.addOption(Option.builder("p").longOpt("template")
|
||||
.desc("apply template")
|
||||
.hasArg(false).required(false).build());
|
||||
|
@@ -8,14 +8,10 @@
|
||||
package org.dspace.app.itemimport;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.sql.SQLException;
|
||||
|
||||
import org.apache.commons.cli.Option;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.dspace.authorize.service.AuthorizeService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
* The {@link ScriptConfiguration} for the {@link ItemImport} script
|
||||
@@ -24,9 +20,6 @@ import org.springframework.beans.factory.annotation.Autowired;
|
||||
*/
|
||||
public class ItemImportScriptConfiguration<T extends ItemImport> extends ScriptConfiguration<T> {
|
||||
|
||||
@Autowired
|
||||
private AuthorizeService authorizeService;
|
||||
|
||||
private Class<T> dspaceRunnableClass;
|
||||
|
||||
@Override
|
||||
@@ -39,15 +32,6 @@ public class ItemImportScriptConfiguration<T extends ItemImport> extends ScriptC
|
||||
this.dspaceRunnableClass = dspaceRunnableClass;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isAllowedToExecute(final Context context) {
|
||||
try {
|
||||
return authorizeService.isAdmin(context);
|
||||
} catch (SQLException e) {
|
||||
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Options getOptions() {
|
||||
Options options = new Options();
|
||||
@@ -64,7 +48,10 @@ public class ItemImportScriptConfiguration<T extends ItemImport> extends ScriptC
|
||||
options.addOption(Option.builder("z").longOpt("zip")
|
||||
.desc("name of zip file")
|
||||
.type(InputStream.class)
|
||||
.hasArg().required().build());
|
||||
.hasArg().build());
|
||||
options.addOption(Option.builder("u").longOpt("url")
|
||||
.desc("url of zip file")
|
||||
.hasArg().build());
|
||||
options.addOption(Option.builder("c").longOpt("collection")
|
||||
.desc("destination collection(s) Handle or database ID")
|
||||
.hasArg().required(false).build());
|
||||
@@ -81,6 +68,9 @@ public class ItemImportScriptConfiguration<T extends ItemImport> extends ScriptC
|
||||
options.addOption(Option.builder("v").longOpt("validate")
|
||||
.desc("test run - do not actually import items")
|
||||
.hasArg(false).required(false).build());
|
||||
options.addOption(Option.builder("x").longOpt("exclude-bitstreams")
|
||||
.desc("do not load or expect content bitstreams")
|
||||
.hasArg(false).required(false).build());
|
||||
options.addOption(Option.builder("p").longOpt("template")
|
||||
.desc("apply template")
|
||||
.hasArg(false).required(false).build());
|
||||
|
@@ -62,6 +62,7 @@ import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.lang3.RandomStringUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.lang3.exception.ExceptionUtils;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.itemimport.service.ItemImportService;
|
||||
import org.dspace.app.util.LocalSchemaFilenameFilter;
|
||||
@@ -135,7 +136,7 @@ import org.xml.sax.SAXException;
|
||||
* allow the registration of files (bitstreams) into DSpace.
|
||||
*/
|
||||
public class ItemImportServiceImpl implements ItemImportService, InitializingBean {
|
||||
private final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemImportServiceImpl.class);
|
||||
private final Logger log = LogManager.getLogger();
|
||||
|
||||
private DSpaceRunnableHandler handler;
|
||||
|
||||
@@ -181,6 +182,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
|
||||
protected String tempWorkDir;
|
||||
|
||||
protected boolean isTest = false;
|
||||
protected boolean isExcludeContent = false;
|
||||
protected boolean isResume = false;
|
||||
protected boolean useWorkflow = false;
|
||||
protected boolean useWorkflowSendEmail = false;
|
||||
@@ -950,9 +952,10 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
|
||||
String qualifier = getAttributeValue(n, "qualifier"); //NodeValue();
|
||||
// //getElementData(n,
|
||||
// "qualifier");
|
||||
String language = getAttributeValue(n, "language");
|
||||
if (language != null) {
|
||||
language = language.trim();
|
||||
|
||||
String language = null;
|
||||
if (StringUtils.isNotBlank(getAttributeValue(n, "language"))) {
|
||||
language = getAttributeValue(n, "language").trim();
|
||||
}
|
||||
|
||||
if (!isQuiet) {
|
||||
@@ -1403,6 +1406,10 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
|
||||
protected void processContentFileEntry(Context c, Item i, String path,
|
||||
String fileName, String bundleName, boolean primary) throws SQLException,
|
||||
IOException, AuthorizeException {
|
||||
if (isExcludeContent) {
|
||||
return;
|
||||
}
|
||||
|
||||
String fullpath = path + File.separatorChar + fileName;
|
||||
|
||||
// get an input stream
|
||||
@@ -2342,6 +2349,11 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
|
||||
this.isTest = isTest;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setExcludeContent(boolean isExcludeContent) {
|
||||
this.isExcludeContent = isExcludeContent;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setResume(boolean isResume) {
|
||||
this.isResume = isResume;
|
||||
|
@@ -211,6 +211,13 @@ public interface ItemImportService {
|
||||
*/
|
||||
public void setTest(boolean isTest);
|
||||
|
||||
/**
|
||||
* Set exclude-content flag.
|
||||
*
|
||||
* @param isExcludeContent true or false
|
||||
*/
|
||||
public void setExcludeContent(boolean isExcludeContent);
|
||||
|
||||
/**
|
||||
* Set resume flag
|
||||
*
|
||||
|
@@ -22,7 +22,9 @@ public class ImageMagickPdfThumbnailFilter extends ImageMagickThumbnailFilter {
|
||||
File f2 = null;
|
||||
File f3 = null;
|
||||
try {
|
||||
f2 = getImageFile(f, 0, verbose);
|
||||
// Step 1: get an image from our PDF file, with PDF-specific processing options
|
||||
f2 = getImageFile(f, verbose);
|
||||
// Step 2: use the image above to create the final resized and rotated thumbnail
|
||||
f3 = getThumbnailFile(f2, verbose);
|
||||
byte[] bytes = Files.readAllBytes(f3.toPath());
|
||||
return new ByteArrayInputStream(bytes);
|
||||
|
@@ -14,6 +14,9 @@ import java.io.InputStream;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.regex.PatternSyntaxException;
|
||||
|
||||
import org.apache.pdfbox.pdmodel.PDDocument;
|
||||
import org.apache.pdfbox.pdmodel.PDPage;
|
||||
import org.apache.pdfbox.pdmodel.common.PDRectangle;
|
||||
import org.dspace.content.Bitstream;
|
||||
import org.dspace.content.Bundle;
|
||||
import org.dspace.content.Item;
|
||||
@@ -113,9 +116,17 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
|
||||
return f2;
|
||||
}
|
||||
|
||||
public File getImageFile(File f, int page, boolean verbose)
|
||||
/**
|
||||
* Return an image from a bitstream with specific processing options for
|
||||
* PDFs. This is only used by ImageMagickPdfThumbnailFilter in order to
|
||||
* generate an intermediate image file for use with getThumbnailFile.
|
||||
*/
|
||||
public File getImageFile(File f, boolean verbose)
|
||||
throws IOException, InterruptedException, IM4JavaException {
|
||||
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
|
||||
// Writing an intermediate file to disk is inefficient, but since we're
|
||||
// doing it anyway, we should use a lossless format. IM's internal MIFF
|
||||
// is lossless like PNG and TIFF, but much faster.
|
||||
File f2 = new File(f.getParentFile(), f.getName() + ".miff");
|
||||
f2.deleteOnExit();
|
||||
ConvertCmd cmd = new ConvertCmd();
|
||||
IMOperation op = new IMOperation();
|
||||
@@ -132,7 +143,27 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
|
||||
op.density(Integer.valueOf(density));
|
||||
}
|
||||
|
||||
String s = "[" + page + "]";
|
||||
// Check the PDF's MediaBox and CropBox to see if they are the same.
|
||||
// If not, then tell ImageMagick to use the CropBox when generating
|
||||
// the thumbnail because the CropBox is generally used to define the
|
||||
// area displayed when a user opens the PDF on a screen, whereas the
|
||||
// MediaBox is used for print. Not all PDFs set these correctly, so
|
||||
// we can use ImageMagick's default behavior unless we see an explicit
|
||||
// CropBox. Note: we don't need to do anything special to detect if
|
||||
// the CropBox is missing or empty because pdfbox will set it to the
|
||||
// same size as the MediaBox if it doesn't exist. Also note that we
|
||||
// only need to check the first page, since that's what we use for
|
||||
// generating the thumbnail (PDDocument uses a zero-based index).
|
||||
PDPage pdfPage = PDDocument.load(f).getPage(0);
|
||||
PDRectangle pdfPageMediaBox = pdfPage.getMediaBox();
|
||||
PDRectangle pdfPageCropBox = pdfPage.getCropBox();
|
||||
|
||||
// This option must come *before* we open the input file.
|
||||
if (pdfPageCropBox != pdfPageMediaBox) {
|
||||
op.define("pdf:use-cropbox=true");
|
||||
}
|
||||
|
||||
String s = "[0]";
|
||||
op.addImage(f.getAbsolutePath() + s);
|
||||
if (configurationService.getBooleanProperty(PRE + ".flatten", true)) {
|
||||
op.flatten();
|
||||
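A standalone sketch of the PDFBox inspection used above (assuming PDFBox 2.x, as elsewhere in this codebase): load the first page, compare MediaBox and CropBox dimensions, and report whether an explicit CropBox would trigger pdf:use-cropbox=true.

import java.io.File;
import java.io.IOException;

import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.common.PDRectangle;

public class PdfCropBoxCheckSketch {
    public static void main(String[] args) throws IOException {
        try (PDDocument document = PDDocument.load(new File(args[0]))) {
            PDPage firstPage = document.getPage(0);
            PDRectangle mediaBox = firstPage.getMediaBox();
            PDRectangle cropBox = firstPage.getCropBox();

            System.out.printf("MediaBox: %.1f x %.1f%n", mediaBox.getWidth(), mediaBox.getHeight());
            System.out.printf("CropBox:  %.1f x %.1f%n", cropBox.getWidth(), cropBox.getHeight());

            // PDFBox falls back to the MediaBox when no CropBox is set, so a size
            // difference here means the PDF declares an explicit CropBox.
            if (cropBox.getWidth() != mediaBox.getWidth()
                    || cropBox.getHeight() != mediaBox.getHeight()) {
                System.out.println("Explicit CropBox detected; pdf:use-cropbox=true would be set.");
            }
        }
    }
}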
@@ -185,20 +216,20 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
|
||||
if (description != null) {
|
||||
if (replaceRegex.matcher(description).matches()) {
|
||||
if (verbose) {
|
||||
System.out.format("%s %s matches pattern and is replacable.%n",
|
||||
description, nsrc);
|
||||
System.out.format("%s %s matches pattern and is replaceable.%n",
|
||||
description, n);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (description.equals(getDescription())) {
|
||||
if (verbose) {
|
||||
System.out.format("%s %s is replaceable.%n",
|
||||
getDescription(), nsrc);
|
||||
getDescription(), n);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
}
|
||||
System.out.format("Custom Thumbnail exists for %s for item %s. Thumbnail will not be generated.%n",
|
||||
System.out.format("Custom thumbnail exists for %s for item %s. Thumbnail will not be generated.%n",
|
||||
nsrc, item.getHandle());
|
||||
return false;
|
||||
}
|
||||
|
@@ -0,0 +1,76 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.mediafilter;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.nio.file.Files;
|
||||
|
||||
import org.dspace.content.Item;
|
||||
import org.im4java.core.ConvertCmd;
|
||||
import org.im4java.core.IM4JavaException;
|
||||
import org.im4java.core.IMOperation;
|
||||
|
||||
|
||||
/**
|
||||
* Filter video bitstreams: capture a frame from the video and scale it to fit
* within the bounds of thumbnail.maxwidth and thumbnail.maxheight, the maximum
* size we want a thumbnail to be. Creates only JPEGs.
|
||||
*/
|
||||
public class ImageMagickVideoThumbnailFilter extends ImageMagickThumbnailFilter {
|
||||
private static final int DEFAULT_WIDTH = 180;
|
||||
private static final int DEFAULT_HEIGHT = 120;
|
||||
private static final int FRAME_NUMBER = 100;
|
||||
|
||||
/**
|
||||
* @param currentItem item
|
||||
* @param source source input stream
|
||||
* @param verbose verbose mode
|
||||
* @return InputStream the resulting input stream
|
||||
* @throws Exception if error
|
||||
*/
|
||||
@Override
|
||||
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
|
||||
throws Exception {
|
||||
File f = inputStreamToTempFile(source, "imthumb", ".tmp");
|
||||
File f2 = null;
|
||||
try {
|
||||
f2 = getThumbnailFile(f, verbose);
|
||||
byte[] bytes = Files.readAllBytes(f2.toPath());
|
||||
return new ByteArrayInputStream(bytes);
|
||||
} finally {
|
||||
//noinspection ResultOfMethodCallIgnored
|
||||
f.delete();
|
||||
if (f2 != null) {
|
||||
//noinspection ResultOfMethodCallIgnored
|
||||
f2.delete();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public File getThumbnailFile(File f, boolean verbose)
|
||||
throws IOException, InterruptedException, IM4JavaException {
|
||||
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
|
||||
f2.deleteOnExit();
|
||||
ConvertCmd cmd = new ConvertCmd();
|
||||
IMOperation op = new IMOperation();
|
||||
op.autoOrient();
|
||||
op.addImage("VIDEO:" + f.getAbsolutePath() + "[" + FRAME_NUMBER + "]");
|
||||
op.thumbnail(configurationService.getIntProperty("thumbnail.maxwidth", DEFAULT_WIDTH),
|
||||
configurationService.getIntProperty("thumbnail.maxheight", DEFAULT_HEIGHT));
|
||||
op.addImage(f2.getAbsolutePath());
|
||||
if (verbose) {
|
||||
System.out.println("IM Thumbnail Param: " + op);
|
||||
}
|
||||
cmd.run(op);
|
||||
return f2;
|
||||
}
|
||||
}
|
@@ -7,25 +7,16 @@
|
||||
*/
|
||||
package org.dspace.app.mediafilter;
|
||||
|
||||
import java.sql.SQLException;
|
||||
|
||||
import org.apache.commons.cli.Option;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.dspace.authorize.service.AuthorizeService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
public class MediaFilterScriptConfiguration<T extends MediaFilterScript> extends ScriptConfiguration<T> {
|
||||
|
||||
@Autowired
|
||||
private AuthorizeService authorizeService;
|
||||
|
||||
private Class<T> dspaceRunnableClass;
|
||||
|
||||
private static final String MEDIA_FILTER_PLUGINS_KEY = "filter.plugins";
|
||||
|
||||
|
||||
@Override
|
||||
public Class<T> getDspaceRunnableClass() {
|
||||
return dspaceRunnableClass;
|
||||
@@ -36,16 +27,6 @@ public class MediaFilterScriptConfiguration<T extends MediaFilterScript> extends
|
||||
this.dspaceRunnableClass = dspaceRunnableClass;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public boolean isAllowedToExecute(final Context context) {
|
||||
try {
|
||||
return authorizeService.isAdmin(context);
|
||||
} catch (SQLException e) {
|
||||
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Options getOptions() {
|
||||
Options options = new Options();
|
||||
|
@@ -8,13 +8,17 @@
|
||||
package org.dspace.app.mediafilter;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.dspace.app.mediafilter.service.MediaFilterService;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.authorize.service.AuthorizeService;
|
||||
import org.dspace.content.Bitstream;
|
||||
import org.dspace.content.BitstreamFormat;
|
||||
@@ -315,25 +319,25 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
|
||||
|
||||
// check if destination bitstream exists
|
||||
Bundle existingBundle = null;
|
||||
Bitstream existingBitstream = null;
|
||||
List<Bitstream> existingBitstreams = new ArrayList<Bitstream>();
|
||||
List<Bundle> bundles = itemService.getBundles(item, formatFilter.getBundleName());
|
||||
|
||||
if (bundles.size() > 0) {
|
||||
// only finds the last match (FIXME?)
|
||||
// only finds the last matching bundle and all matching bitstreams in the proper bundle(s)
|
||||
for (Bundle bundle : bundles) {
|
||||
List<Bitstream> bitstreams = bundle.getBitstreams();
|
||||
|
||||
for (Bitstream bitstream : bitstreams) {
|
||||
if (bitstream.getName().trim().equals(newName.trim())) {
|
||||
existingBundle = bundle;
|
||||
existingBitstream = bitstream;
|
||||
existingBitstreams.add(bitstream);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// if exists and overwrite = false, exit
|
||||
if (!overWrite && (existingBitstream != null)) {
|
||||
if (!overWrite && (existingBitstreams.size() > 0)) {
|
||||
if (!isQuiet) {
|
||||
logInfo("SKIPPED: bitstream " + source.getID()
|
||||
+ " (item: " + item.getHandle() + ") because '" + newName + "' already exists");
|
||||
@@ -388,18 +392,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
|
||||
bitstreamService.update(context, b);
|
||||
|
||||
//Set permissions on the derivative bitstream
|
||||
//- First remove any existing policies
|
||||
authorizeService.removeAllPolicies(context, b);
|
||||
|
||||
//- Determine if this is a public-derivative format
|
||||
if (publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) {
|
||||
//- Set derivative bitstream to be publicly accessible
|
||||
Group anonymous = groupService.findByName(context, Group.ANONYMOUS);
|
||||
authorizeService.addPolicy(context, b, Constants.READ, anonymous);
|
||||
} else {
|
||||
//- Inherit policies from the source bitstream
|
||||
authorizeService.inheritPolicies(context, source, b);
|
||||
}
|
||||
updatePoliciesOfDerivativeBitstream(context, b, formatFilter, source);
|
||||
|
||||
//do post-processing of the generated bitstream
|
||||
formatFilter.postProcessBitstream(context, item, b);
|
||||
@@ -408,9 +401,8 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
|
||||
logError("!!! OutOfMemoryError !!!");
|
||||
}
|
||||
|
||||
// fixme - set date?
|
||||
// we are overwriting, so remove old bitstream
|
||||
if (existingBitstream != null) {
|
||||
for (Bitstream existingBitstream : existingBitstreams) {
|
||||
bundleService.removeBitstream(context, existingBundle, existingBitstream);
|
||||
}
|
||||
|
||||
@@ -422,6 +414,71 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void updatePoliciesOfDerivativeBitstreams(Context context, Item item, Bitstream source)
|
||||
throws SQLException, AuthorizeException {
|
||||
|
||||
if (filterClasses == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
for (FormatFilter formatFilter : filterClasses) {
|
||||
for (Bitstream bitstream : findDerivativeBitstreams(item, source, formatFilter)) {
|
||||
updatePoliciesOfDerivativeBitstream(context, bitstream, formatFilter, source);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* find derivative bitstreams related to source bitstream
|
||||
*
|
||||
* @param item item containing bitstreams
|
||||
* @param source source bitstream
|
||||
* @param formatFilter formatFilter
|
||||
* @return list of derivative bitstreams from source bitstream
|
||||
* @throws SQLException If something goes wrong in the database
|
||||
*/
|
||||
private List<Bitstream> findDerivativeBitstreams(Item item, Bitstream source, FormatFilter formatFilter)
|
||||
throws SQLException {
|
||||
|
||||
String bitstreamName = formatFilter.getFilteredName(source.getName());
|
||||
List<Bundle> bundles = itemService.getBundles(item, formatFilter.getBundleName());
|
||||
|
||||
return bundles.stream()
|
||||
.flatMap(bundle ->
|
||||
bundle.getBitstreams().stream())
|
||||
.filter(bitstream ->
|
||||
StringUtils.equals(bitstream.getName().trim(), bitstreamName.trim()))
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the resource policies of a derivative bitstream:
* remove all of its existing policies, then either make the bitstream
* publicly accessible or copy the policies of the source bitstream
* onto it.
|
||||
*
|
||||
* @param context the context
|
||||
* @param bitstream derivative bitstream
|
||||
* @param formatFilter formatFilter
|
||||
* @param source the source bitstream
|
||||
* @throws SQLException If something goes wrong in the database
|
||||
* @throws AuthorizeException if authorization error
|
||||
*/
|
||||
private void updatePoliciesOfDerivativeBitstream(Context context, Bitstream bitstream, FormatFilter formatFilter,
|
||||
Bitstream source) throws SQLException, AuthorizeException {
|
||||
|
||||
authorizeService.removeAllPolicies(context, bitstream);
|
||||
|
||||
if (publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) {
|
||||
Group anonymous = groupService.findByName(context, Group.ANONYMOUS);
|
||||
authorizeService.addPolicy(context, bitstream, Constants.READ, anonymous);
|
||||
} else {
|
||||
authorizeService.replaceAllPolicies(context, source, bitstream);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Item getCurrentItem() {
|
||||
return currentItem;
|
||||
|
@@ -7,10 +7,12 @@
|
||||
*/
|
||||
package org.dspace.app.mediafilter.service;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.dspace.app.mediafilter.FormatFilter;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Bitstream;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Community;
|
||||
@@ -91,6 +93,22 @@ public interface MediaFilterService {
|
||||
public boolean processBitstream(Context context, Item item, Bitstream source, FormatFilter formatFilter)
|
||||
throws Exception;
|
||||
|
||||
/**
|
||||
* Update the resource policies of the derivative bitstreams
* related to the given source bitstream:
* either set the derivative bitstreams to be publicly accessible or
* replace their policies with those of the source bitstream.
|
||||
*
|
||||
* @param context context
|
||||
* @param item item containing bitstreams
|
||||
* @param source source bitstream
|
||||
* @throws SQLException If something goes wrong in the database
|
||||
* @throws AuthorizeException if authorization error
|
||||
*/
|
||||
public void updatePoliciesOfDerivativeBitstreams(Context context, Item item, Bitstream source)
|
||||
throws SQLException, AuthorizeException;
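A minimal usage sketch of this new service method; the factory-style lookup is an assumption about the usual DSpace accessor, and context, item and sourceBitstream are placeholders (Spring-managed callers would normally have the service injected instead):

// Hypothetical caller: re-align derivative bitstream policies with their source bitstream.
MediaFilterService mediaFilterService =
        MediaFilterServiceFactory.getInstance().getMediaFilterService();
mediaFilterService.updatePoliciesOfDerivativeBitstreams(context, item, sourceBitstream);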
|
||||
|
||||
/**
|
||||
* Return the item that is currently being processed/filtered
|
||||
* by the MediaFilterManager.
|
||||
|
@@ -11,54 +11,59 @@ package org.dspace.app.requestitem;
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
import javax.annotation.ManagedBean;
|
||||
import javax.inject.Inject;
|
||||
import javax.inject.Singleton;
|
||||
import javax.mail.MessagingException;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.requestitem.factory.RequestItemServiceFactory;
|
||||
import org.dspace.app.requestitem.service.RequestItemService;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Bitstream;
|
||||
import org.dspace.content.Bundle;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.BitstreamService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.Email;
|
||||
import org.dspace.core.I18nUtil;
|
||||
import org.dspace.core.LogHelper;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.handle.factory.HandleServiceFactory;
|
||||
import org.dspace.handle.service.HandleService;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/**
|
||||
* Send item requests and responses by email.
|
||||
*
|
||||
* <p>The "strategy" by which approvers are chosen is in an implementation of
|
||||
* {@link RequestItemAuthorExtractor} which is injected by the name
|
||||
* {@code requestItemAuthorExtractor}. See the DI configuration documents.
|
||||
*
|
||||
* @author Mark H. Wood <mwood@iupui.edu>
|
||||
*/
|
||||
@Singleton
|
||||
@ManagedBean
|
||||
public class RequestItemEmailNotifier {
|
||||
private static final Logger LOG = LogManager.getLogger();
|
||||
|
||||
private static final BitstreamService bitstreamService
|
||||
= ContentServiceFactory.getInstance().getBitstreamService();
|
||||
@Inject
|
||||
protected BitstreamService bitstreamService;
|
||||
|
||||
private static final ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
@Inject
|
||||
protected ConfigurationService configurationService;
|
||||
|
||||
private static final HandleService handleService
|
||||
= HandleServiceFactory.getInstance().getHandleService();
|
||||
@Inject
|
||||
protected HandleService handleService;
|
||||
|
||||
private static final RequestItemService requestItemService
|
||||
= RequestItemServiceFactory.getInstance().getRequestItemService();
|
||||
@Inject
|
||||
protected RequestItemService requestItemService;
|
||||
|
||||
private static final RequestItemAuthorExtractor requestItemAuthorExtractor
|
||||
= DSpaceServicesFactory.getInstance()
|
||||
.getServiceManager()
|
||||
.getServiceByName(null, RequestItemAuthorExtractor.class);
|
||||
protected final RequestItemAuthorExtractor requestItemAuthorExtractor;
|
||||
|
||||
private RequestItemEmailNotifier() {}
|
||||
@Inject
|
||||
public RequestItemEmailNotifier(RequestItemAuthorExtractor requestItemAuthorExtractor) {
|
||||
this.requestItemAuthorExtractor = requestItemAuthorExtractor;
|
||||
}
|
||||
|
||||
/**
|
||||
* Send the request to the approver(s).
|
||||
@@ -69,7 +74,7 @@ public class RequestItemEmailNotifier {
|
||||
* @throws IOException passed through.
|
||||
* @throws SQLException if the message was not sent.
|
||||
*/
|
||||
static public void sendRequest(Context context, RequestItem ri, String responseLink)
|
||||
public void sendRequest(Context context, RequestItem ri, String responseLink)
|
||||
throws IOException, SQLException {
|
||||
// Who is making this request?
|
||||
List<RequestItemAuthor> authors = requestItemAuthorExtractor
|
||||
@@ -146,12 +151,38 @@ public class RequestItemEmailNotifier {
|
||||
* @param message email body (may be empty).
|
||||
* @throws IOException if sending failed.
|
||||
*/
|
||||
static public void sendResponse(Context context, RequestItem ri, String subject,
|
||||
public void sendResponse(Context context, RequestItem ri, String subject,
|
||||
String message)
|
||||
throws IOException {
|
||||
// Who granted this request?
|
||||
List<RequestItemAuthor> grantors;
|
||||
try {
|
||||
grantors = requestItemAuthorExtractor.getRequestItemAuthor(context, ri.getItem());
|
||||
} catch (SQLException e) {
|
||||
LOG.warn("Failed to get grantor's name and address: {}", e.getMessage());
|
||||
grantors = List.of();
|
||||
}
|
||||
|
||||
String grantorName;
|
||||
String grantorAddress;
|
||||
if (grantors.isEmpty()) {
|
||||
grantorName = configurationService.getProperty("mail.admin.name");
|
||||
grantorAddress = configurationService.getProperty("mail.admin");
|
||||
} else {
|
||||
RequestItemAuthor grantor = grantors.get(0); // XXX Cannot know which one
|
||||
grantorName = grantor.getFullName();
|
||||
grantorAddress = grantor.getEmail();
|
||||
}
|
||||
|
||||
// Build an email back to the requester.
|
||||
Email email = new Email();
|
||||
email.setContent("body", message);
|
||||
Email email = Email.getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(),
|
||||
ri.isAccept_request() ? "request_item.granted" : "request_item.rejected"));
|
||||
email.addArgument(ri.getReqName()); // {0} requestor's name
|
||||
email.addArgument(handleService.getCanonicalForm(ri.getItem().getHandle())); // {1} URL of the requested Item
|
||||
email.addArgument(ri.getItem().getName()); // {2} title of the requested Item
|
||||
email.addArgument(grantorName); // {3} name of the grantor
|
||||
email.addArgument(grantorAddress); // {4} email of the grantor
|
||||
email.addArgument(message); // {5} grantor's optional message
|
||||
email.setSubject(subject);
|
||||
email.addRecipient(ri.getReqEmail());
|
||||
// Attach bitstreams.
|
||||
@@ -166,17 +197,25 @@ public class RequestItemEmailNotifier {
|
||||
if (!bitstream.getFormat(context).isInternal() &&
|
||||
requestItemService.isRestricted(context,
|
||||
bitstream)) {
|
||||
email.addAttachment(bitstreamService.retrieve(context,
|
||||
bitstream), bitstream.getName(),
|
||||
// #8636 Anyone receiving the email can respond to the
|
||||
// request without authenticating into DSpace
|
||||
context.turnOffAuthorisationSystem();
|
||||
email.addAttachment(
|
||||
bitstreamService.retrieve(context, bitstream),
|
||||
bitstream.getName(),
|
||||
bitstream.getFormat(context).getMIMEType());
|
||||
context.restoreAuthSystemState();
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Bitstream bitstream = ri.getBitstream();
|
||||
// #8636 Anyone receiving the email can respond to the request without authenticating into DSpace
|
||||
context.turnOffAuthorisationSystem();
|
||||
email.addAttachment(bitstreamService.retrieve(context, bitstream),
|
||||
bitstream.getName(),
|
||||
bitstream.getFormat(context).getMIMEType());
|
||||
context.restoreAuthSystemState();
|
||||
}
|
||||
email.send();
|
||||
} else {
|
||||
@@ -206,7 +245,7 @@ public class RequestItemEmailNotifier {
|
||||
* @throws IOException if the message body cannot be loaded or the message
|
||||
* cannot be sent.
|
||||
*/
|
||||
static public void requestOpenAccess(Context context, RequestItem ri)
|
||||
public void requestOpenAccess(Context context, RequestItem ri)
|
||||
throws IOException {
|
||||
Email message = Email.getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(),
|
||||
"request_item.admin"));
|
||||
@@ -228,8 +267,13 @@ public class RequestItemEmailNotifier {
|
||||
message.addArgument(bitstreamName); // {0} bitstream name or "all"
|
||||
message.addArgument(item.getHandle()); // {1} Item handle
|
||||
message.addArgument(ri.getToken()); // {2} Request token
|
||||
if (approver != null) {
|
||||
message.addArgument(approver.getFullName()); // {3} Approver's name
|
||||
message.addArgument(approver.getEmail()); // {4} Approver's address
|
||||
} else {
|
||||
message.addArgument("anonymous approver"); // [3] Approver's name
|
||||
message.addArgument(configurationService.getProperty("mail.admin")); // [4] Approver's address
|
||||
}
|
||||
|
||||
// Who gets this message?
|
||||
String recipient;
|
||||
|
@@ -22,21 +22,27 @@ import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.lang.NonNull;
|
||||
|
||||
/**
|
||||
* RequestItem strategy to allow DSpace support team's helpdesk to receive requestItem request.
|
||||
* With this enabled, then the Item author/submitter doesn't receive the request, but the helpdesk instead does.
|
||||
* RequestItem strategy to allow DSpace support team's help desk to receive
|
||||
* requestItem requests. With this enabled, the Item author/submitter doesn't
|
||||
* receive the request, but the help desk instead does.
|
||||
*
|
||||
* Failover to the RequestItemSubmitterStrategy, which means the submitter would get the request if there is no
|
||||
* specified helpdesk email.
|
||||
* <p>Fails over to the {@link RequestItemSubmitterStrategy}, which means the
|
||||
* submitter would get the request if there is no specified help desk email.
|
||||
*
|
||||
* @author Sam Ottenhoff
|
||||
* @author Peter Dietz
|
||||
*/
|
||||
public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
|
||||
public class RequestItemHelpdeskStrategy
|
||||
extends RequestItemSubmitterStrategy {
|
||||
static final String P_HELPDESK_OVERRIDE
|
||||
= "request.item.helpdesk.override";
|
||||
static final String P_MAIL_HELPDESK = "mail.helpdesk";
|
||||
|
||||
@Autowired(required = true)
|
||||
protected EPersonService ePersonService;
|
||||
|
||||
@Autowired(required = true)
|
||||
private ConfigurationService configuration;
|
||||
protected ConfigurationService configurationService;
|
||||
|
||||
public RequestItemHelpdeskStrategy() {
|
||||
}
|
||||
@@ -45,9 +51,9 @@ public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
|
||||
@NonNull
|
||||
public List<RequestItemAuthor> getRequestItemAuthor(Context context, Item item)
|
||||
throws SQLException {
|
||||
boolean helpdeskOverridesSubmitter = configuration
|
||||
boolean helpdeskOverridesSubmitter = configurationService
|
||||
.getBooleanProperty("request.item.helpdesk.override", false);
|
||||
String helpDeskEmail = configuration.getProperty("mail.helpdesk");
|
||||
String helpDeskEmail = configurationService.getProperty("mail.helpdesk");
|
||||
|
||||
if (helpdeskOverridesSubmitter && StringUtils.isNotBlank(helpDeskEmail)) {
|
||||
List<RequestItemAuthor> authors = new ArrayList<>(1);
|
||||
@@ -60,16 +66,18 @@ public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a RequestItemAuthor object for the specified helpdesk email address.
|
||||
* It makes an attempt to find if there is a matching eperson for the helpdesk address, to use the name,
|
||||
* Otherwise it falls back to a helpdeskname key in the Messages.props.
|
||||
* Return a RequestItemAuthor object for the specified help desk email address.
|
||||
* It makes an attempt to find if there is a matching {@link EPerson} for
|
||||
* the help desk address, to use its name. Otherwise it falls back to the
|
||||
* {@code helpdeskname} key in {@code Messages.properties}.
|
||||
*
|
||||
* @param context context
|
||||
* @param helpDeskEmail email
|
||||
* @return RequestItemAuthor
|
||||
* @throws SQLException if database error
|
||||
*/
|
||||
public RequestItemAuthor getHelpDeskPerson(Context context, String helpDeskEmail) throws SQLException {
|
||||
public RequestItemAuthor getHelpDeskPerson(Context context, String helpDeskEmail)
|
||||
throws SQLException {
|
||||
context.turnOffAuthorisationSystem();
|
||||
EPerson helpdeskEPerson = ePersonService.findByEmail(context, helpDeskEmail);
|
||||
context.restoreAuthSystemState();
|
||||
|
@@ -9,6 +9,7 @@ package org.dspace.app.requestitem;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.Date;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
@@ -90,6 +91,11 @@ public class RequestItemServiceImpl implements RequestItemService {
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator<RequestItem> findByItem(Context context, Item item) throws SQLException {
|
||||
return requestItemDAO.findByItem(context, item);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void update(Context context, RequestItem requestItem) {
|
||||
try {
|
||||
|
@@ -22,7 +22,6 @@ import org.springframework.lang.NonNull;
|
||||
* @author Andrea Bollini
|
||||
*/
|
||||
public class RequestItemSubmitterStrategy implements RequestItemAuthorExtractor {
|
||||
|
||||
public RequestItemSubmitterStrategy() {
|
||||
}
|
||||
|
||||
|
@@ -8,8 +8,10 @@
|
||||
package org.dspace.app.requestitem.dao;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.Iterator;
|
||||
|
||||
import org.dspace.app.requestitem.RequestItem;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.GenericDAO;
|
||||
|
||||
@@ -32,4 +34,6 @@ public interface RequestItemDAO extends GenericDAO<RequestItem> {
|
||||
* @throws SQLException passed through.
|
||||
*/
|
||||
public RequestItem findByToken(Context context, String token) throws SQLException;
|
||||
|
||||
public Iterator<RequestItem> findByItem(Context context, Item item) throws SQLException;
|
||||
}
|
||||
|
@@ -8,6 +8,8 @@
|
||||
package org.dspace.app.requestitem.dao.impl;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.Iterator;
|
||||
import javax.persistence.Query;
|
||||
import javax.persistence.criteria.CriteriaBuilder;
|
||||
import javax.persistence.criteria.CriteriaQuery;
|
||||
import javax.persistence.criteria.Root;
|
||||
@@ -15,6 +17,7 @@ import javax.persistence.criteria.Root;
|
||||
import org.dspace.app.requestitem.RequestItem;
|
||||
import org.dspace.app.requestitem.RequestItem_;
|
||||
import org.dspace.app.requestitem.dao.RequestItemDAO;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.core.AbstractHibernateDAO;
|
||||
import org.dspace.core.Context;
|
||||
|
||||
@@ -39,4 +42,10 @@ public class RequestItemDAOImpl extends AbstractHibernateDAO<RequestItem> implem
|
||||
criteriaQuery.where(criteriaBuilder.equal(requestItemRoot.get(RequestItem_.token), token));
|
||||
return uniqueResult(context, criteriaQuery, false, RequestItem.class);
|
||||
}
|
||||
@Override
|
||||
public Iterator<RequestItem> findByItem(Context context, Item item) throws SQLException {
|
||||
Query query = createQuery(context, "FROM RequestItem WHERE item_id= :uuid");
|
||||
query.setParameter("uuid", item.getID());
|
||||
return iterate(query);
|
||||
}
|
||||
}
|
||||
|
@@ -12,10 +12,15 @@
|
||||
* e-mailed to a responsible party for consideration and action. Find details
|
||||
* in the user documentation under the rubric "Request a Copy".
|
||||
*
|
||||
* <p>This package includes several "strategy" classes which discover responsible
|
||||
* parties in various ways. See {@link RequestItemSubmitterStrategy} and the
|
||||
* classes which extend it. A strategy class must be configured and identified
|
||||
* as {@link RequestItemAuthorExtractor} for injection into code which requires
|
||||
* Request a Copy services.
|
||||
* <p>Mailing is handled by {@link RequestItemEmailNotifier}. Responsible
|
||||
* parties are represented by {@link RequestItemAuthor}
|
||||
*
|
||||
* <p>This package includes several "strategy" classes which discover
|
||||
* responsible parties in various ways. See
|
||||
* {@link RequestItemSubmitterStrategy} and the classes which extend it, and
|
||||
* others which implement {@link RequestItemAuthorExtractor}. A strategy class
|
||||
* must be configured and identified as {@link requestItemAuthorExtractor}
|
||||
* (<em>note capitalization</em>) for injection into code which requires Request
|
||||
* a Copy services.
|
||||
*/
|
||||
package org.dspace.app.requestitem;
|
||||
|
@@ -8,6 +8,7 @@
|
||||
package org.dspace.app.requestitem.service;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.app.requestitem.RequestItem;
|
||||
@@ -62,6 +63,14 @@ public interface RequestItemService {
|
||||
*/
|
||||
public RequestItem findByToken(Context context, String token);
|
||||
|
||||
/**
|
||||
* Retrieve all requests made for a given item.
* @param context current DSpace session.
* @param item the item to find requests for.
* @return an iterator over the matching requests.
* @throws SQLException passed through.
|
||||
*/
|
||||
public Iterator<RequestItem> findByItem(Context context, Item item) throws SQLException;
|
||||
|
||||
/**
|
||||
* Save updates to the record. Only accept_request, and decision_date are set-able.
|
||||
*
|
||||
|
@@ -8,7 +8,6 @@
|
||||
package org.dspace.app.solrdatabaseresync;
|
||||
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||
|
||||
/**
|
||||
@@ -27,11 +26,6 @@ public class SolrDatabaseResyncCliScriptConfiguration extends ScriptConfiguratio
|
||||
this.dspaceRunnableClass = dspaceRunnableClass;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isAllowedToExecute(Context context) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Options getOptions() {
|
||||
if (options == null) {
|
||||
|
@@ -10,6 +10,7 @@ package org.dspace.app.util;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.regex.PatternSyntaxException;
|
||||
import javax.annotation.Nullable;
|
||||
@@ -131,10 +132,15 @@ public class DCInput {
|
||||
private boolean closedVocabulary = false;
|
||||
|
||||
/**
|
||||
* the regex to comply with, null if nothing
|
||||
* the regex in ECMAScript standard format, usable also by the REST layer.
|
||||
*/
|
||||
private String regex = null;
|
||||
|
||||
/**
|
||||
* the computed pattern, null if nothing
|
||||
*/
|
||||
private Pattern pattern = null;
|
||||
|
||||
/**
|
||||
* allowed document types
|
||||
*/
|
||||
@@ -178,7 +184,7 @@ public class DCInput {
|
||||
|
||||
//check if the input have a language tag
|
||||
language = Boolean.valueOf(fieldMap.get("language"));
|
||||
valueLanguageList = new ArrayList();
|
||||
valueLanguageList = new ArrayList<>();
|
||||
if (language) {
|
||||
String languageNameTmp = fieldMap.get("value-pairs-name");
|
||||
if (StringUtils.isBlank(languageNameTmp)) {
|
||||
@@ -191,7 +197,7 @@ public class DCInput {
|
||||
repeatable = "true".equalsIgnoreCase(repStr)
|
||||
|| "yes".equalsIgnoreCase(repStr);
|
||||
String nameVariantsString = fieldMap.get("name-variants");
|
||||
nameVariants = (StringUtils.isNotBlank(nameVariantsString)) ?
|
||||
nameVariants = StringUtils.isNotBlank(nameVariantsString) ?
|
||||
nameVariantsString.equalsIgnoreCase("true") : false;
|
||||
label = fieldMap.get("label");
|
||||
inputType = fieldMap.get("input-type");
|
||||
@@ -203,17 +209,17 @@ public class DCInput {
|
||||
}
|
||||
hint = fieldMap.get("hint");
|
||||
warning = fieldMap.get("required");
|
||||
required = (warning != null && warning.length() > 0);
|
||||
required = warning != null && warning.length() > 0;
|
||||
visibility = fieldMap.get("visibility");
|
||||
readOnly = fieldMap.get("readonly");
|
||||
vocabulary = fieldMap.get("vocabulary");
|
||||
regex = fieldMap.get("regex");
|
||||
this.initRegex(fieldMap.get("regex"));
|
||||
String closedVocabularyStr = fieldMap.get("closedVocabulary");
|
||||
closedVocabulary = "true".equalsIgnoreCase(closedVocabularyStr)
|
||||
|| "yes".equalsIgnoreCase(closedVocabularyStr);
|
||||
|
||||
// parsing of the <type-bind> element (using the colon as split separator)
|
||||
typeBind = new ArrayList<>();
|
||||
typeBind = new ArrayList<String>();
|
||||
String typeBindDef = fieldMap.get("type-bind");
|
||||
if (typeBindDef != null && typeBindDef.trim().length() > 0) {
|
||||
String[] types = typeBindDef.split(",");
|
||||
@@ -238,6 +244,22 @@ public class DCInput {
|
||||
|
||||
}
|
||||
|
||||
protected void initRegex(String regex) {
|
||||
this.regex = null;
|
||||
this.pattern = null;
|
||||
if (regex != null) {
|
||||
try {
|
||||
Optional.ofNullable(RegexPatternUtils.computePattern(regex))
|
||||
.ifPresent(pattern -> {
|
||||
this.pattern = pattern;
|
||||
this.regex = regex;
|
||||
});
|
||||
} catch (PatternSyntaxException e) {
|
||||
log.warn("The regex field of input {} with value {} is invalid!", this.label, regex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Is this DCInput for display in the given scope? The scope should be
|
||||
* either "workflow" or "submit", as per the input forms definition. If the
|
||||
@@ -248,7 +270,7 @@ public class DCInput {
|
||||
* @return whether the input should be displayed or not
|
||||
*/
|
||||
public boolean isVisible(String scope) {
|
||||
return (visibility == null || visibility.equals(scope));
|
||||
return visibility == null || visibility.equals(scope);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -512,8 +534,12 @@ public class DCInput {
|
||||
return visibility;
|
||||
}
|
||||
|
||||
public Pattern getPattern() {
|
||||
return this.pattern;
|
||||
}
|
||||
|
||||
public String getRegex() {
|
||||
return regex;
|
||||
return this.regex;
|
||||
}
|
||||
|
||||
public String getFieldName() {
|
||||
@@ -546,8 +572,7 @@ public class DCInput {
|
||||
public boolean validate(String value) {
|
||||
if (StringUtils.isNotBlank(value)) {
|
||||
try {
|
||||
if (StringUtils.isNotBlank(regex)) {
|
||||
Pattern pattern = Pattern.compile(regex);
|
||||
if (this.pattern != null) {
|
||||
if (!pattern.matcher(value).matches()) {
|
||||
return false;
|
||||
}
|
||||
@@ -557,7 +582,6 @@ public class DCInput {
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@@ -10,6 +10,7 @@ package org.dspace.app.util;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.dspace.core.Utils;
|
||||
@@ -118,9 +119,12 @@ public class DCInputSet {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
} else if (field.isRelationshipField() &&
|
||||
("relation." + field.getRelationshipType()).equals(fieldName)) {
|
||||
return true;
|
||||
} else {
|
||||
String fullName = field.getFieldName();
|
||||
if (fullName.equals(fieldName)) {
|
||||
if (Objects.equals(fullName, fieldName)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
@@ -0,0 +1,73 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.util;
|
||||
|
||||
import static java.util.regex.Pattern.CASE_INSENSITIVE;
|
||||
|
||||
import java.util.Optional;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.regex.PatternSyntaxException;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
/**
|
||||
* Utility class for checking regexes and computing patterns.
|
||||
*
|
||||
* @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com)
|
||||
*
|
||||
*/
|
||||
public class RegexPatternUtils {
|
||||
|
||||
// checks input having the format /{pattern}/{flags}
|
||||
// allowed flags are: g,i,m,s,u,y
|
||||
public static final String REGEX_INPUT_VALIDATOR = "(/?)(.+)\\1([gimsuy]*)";
|
||||
// flags usable inside regex definition using format (?i|m|s|u|y)
|
||||
public static final String REGEX_FLAGS = "(?%s)";
|
||||
public static final Pattern PATTERN_REGEX_INPUT_VALIDATOR =
|
||||
Pattern.compile(REGEX_INPUT_VALIDATOR, CASE_INSENSITIVE);
|
||||
|
||||
/**
|
||||
* Computes a pattern starting from a regex definition with flags that
* uses the standard format: <code>/{regex}/{flags}</code> (ECMAScript format).
* This method can transform an ECMAScript regex into a Java {@code Pattern} object
* which can be used to validate strings.
* <br/>
* If the regex is null, empty or blank, a null {@code Pattern} is returned.
* If it is a valid regex, a non-null {@code Pattern} is returned;
* otherwise an exception is thrown.
*
* @param regex with format <code>/{regex}/{flags}</code>
* @return {@code Pattern} regex pattern instance
* @throws PatternSyntaxException if the regex cannot be compiled into a valid pattern
|
||||
*/
|
||||
public static final Pattern computePattern(String regex) throws PatternSyntaxException {
|
||||
if (StringUtils.isBlank(regex)) {
|
||||
return null;
|
||||
}
|
||||
Matcher inputMatcher = PATTERN_REGEX_INPUT_VALIDATOR.matcher(regex);
|
||||
String regexPattern = regex;
|
||||
String regexFlags = "";
|
||||
if (inputMatcher.matches()) {
|
||||
regexPattern =
|
||||
Optional.of(inputMatcher.group(2))
|
||||
.filter(StringUtils::isNotBlank)
|
||||
.orElse(regex);
|
||||
regexFlags =
|
||||
Optional.ofNullable(inputMatcher.group(3))
|
||||
.filter(StringUtils::isNotBlank)
|
||||
.map(flags -> String.format(REGEX_FLAGS, flags))
|
||||
.orElse("")
|
||||
.replaceAll("g", "");
|
||||
}
|
||||
return Pattern.compile(regexFlags + regexPattern);
|
||||
}
|
||||
|
||||
private RegexPatternUtils() {}
|
||||
|
||||
}
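A brief usage sketch of the utility above, assuming the imports shown in the class; the sample regexes are illustrative only, and the expected results follow from computePattern as written:

// ECMAScript-style input: compiled as "(?i)^[a-z]+$", so the match below succeeds.
Pattern caseInsensitive = RegexPatternUtils.computePattern("/^[a-z]+$/i");
boolean matched = caseInsensitive.matcher("DSpace").matches(); // true

// A plain Java regex passes through unchanged.
Pattern plain = RegexPatternUtils.computePattern("^\\d{4}$");

// Blank input yields no pattern.
Pattern none = RegexPatternUtils.computePattern("   "); // null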
|
@@ -22,7 +22,10 @@ import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.CollectionService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.discovery.SearchServiceException;
|
||||
import org.dspace.handle.factory.HandleServiceFactory;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.w3c.dom.Document;
|
||||
@@ -105,6 +108,13 @@ public class SubmissionConfigReader {
|
||||
*/
|
||||
private SubmissionConfig lastSubmissionConfig = null;
|
||||
|
||||
/**
|
||||
* Collection Service instance, needed to interact with collection's
|
||||
* stored data
|
||||
*/
|
||||
protected static final CollectionService collectionService
|
||||
= ContentServiceFactory.getInstance().getCollectionService();
|
||||
|
||||
/**
|
||||
* Load Submission Configuration from the
|
||||
* item-submission.xml configuration file
|
||||
@@ -152,6 +162,9 @@ public class SubmissionConfigReader {
|
||||
} catch (FactoryConfigurationError fe) {
|
||||
throw new SubmissionConfigReaderException(
|
||||
"Cannot create Item Submission Configuration parser", fe);
|
||||
} catch (SearchServiceException se) {
|
||||
throw new SubmissionConfigReaderException(
|
||||
"Cannot perform a discovery search for Item Submission Configuration", se);
|
||||
} catch (Exception e) {
|
||||
throw new SubmissionConfigReaderException(
|
||||
"Error creating Item Submission Configuration: " + e);
|
||||
@@ -287,7 +300,7 @@ public class SubmissionConfigReader {
|
||||
* should correspond to the collection-form maps, the form definitions, and
|
||||
* the display/storage word pairs.
|
||||
*/
|
||||
private void doNodes(Node n) throws SAXException, SubmissionConfigReaderException {
|
||||
private void doNodes(Node n) throws SAXException, SearchServiceException, SubmissionConfigReaderException {
|
||||
if (n == null) {
|
||||
return;
|
||||
}
|
||||
@@ -334,18 +347,23 @@ public class SubmissionConfigReader {
|
||||
* the collection handle and item submission name, put name in hashmap keyed
|
||||
* by the collection handle.
|
||||
*/
|
||||
private void processMap(Node e) throws SAXException {
|
||||
private void processMap(Node e) throws SAXException, SearchServiceException {
|
||||
// create a context
|
||||
Context context = new Context();
|
||||
|
||||
NodeList nl = e.getChildNodes();
|
||||
int len = nl.getLength();
|
||||
for (int i = 0; i < len; i++) {
|
||||
Node nd = nl.item(i);
|
||||
if (nd.getNodeName().equals("name-map")) {
|
||||
String id = getAttribute(nd, "collection-handle");
|
||||
String entityType = getAttribute(nd, "collection-entity-type");
|
||||
String value = getAttribute(nd, "submission-name");
|
||||
String content = getValue(nd);
|
||||
if (id == null) {
|
||||
if (id == null && entityType == null) {
|
||||
throw new SAXException(
|
||||
"name-map element is missing collection-handle attribute in 'item-submission.xml'");
|
||||
"name-map element is missing collection-handle or collection-entity-type attribute " +
|
||||
"in 'item-submission.xml'");
|
||||
}
|
||||
if (value == null) {
|
||||
throw new SAXException(
|
||||
@@ -355,7 +373,17 @@ public class SubmissionConfigReader {
|
||||
throw new SAXException(
|
||||
"name-map element has content in 'item-submission.xml', it should be empty.");
|
||||
}
|
||||
if (id != null) {
|
||||
collectionToSubmissionConfig.put(id, value);
|
||||
|
||||
} else {
|
||||
// get all collections for this entity-type
|
||||
List<Collection> collections = collectionService.findAllCollectionsByEntityType( context,
|
||||
entityType);
|
||||
for (Collection collection : collections) {
|
||||
collectionToSubmissionConfig.putIfAbsent(collection.getHandle(), value);
|
||||
}
|
||||
}
|
||||
} // ignore any child node that isn't a "name-map"
|
||||
}
|
||||
}
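For illustration, the two name-map forms this parser now accepts would look roughly like the following entries in item-submission.xml; the handle, entity type and submission names are placeholders:

<!-- map a single collection by handle, and every Publication-typed collection by entity type -->
<name-map collection-handle="123456789/42" submission-name="traditional"/>
<name-map collection-entity-type="Publication" submission-name="publication"/>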
|
||||
|
@@ -11,6 +11,9 @@ import java.io.Serializable;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.commons.lang3.BooleanUtils;
|
||||
import org.dspace.content.InProgressSubmission;
|
||||
import org.dspace.content.WorkspaceItem;
|
||||
import org.hibernate.proxy.HibernateProxyHelper;
|
||||
|
||||
/**
|
||||
* Class representing configuration for a single step within an Item Submission
|
||||
@@ -173,6 +176,38 @@ public class SubmissionStepConfig implements Serializable {
|
||||
return visibilityOutside;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if given submission section object is hidden for the current submission scope
|
||||
*
|
||||
* @param obj the InProgressSubmission to check
|
||||
* @return true if the submission section is hidden, false otherwise
|
||||
*/
|
||||
public boolean isHiddenForInProgressSubmission(InProgressSubmission obj) {
|
||||
|
||||
String scopeToCheck = getScope(obj);
|
||||
|
||||
if (scope == null || scopeToCheck == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
String visibility = getVisibility();
|
||||
String visibilityOutside = getVisibilityOutside();
|
||||
|
||||
if (scope.equalsIgnoreCase(scopeToCheck)) {
|
||||
return "hidden".equalsIgnoreCase(visibility);
|
||||
} else {
|
||||
return visibilityOutside == null || "hidden".equalsIgnoreCase(visibilityOutside);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private String getScope(InProgressSubmission obj) {
|
||||
if (HibernateProxyHelper.getClassWithoutInitializingProxy(obj).equals(WorkspaceItem.class)) {
|
||||
return "submission";
|
||||
}
|
||||
return "workflow";
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the number of this step in the current Submission process config.
|
||||
* Step numbers start with #0 (although step #0 is ALWAYS the special
|
||||
|
@@ -51,6 +51,7 @@ import org.dspace.content.service.CollectionService;
|
||||
import org.dspace.content.service.CommunityService;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.I18nUtil;
|
||||
import org.dspace.discovery.IndexableObject;
|
||||
import org.dspace.discovery.indexobject.IndexableCollection;
|
||||
import org.dspace.discovery.indexobject.IndexableCommunity;
|
||||
@@ -91,6 +92,7 @@ public class SyndicationFeed {
|
||||
|
||||
// default DC fields for entry
|
||||
protected String defaultTitleField = "dc.title";
|
||||
protected String defaultDescriptionField = "dc.description";
|
||||
protected String defaultAuthorField = "dc.contributor.author";
|
||||
protected String defaultDateField = "dc.date.issued";
|
||||
private static final String[] defaultDescriptionFields =
|
||||
@@ -196,15 +198,15 @@ public class SyndicationFeed {
|
||||
// dso is null for the whole site, or a search without scope
|
||||
if (dso == null) {
|
||||
defaultTitle = configurationService.getProperty("dspace.name");
|
||||
feed.setDescription(localize(labels, MSG_FEED_DESCRIPTION));
|
||||
defaultDescriptionField = localize(labels, MSG_FEED_DESCRIPTION);
|
||||
objectURL = resolveURL(request, null);
|
||||
} else {
|
||||
Bitstream logo = null;
|
||||
if (dso instanceof IndexableCollection) {
|
||||
Collection col = ((IndexableCollection) dso).getIndexedObject();
|
||||
defaultTitle = col.getName();
|
||||
feed.setDescription(collectionService.getMetadataFirstValue(col,
|
||||
CollectionService.MD_SHORT_DESCRIPTION, Item.ANY));
|
||||
defaultDescriptionField = collectionService.getMetadataFirstValue(col,
|
||||
CollectionService.MD_SHORT_DESCRIPTION, Item.ANY);
|
||||
logo = col.getLogo();
|
||||
String cols = configurationService.getProperty("webui.feed.podcast.collections");
|
||||
if (cols != null && cols.length() > 1 && cols.contains(col.getHandle())) {
|
||||
@@ -214,8 +216,8 @@ public class SyndicationFeed {
|
||||
} else if (dso instanceof IndexableCommunity) {
|
||||
Community comm = ((IndexableCommunity) dso).getIndexedObject();
|
||||
defaultTitle = comm.getName();
|
||||
feed.setDescription(communityService.getMetadataFirstValue(comm,
|
||||
CommunityService.MD_SHORT_DESCRIPTION, Item.ANY));
|
||||
defaultDescriptionField = communityService.getMetadataFirstValue(comm,
|
||||
CommunityService.MD_SHORT_DESCRIPTION, Item.ANY);
|
||||
logo = comm.getLogo();
|
||||
String comms = configurationService.getProperty("webui.feed.podcast.communities");
|
||||
if (comms != null && comms.length() > 1 && comms.contains(comm.getHandle())) {
|
||||
@@ -230,6 +232,12 @@ public class SyndicationFeed {
|
||||
}
|
||||
feed.setTitle(labels.containsKey(MSG_FEED_TITLE) ?
|
||||
localize(labels, MSG_FEED_TITLE) : defaultTitle);
|
||||
|
||||
if (defaultDescriptionField == null || defaultDescriptionField == "") {
|
||||
defaultDescriptionField = I18nUtil.getMessage("org.dspace.app.util.SyndicationFeed.no-description");
|
||||
}
|
||||
|
||||
feed.setDescription(defaultDescriptionField);
|
||||
feed.setLink(objectURL);
|
||||
feed.setPublishedDate(new Date());
|
||||
feed.setUri(objectURL);
|
||||
|
@@ -52,11 +52,6 @@ public class IPAuthentication implements AuthenticationMethod {
|
||||
*/
|
||||
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(IPAuthentication.class);
|
||||
|
||||
/**
|
||||
* Whether to look for x-forwarded headers for logging IP addresses
|
||||
*/
|
||||
protected static Boolean useProxies;
|
||||
|
||||
/**
|
||||
* All the IP matchers
|
||||
*/
|
||||
@@ -250,7 +245,7 @@ public class IPAuthentication implements AuthenticationMethod {
|
||||
|
||||
log.debug(LogHelper.getHeader(context, "authenticated",
|
||||
"special_groups=" + gsb.toString()
|
||||
+ " (by IP=" + addr + ", useProxies=" + useProxies.toString() + ")"
|
||||
+ " (by IP=" + addr + ")"
|
||||
));
|
||||
}
|
||||
|
||||
|
@@ -11,9 +11,11 @@ import static org.dspace.eperson.service.EPersonService.MD_PHONE;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Hashtable;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import javax.naming.NamingEnumeration;
|
||||
import javax.naming.NamingException;
|
||||
@@ -64,6 +66,7 @@ import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
* @author Reuben Pasquini
|
||||
* @author Samuel Ottenhoff
|
||||
* @author Ivan Masár
|
||||
* @author Michael Plate
|
||||
*/
|
||||
public class LDAPAuthentication
|
||||
implements AuthenticationMethod {
|
||||
@@ -391,7 +394,7 @@ public class LDAPAuthentication
|
||||
protected String ldapGivenName = null;
|
||||
protected String ldapSurname = null;
|
||||
protected String ldapPhone = null;
|
||||
protected String ldapGroup = null;
|
||||
protected ArrayList<String> ldapGroup = null;
|
||||
|
||||
/**
|
||||
* LDAP settings
|
||||
@@ -406,9 +409,9 @@ public class LDAPAuthentication
|
||||
final String ldap_surname_field;
|
||||
final String ldap_phone_field;
|
||||
final String ldap_group_field;
|
||||
|
||||
final boolean useTLS;
|
||||
|
||||
|
||||
SpeakerToLDAP(Logger thelog) {
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
@@ -547,7 +550,11 @@ public class LDAPAuthentication
|
||||
if (attlist[4] != null) {
|
||||
att = atts.get(attlist[4]);
|
||||
if (att != null) {
|
||||
ldapGroup = (String) att.get();
|
||||
// loop through all groups returned by LDAP
|
||||
ldapGroup = new ArrayList<String>();
|
||||
for (NamingEnumeration val = att.getAll(); val.hasMoreElements(); ) {
|
||||
ldapGroup.add((String) val.next());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -693,24 +700,44 @@ public class LDAPAuthentication
|
||||
/*
|
||||
* Add authenticated users to the group defined in dspace.cfg by
|
||||
* the authentication-ldap.login.groupmap.* key.
|
||||
*
|
||||
* @param dn
|
||||
* The string containing distinguished name of the user
|
||||
*
|
||||
* @param group
|
||||
* List of strings with LDAP dn of groups
|
||||
*
|
||||
* @param context
|
||||
* DSpace context
|
||||
*/
|
||||
private void assignGroups(String dn, String group, Context context) {
|
||||
private void assignGroups(String dn, ArrayList<String> group, Context context) {
|
||||
if (StringUtils.isNotBlank(dn)) {
|
||||
System.out.println("dn:" + dn);
|
||||
int i = 1;
|
||||
String groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + i);
|
||||
|
||||
boolean cmp;
|
||||
|
||||
|
||||
// groupmap contains the mapping of LDAP groups to DSpace groups
|
||||
// outer loop with the DSpace groups
|
||||
while (groupMap != null) {
|
||||
String t[] = groupMap.split(":");
|
||||
String ldapSearchString = t[0];
|
||||
String dspaceGroupName = t[1];
|
||||
|
||||
if (group == null) {
|
||||
// list of strings with dn from LDAP groups
|
||||
// inner loop
|
||||
Iterator<String> groupIterator = group.iterator();
|
||||
while (groupIterator.hasNext()) {
|
||||
|
||||
// save the current entry from iterator for further use
|
||||
String currentGroup = groupIterator.next();
|
||||
|
||||
// very much the old code from DSpace <= 7.5
|
||||
if (currentGroup == null) {
|
||||
cmp = StringUtils.containsIgnoreCase(dn, ldapSearchString + ",");
|
||||
} else {
|
||||
cmp = StringUtils.equalsIgnoreCase(group, ldapSearchString);
|
||||
cmp = StringUtils.equalsIgnoreCase(currentGroup, ldapSearchString);
|
||||
}
|
||||
|
||||
if (cmp) {
|
||||
@@ -737,6 +764,7 @@ public class LDAPAuthentication
|
||||
dspaceGroupName));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + ++i);
|
||||
}
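As a worked example of the mapping parsed by the loop above: each groupmap entry is split on ':' into an LDAP search string (matched against the group DNs returned for the user, or against the user's DN) and a DSpace group name. The values below are placeholders:

# Hypothetical authentication-ldap.cfg entries
authentication-ldap.login.groupmap.1 = cn=dspace-admins,ou=groups,dc=example,dc=org:Administrator
authentication-ldap.login.groupmap.2 = ou=library-staff:Library Staff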
|
||||
|
@@ -31,16 +31,19 @@ import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.BitstreamService;
|
||||
import org.dspace.content.service.CollectionService;
|
||||
import org.dspace.content.service.WorkspaceItemService;
|
||||
import org.dspace.core.Constants;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.discovery.DiscoverQuery;
|
||||
import org.dspace.discovery.DiscoverQuery.SORT_ORDER;
|
||||
import org.dspace.discovery.DiscoverResult;
|
||||
import org.dspace.discovery.IndexableObject;
|
||||
import org.dspace.discovery.SearchService;
|
||||
import org.dspace.discovery.SearchServiceException;
|
||||
import org.dspace.discovery.indexobject.IndexableCollection;
|
||||
import org.dspace.discovery.indexobject.IndexableCommunity;
|
||||
import org.dspace.discovery.indexobject.IndexableItem;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.eperson.Group;
|
||||
import org.dspace.eperson.service.GroupService;
|
||||
@@ -521,6 +524,15 @@ public class AuthorizeServiceImpl implements AuthorizeService {
|
||||
addPolicies(c, nonAdminPolicies, dest);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void replaceAllPolicies(Context context, DSpaceObject source, DSpaceObject dest)
|
||||
throws SQLException, AuthorizeException {
|
||||
// find all policies for the source object
|
||||
List<ResourcePolicy> policies = getPolicies(context, source);
|
||||
removeAllPolicies(context, dest);
|
||||
addPolicies(context, policies, dest);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void switchPoliciesAction(Context context, DSpaceObject dso, int fromAction, int toAction)
|
||||
throws SQLException, AuthorizeException {
|
||||
@@ -643,60 +655,6 @@ public class AuthorizeServiceImpl implements AuthorizeService {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate Policies policies READ for the date in input adding reason. New policies are assigned automatically
|
||||
* at the groups that
|
||||
* have right on the collection. E.g., if the anonymous can access the collection policies are assigned to
|
||||
* anonymous.
|
||||
*
|
||||
* @param context The relevant DSpace Context.
|
||||
* @param embargoDate embargo end date
|
||||
* @param reason embargo reason
|
||||
* @param dso DSpace object
|
||||
* @param owningCollection collection to get group policies from
|
||||
* @throws SQLException if database error
|
||||
* @throws AuthorizeException if authorization error
|
||||
*/
|
||||
@Override
|
||||
public void generateAutomaticPolicies(Context context, Date embargoDate,
|
||||
String reason, DSpaceObject dso, Collection owningCollection)
|
||||
throws SQLException, AuthorizeException {
|
||||
|
||||
if (embargoDate != null || (embargoDate == null && dso instanceof Bitstream)) {
|
||||
|
||||
List<Group> authorizedGroups = getAuthorizedGroups(context, owningCollection, Constants.DEFAULT_ITEM_READ);
|
||||
|
||||
removeAllPoliciesByDSOAndType(context, dso, ResourcePolicy.TYPE_CUSTOM);
|
||||
|
||||
// look for anonymous
|
||||
boolean isAnonymousInPlace = false;
|
||||
for (Group g : authorizedGroups) {
|
||||
if (StringUtils.equals(g.getName(), Group.ANONYMOUS)) {
|
||||
isAnonymousInPlace = true;
|
||||
}
|
||||
}
|
||||
if (!isAnonymousInPlace) {
|
||||
// add policies for all the groups
|
||||
for (Group g : authorizedGroups) {
|
||||
ResourcePolicy rp = createOrModifyPolicy(null, context, null, g, null, embargoDate, Constants.READ,
|
||||
reason, dso);
|
||||
if (rp != null) {
|
||||
resourcePolicyService.update(context, rp);
|
||||
}
|
||||
}
|
||||
|
||||
} else {
|
||||
// add policy just for anonymous
|
||||
ResourcePolicy rp = createOrModifyPolicy(null, context, null,
|
||||
groupService.findByName(context, Group.ANONYMOUS), null,
|
||||
embargoDate, Constants.READ, reason, dso);
|
||||
if (rp != null) {
|
||||
resourcePolicyService.update(context, rp);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public ResourcePolicy createResourcePolicy(Context context, DSpaceObject dso, Group group, EPerson eperson,
|
||||
int type, String rpType) throws SQLException, AuthorizeException {
|
||||
@@ -798,6 +756,19 @@ public class AuthorizeServiceImpl implements AuthorizeService {
|
||||
return performCheck(context, "search.resourcetype:" + IndexableCollection.TYPE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks that the context's current user is an item admin in the site by querying the solr database.
|
||||
*
|
||||
* @param context context with the current user
|
||||
* @return true if the current user is an item admin in the site
|
||||
* false when this is not the case, or an exception occurred
|
||||
* @throws java.sql.SQLException passed through.
|
||||
*/
|
||||
@Override
|
||||
public boolean isItemAdmin(Context context) throws SQLException {
|
||||
return performCheck(context, "search.resourcetype:" + IndexableItem.TYPE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks that the context's current user is a community or collection admin in the site.
|
||||
*
|
||||
@@ -830,7 +801,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
|
||||
query = formatCustomQuery(query);
|
||||
DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" +
|
||||
IndexableCommunity.TYPE,
|
||||
offset, limit);
|
||||
offset, limit, null, null);
|
||||
for (IndexableObject solrCollections : discoverResult.getIndexableObjects()) {
|
||||
Community community = ((IndexableCommunity) solrCollections).getIndexedObject();
|
||||
communities.add(community);
|
||||
@@ -852,7 +823,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
|
||||
query = formatCustomQuery(query);
|
||||
DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" +
|
||||
IndexableCommunity.TYPE,
|
||||
null, null);
|
||||
null, null, null, null);
|
||||
return discoverResult.getTotalSearchResults();
|
||||
}
|
||||
|
||||
@@ -877,7 +848,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
|
||||
query = formatCustomQuery(query);
|
||||
DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" +
|
||||
IndexableCollection.TYPE,
|
||||
offset, limit);
|
||||
offset, limit, CollectionService.SOLR_SORT_FIELD, SORT_ORDER.asc);
|
||||
for (IndexableObject solrCollections : discoverResult.getIndexableObjects()) {
|
||||
Collection collection = ((IndexableCollection) solrCollections).getIndexedObject();
|
||||
collections.add(collection);
|
||||
@@ -899,7 +870,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
|
||||
query = formatCustomQuery(query);
|
||||
DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" +
|
||||
IndexableCollection.TYPE,
|
||||
null, null);
|
||||
null, null, null, null);
|
||||
return discoverResult.getTotalSearchResults();
|
||||
}
|
||||
|
||||
@@ -919,7 +890,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
|
||||
}
|
||||
|
||||
try {
|
||||
DiscoverResult discoverResult = getDiscoverResult(context, query, null, null);
|
||||
DiscoverResult discoverResult = getDiscoverResult(context, query, null, null, null, null);
|
||||
if (discoverResult.getTotalSearchResults() > 0) {
|
||||
return true;
|
||||
}
|
||||
@@ -931,7 +902,8 @@ public class AuthorizeServiceImpl implements AuthorizeService {
|
||||
return false;
|
||||
}
|
||||
|
||||
private DiscoverResult getDiscoverResult(Context context, String query, Integer offset, Integer limit)
|
||||
private DiscoverResult getDiscoverResult(Context context, String query, Integer offset, Integer limit,
|
||||
String sortField, SORT_ORDER sortOrder)
|
||||
throws SearchServiceException, SQLException {
|
||||
String groupQuery = getGroupToQuery(groupService.allMemberGroups(context, context.getCurrentUser()));
|
||||
|
||||
@@ -947,7 +919,9 @@ public class AuthorizeServiceImpl implements AuthorizeService {
|
||||
if (limit != null) {
|
||||
discoverQuery.setMaxResults(limit);
|
||||
}
|
||||
|
||||
if (sortField != null && sortOrder != null) {
|
||||
discoverQuery.setSortField(sortField, sortOrder);
|
||||
}
|
||||
|
||||
return searchService.search(context, discoverQuery);
|
||||
}
|
||||
|
@@ -41,9 +41,16 @@ import org.hibernate.proxy.HibernateProxyHelper;
|
||||
@Entity
|
||||
@Table(name = "resourcepolicy")
|
||||
public class ResourcePolicy implements ReloadableEntity<Integer> {
|
||||
/** This policy was set on submission, to give the submitter access. */
|
||||
public static String TYPE_SUBMISSION = "TYPE_SUBMISSION";
|
||||
|
||||
/** This policy was set to allow access by a workflow group. */
|
||||
public static String TYPE_WORKFLOW = "TYPE_WORKFLOW";
|
||||
|
||||
/** This policy was explicitly set on this object. */
|
||||
public static String TYPE_CUSTOM = "TYPE_CUSTOM";
|
||||
|
||||
/** This policy was copied from the containing object's default policies. */
|
||||
public static String TYPE_INHERITED = "TYPE_INHERITED";
|
||||
|
||||
@Id
|
||||
@@ -93,7 +100,7 @@ public class ResourcePolicy implements ReloadableEntity<Integer> {
|
||||
private String rptype;
|
||||
|
||||
@Lob
|
||||
@Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType")
|
||||
@Type(type = "org.hibernate.type.TextType")
|
||||
@Column(name = "rpdescription")
|
||||
private String rpdescription;
|
||||
|
||||
|
@@ -232,6 +232,15 @@ public class ResourcePolicyServiceImpl implements ResourcePolicyService {
|
||||
c.restoreAuthSystemState();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void removePolicies(Context c, DSpaceObject o, String type, int action)
|
||||
throws SQLException, AuthorizeException {
|
||||
resourcePolicyDAO.deleteByDsoAndTypeAndAction(c, o, type, action);
|
||||
c.turnOffAuthorisationSystem();
|
||||
contentServiceFactory.getDSpaceObjectService(o).updateLastModified(c, o);
|
||||
c.restoreAuthSystemState();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void removeDsoGroupPolicies(Context context, DSpaceObject dso, Group group)
|
||||
throws SQLException, AuthorizeException {
|
||||
|
@@ -39,6 +39,9 @@ public interface ResourcePolicyDAO extends GenericDAO<ResourcePolicy> {
|
||||
|
||||
public List<ResourcePolicy> findByDSoAndAction(Context context, DSpaceObject dso, int actionId) throws SQLException;
|
||||
|
||||
public void deleteByDsoAndTypeAndAction(Context context, DSpaceObject dSpaceObject, String type, int action)
|
||||
throws SQLException;
|
||||
|
||||
public List<ResourcePolicy> findByTypeGroupAction(Context context, DSpaceObject dso, Group group, int action)
|
||||
throws SQLException;
|
||||
|
||||
|
@@ -103,6 +103,19 @@ public class ResourcePolicyDAOImpl extends AbstractHibernateDAO<ResourcePolicy>
|
||||
return list(context, criteriaQuery, false, ResourcePolicy.class, -1, -1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteByDsoAndTypeAndAction(Context context, DSpaceObject dso, String type, int actionId)
|
||||
throws SQLException {
|
||||
String queryString = "delete from ResourcePolicy where dSpaceObject.id = :dsoId "
|
||||
+ "AND rptype = :rptype AND actionId= :actionId";
|
||||
Query query = createQuery(context, queryString);
|
||||
query.setParameter("dsoId", dso.getID());
|
||||
query.setParameter("rptype", type);
|
||||
query.setParameter("actionId", actionId);
|
||||
query.executeUpdate();
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ResourcePolicy> findByTypeGroupAction(Context context, DSpaceObject dso, Group group, int action)
|
||||
throws SQLException {
|
||||
|
@@ -0,0 +1,67 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
|
||||
/**
|
||||
* Represents permissions for access to DSpace content.
|
||||
*
|
||||
* <h2>Philosophy</h2>
|
||||
* DSpace's authorization system follows the classical "police state"
|
||||
* philosophy of security - the user can do nothing, unless it is
|
||||
* specifically allowed. Those permissions are spelled out with
|
||||
* {@link ResourcePolicy} objects, stored in the {@code resourcepolicy} table
|
||||
* in the database.
|
||||
*
|
||||
* <h2>Policies are attached to Content</h2>
|
||||
* Resource Policies get assigned to all of the content objects in
|
||||
* DSpace - collections, communities, items, bundles, and bitstreams.
|
||||
* (Currently they are not attached to non-content objects such as
|
||||
* {@code EPerson} or {@code Group}. But they could be, hence the name
|
||||
* {@code ResourcePolicy} instead of {@code ContentPolicy}.)
|
||||
*
|
||||
* <h2>Policies are tuples</h2>
|
||||
* Authorization is based on evaluating the tuple of (object, action, actor),
|
||||
* such as (ITEM, READ, EPerson John Smith) to check if the {@code EPerson}
|
||||
* "John Smith" can read an item. {@code ResourcePolicy} objects are pretty
|
||||
* simple, describing a single instance of (object, action, actor). If
|
||||
* multiple actors are desired, such as groups 10, 11, and 12 are allowed to
|
||||
* READ Item 13, you simply create a {@code ResourcePolicy} for each group.
|
||||
*
|
||||
* <h2>Built-in groups</h2>
|
||||
* The install process should create two built-in groups - {@code Anonymous}
|
||||
* for anonymous/public access, and {@code Administrators} for administrators.
|
||||
* Group {@code Anonymous} allows anyone access, even if not authenticated.
|
||||
* Group {@code Administrators}' members have super-user rights,
|
||||
* and are allowed to do any action to any object.
|
||||
*
|
||||
* <h2>Policy types
|
||||
* Policies have a "type" used to distinguish policies which are applied for
|
||||
* specific purposes.
|
||||
* <dl>
|
||||
* <dt>CUSTOM</dt>
|
||||
* <dd>These are created and assigned explicitly by users.</dd>
|
||||
* <dt>INHERITED</dt>
|
||||
* <dd>These are copied from a containing object's default policies.</dd>
|
||||
* <dt>SUBMISSION</dt>
|
||||
* <dd>These are applied during submission to give the submitter access while
|
||||
* composing a submission.</dd>
|
||||
* <dt>WORKFLOW</dt>
|
||||
* <dd>These are automatically applied during workflow, to give curators
|
||||
* access to submissions in their curation queues. They usually have an
|
||||
* automatically-created workflow group as the actor.</dd>
|
||||
*
|
||||
* <h2>Start and End dates</h2>
|
||||
* A policy may have a start date and/or an end date. The policy is
|
||||
* considered not valid before the start date or after the end date. No date
|
||||
* means do not apply the related test. For example, embargo until a given
|
||||
* date can be expressed by a READ policy with a given start date, and a
|
||||
* limited-time offer by a READ policy with a given end date.
|
||||
*
|
||||
* @author dstuve
|
||||
* @author mwood
|
||||
*/
|
||||
package org.dspace.authorize;
|
@@ -1,68 +0,0 @@
|
||||
<!--
|
||||
|
||||
The contents of this file are subject to the license and copyright
|
||||
detailed in the LICENSE and NOTICE files at the root of the source
|
||||
tree and available online at
|
||||
|
||||
http://www.dspace.org/license/
|
||||
|
||||
-->
|
||||
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
|
||||
<html>
|
||||
<head>
|
||||
<!--
|
||||
Author: dstuve
|
||||
Version: $Id$
|
||||
Date: $Date$
|
||||
-->
|
||||
</head>
|
||||
<body bgcolor="white">
|
||||
<p>Handles permissions for DSpace content.
|
||||
</p>
|
||||
|
||||
<p><strong>Philosophy</strong><br>
|
||||
DSpace's authorization system follows the classical "police state"
|
||||
philosophy of security - the user can do nothing, unless it is
|
||||
specifically allowed. Those permissions are spelled out with
|
||||
ResourcePolicy objects, stored in the resourcepolicy table in the
|
||||
database.
|
||||
</p>
|
||||
|
||||
<h2>Policies are attached to Content</h2>
|
||||
<p><strong>Policies are attached to Content</strong><br>
|
||||
Resource Policies get assigned to all of the content objects in
|
||||
DSpace - collections, communities, items, bundles, and bitstreams.
|
||||
(Currently they are not attached to non-content objects such as EPerson
|
||||
or Group. But they could be, hence the name ResourcePolicy instead of
|
||||
ContentPolicy.)
|
||||
</p>
|
||||
|
||||
<h2>Policies are tuples</h2>
|
||||
Authorization is based on evaluating the tuple of (object, action, who),
|
||||
such as (ITEM, READ, EPerson John Smith) to check if the EPerson "John Smith"
|
||||
can read an item. ResourcePolicy objects are pretty simple, describing a single instance of
|
||||
(object, action, who). If multiple who's are desired, such as Groups 10, 11, and
|
||||
12 are allowed to READ Item 13, you simply create a ResourcePolicy for each
|
||||
group.
|
||||
</p>
|
||||
|
||||
<h2>Special Groups</h2>
|
||||
The install process should create two special groups - group 0, for
|
||||
anonymous/public access, and group 1 for administrators.
|
||||
Group 0 (public/anonymous) allows anyone access, even if they are not
|
||||
authenticated. Group 1's (admin) members have super-user rights, and
|
||||
are allowed to do any action to any object.
|
||||
</p>
|
||||
|
||||
<h2>Unused ResourcePolicy attributes </h2>
|
||||
ResourcePolicies have a few attributes that are currently unused,
|
||||
but are included with the intent that they will be used someday.
|
||||
One is start and end dates, for when policies will be active, so that
|
||||
permissions for content can change over time. The other is the EPerson -
|
||||
policies could apply to only a single EPerson, but for ease of
|
||||
administration currently a Group is the recommended unit to use to
|
||||
describe 'who'.
|
||||
</p>
|
||||
|
||||
</body>
|
||||
</html>
|
@@ -470,24 +470,6 @@ public interface AuthorizeService {
|
||||
public ResourcePolicy findByTypeGroupAction(Context c, DSpaceObject dso, Group group, int action)
|
||||
throws SQLException;
|
||||
|
||||
|
||||
/**
|
||||
* Generate Policies policies READ for the date in input adding reason. New policies are assigned automatically
|
||||
* at the groups that
|
||||
* have right on the collection. E.g., if the anonymous can access the collection policies are assigned to
|
||||
* anonymous.
|
||||
*
|
||||
* @param context current context
|
||||
* @param embargoDate date
|
||||
* @param reason reason
|
||||
* @param dso DSpaceObject
|
||||
* @param owningCollection collection
|
||||
* @throws SQLException if database error
|
||||
* @throws AuthorizeException if authorization error
|
||||
*/
|
||||
public void generateAutomaticPolicies(Context context, Date embargoDate, String reason, DSpaceObject dso,
|
||||
Collection owningCollection) throws SQLException, AuthorizeException;
|
||||
|
||||
public ResourcePolicy createResourcePolicy(Context context, DSpaceObject dso, Group group, EPerson eperson,
|
||||
int type, String rpType) throws SQLException, AuthorizeException;
|
||||
|
||||
@@ -532,6 +514,15 @@ public interface AuthorizeService {
|
||||
*/
|
||||
boolean isCollectionAdmin(Context context) throws SQLException;
|
||||
|
||||
/**
|
||||
* Checks that the context's current user is an item admin in the site by querying the solr database.
|
||||
*
|
||||
* @param context context with the current user
|
||||
* @return true if the current user is an item admin in the site
|
||||
* false when this is not the case, or an exception occurred
|
||||
*/
|
||||
boolean isItemAdmin(Context context) throws SQLException;
|
||||
|
||||
/**
|
||||
* Checks that the context's current user is a community or collection admin in the site.
|
||||
*
|
||||
@@ -600,4 +591,17 @@ public interface AuthorizeService {
|
||||
* @return true if the current user can manage accounts
|
||||
*/
|
||||
boolean isAccountManager(Context context);
|
||||
|
||||
/**
|
||||
* Replace all the policies in the target object with exactly the same policies that exist in the source object
|
||||
*
|
||||
* @param context DSpace Context
|
||||
* @param source source of policies
|
||||
* @param dest destination of inherited policies
|
||||
* @throws SQLException if there's a database problem
|
||||
* @throws AuthorizeException if the current user is not authorized to add these policies
|
||||
*/
|
||||
public void replaceAllPolicies(Context context, DSpaceObject source, DSpaceObject dest)
|
||||
throws SQLException, AuthorizeException;
|
||||
|
||||
}
|
||||
|
@@ -53,12 +53,19 @@ public interface ResourcePolicyService extends DSpaceCRUDService<ResourcePolicy>
|
||||
throws SQLException;
|
||||
|
||||
/**
|
||||
* Look for ResourcePolicies by DSpaceObject, Group, and action, ignoring IDs with a specific PolicyID.
|
||||
* This method can be used to detect duplicate ResourcePolicies.
|
||||
* Look for ResourcePolicies by DSpaceObject, Group, and action, ignoring
|
||||
* IDs with a specific PolicyID. This method can be used to detect duplicate
|
||||
* ResourcePolicies.
|
||||
*
|
||||
* @param notPolicyID ResourcePolicies with this ID will be ignored while looking out for equal ResourcePolicies.
|
||||
* @return List of resource policies for the same DSpaceObject, group and action but other policyID.
|
||||
* @throws SQLException
|
||||
* @param context current DSpace session.
|
||||
* @param dso find policies for this object.
|
||||
* @param group find policies referring to this group.
|
||||
* @param action find policies for this action.
|
||||
* @param notPolicyID ResourcePolicies with this ID will be ignored while
|
||||
* looking out for equal ResourcePolicies.
|
||||
* @return List of resource policies for the same DSpaceObject, group and
|
||||
* action but other policyID.
|
||||
* @throws SQLException passed through.
|
||||
*/
|
||||
public List<ResourcePolicy> findByTypeGroupActionExceptId(Context context, DSpaceObject dso, Group group,
|
||||
int action, int notPolicyID)
|
||||
@@ -68,6 +75,16 @@ public interface ResourcePolicyService extends DSpaceCRUDService<ResourcePolicy>
|
||||
|
||||
public boolean isDateValid(ResourcePolicy resourcePolicy);
|
||||
|
||||
/**
|
||||
* Create and persist a copy of a given ResourcePolicy, with an empty
|
||||
* dSpaceObject field.
|
||||
*
|
||||
* @param context current DSpace session.
|
||||
* @param resourcePolicy the policy to be copied.
|
||||
* @return the copy.
|
||||
* @throws SQLException passed through.
|
||||
* @throws AuthorizeException passed through.
|
||||
*/
|
||||
public ResourcePolicy clone(Context context, ResourcePolicy resourcePolicy) throws SQLException, AuthorizeException;
|
||||
|
||||
public void removeAllPolicies(Context c, DSpaceObject o) throws SQLException, AuthorizeException;
|
||||
@@ -76,6 +93,9 @@ public interface ResourcePolicyService extends DSpaceCRUDService<ResourcePolicy>
|
||||
|
||||
public void removePolicies(Context c, DSpaceObject o, String type) throws SQLException, AuthorizeException;
|
||||
|
||||
public void removePolicies(Context c, DSpaceObject o, String type, int action)
|
||||
throws SQLException, AuthorizeException;
|
||||
|
||||
public void removeDsoGroupPolicies(Context context, DSpaceObject dso, Group group)
|
||||
throws SQLException, AuthorizeException;
|
||||
|
||||
@@ -117,6 +137,7 @@ public interface ResourcePolicyService extends DSpaceCRUDService<ResourcePolicy>
|
||||
* @param ePerson ePerson whose policies want to find
|
||||
* @param offset the position of the first result to return
|
||||
* @param limit paging limit
|
||||
* @return some of the policies referring to {@code ePerson}.
|
||||
* @throws SQLException if database error
|
||||
*/
|
||||
public List<ResourcePolicy> findByEPerson(Context context, EPerson ePerson, int offset, int limit)
|
||||
|
@@ -8,8 +8,8 @@
|
||||
package org.dspace.browse;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.Item;
|
||||
|
||||
/**
|
||||
@@ -140,21 +140,21 @@ public interface BrowseDAO {
|
||||
public void setAscending(boolean ascending);
|
||||
|
||||
/**
|
||||
* Get the database ID of the container object. The container object will be a
|
||||
* Get the container object. The container object will be a
|
||||
* Community or a Collection.
|
||||
*
|
||||
* @return the database id of the container, or -1 if none is set
|
||||
* @return the container, or null if none is set
|
||||
*/
|
||||
public UUID getContainerID();
|
||||
public DSpaceObject getContainer();
|
||||
|
||||
/**
|
||||
* Set the database id of the container object. This should be the id of a
|
||||
* Community or Collection. This will constrain the results of the browse
|
||||
* to only items or values within items that appear in the given container.
|
||||
* Set the container object. This should be a Community or Collection.
|
||||
* This will constrain the results of the browse to only items or values within items that appear in the given
|
||||
* container and add the related configuration default filters.
|
||||
*
|
||||
* @param containerID community/collection internal ID (UUID)
|
||||
* @param container community/collection
|
||||
*/
|
||||
public void setContainerID(UUID containerID);
|
||||
public void setContainer(DSpaceObject container);
|
||||
|
||||
/**
|
||||
* get the name of the field in which to look for the container id. This is
|
||||
|
@@ -141,12 +141,12 @@ public class BrowseEngine {
|
||||
Collection col = (Collection) scope.getBrowseContainer();
|
||||
dao.setContainerTable("collection2item");
|
||||
dao.setContainerIDField("collection_id");
|
||||
dao.setContainerID(col.getID());
|
||||
dao.setContainer(col);
|
||||
} else if (scope.inCommunity()) {
|
||||
Community com = (Community) scope.getBrowseContainer();
|
||||
dao.setContainerTable("communities2item");
|
||||
dao.setContainerIDField("community_id");
|
||||
dao.setContainerID(com.getID());
|
||||
dao.setContainer(com);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -247,12 +247,12 @@ public class BrowseEngine {
|
||||
Collection col = (Collection) scope.getBrowseContainer();
|
||||
dao.setContainerTable("collection2item");
|
||||
dao.setContainerIDField("collection_id");
|
||||
dao.setContainerID(col.getID());
|
||||
dao.setContainer(col);
|
||||
} else if (scope.inCommunity()) {
|
||||
Community com = (Community) scope.getBrowseContainer();
|
||||
dao.setContainerTable("communities2item");
|
||||
dao.setContainerIDField("community_id");
|
||||
dao.setContainerID(com.getID());
|
||||
dao.setContainer(com);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -413,12 +413,12 @@ public class BrowseEngine {
|
||||
Collection col = (Collection) scope.getBrowseContainer();
|
||||
dao.setContainerTable("collection2item");
|
||||
dao.setContainerIDField("collection_id");
|
||||
dao.setContainerID(col.getID());
|
||||
dao.setContainer(col);
|
||||
} else if (scope.inCommunity()) {
|
||||
Community com = (Community) scope.getBrowseContainer();
|
||||
dao.setContainerTable("communities2item");
|
||||
dao.setContainerIDField("community_id");
|
||||
dao.setContainerID(com.getID());
|
||||
dao.setContainer(com);
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -22,11 +22,13 @@ import org.dspace.sort.SortOption;
|
||||
* This class holds all the information about a specifically configured
|
||||
* BrowseIndex. It is responsible for parsing the configuration, understanding
|
||||
* about what sort options are available, and what the names of the database
|
||||
* tables that hold all the information are actually called.
|
||||
* tables that hold all the information are actually called. Hierarchical browse
|
||||
* indexes also contain information about the vocabulary they're using, see:
|
||||
* {@link org.dspace.content.authority.DSpaceControlledVocabularyIndex}
|
||||
*
|
||||
* @author Richard Jones
|
||||
*/
|
||||
public final class BrowseIndex {
|
||||
public class BrowseIndex {
|
||||
/** the configuration number, as specified in the config */
|
||||
/**
|
||||
* used for single metadata browse tables for generating the table name
|
||||
@@ -102,7 +104,7 @@ public final class BrowseIndex {
|
||||
*
|
||||
* @param baseName The base of the table name
|
||||
*/
|
||||
private BrowseIndex(String baseName) {
|
||||
protected BrowseIndex(String baseName) {
|
||||
try {
|
||||
number = -1;
|
||||
tableBaseName = baseName;
|
||||
|
@@ -59,7 +59,16 @@ public class CrossLinks {
|
||||
* @return true/false
|
||||
*/
|
||||
public boolean hasLink(String metadata) {
|
||||
return links.containsKey(metadata);
|
||||
return findLinkType(metadata) != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Is there a link for the given browse name (eg 'author')
|
||||
* @param browseIndexName
|
||||
* @return true/false
|
||||
*/
|
||||
public boolean hasBrowseName(String browseIndexName) {
|
||||
return links.containsValue(browseIndexName);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -69,6 +78,41 @@ public class CrossLinks {
|
||||
* @return type
|
||||
*/
|
||||
public String getLinkType(String metadata) {
|
||||
return links.get(metadata);
|
||||
return findLinkType(metadata);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get full map of field->indexname link configurations
|
||||
* @return
|
||||
*/
|
||||
public Map<String, String> getLinks() {
|
||||
return links;
|
||||
}
|
||||
|
||||
/**
|
||||
* Find and return the browse name for a given metadata field.
|
||||
* If the link key contains a wildcard eg dc.subject.*, it should
|
||||
* match dc.subject.other, etc.
|
||||
* @param metadata
|
||||
* @return
|
||||
*/
|
||||
public String findLinkType(String metadata) {
|
||||
// Resolve wildcards properly, eg. dc.subject.other matches a configuration for dc.subject.*
|
||||
for (String key : links.keySet()) {
|
||||
if (null != key && key.endsWith(".*")) {
|
||||
// A substring of length-1, also substracting the wildcard should work as a "startsWith"
|
||||
// check for the field eg. dc.subject.* -> dc.subject is the start of dc.subject.other
|
||||
if (null != metadata && metadata.startsWith(key.substring(0, key.length() - 1 - ".*".length()))) {
|
||||
return links.get(key);
|
||||
}
|
||||
} else {
|
||||
// Exact match, if the key field has no .* wildcard
|
||||
if (links.containsKey(metadata)) {
|
||||
return links.get(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
// No match
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
@@ -18,6 +18,7 @@ import org.dspace.content.service.ItemService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.dspace.web.ContextUtil;
|
||||
|
||||
/**
|
||||
* This class provides a standard interface to all item counting
|
||||
@@ -49,9 +50,20 @@ public class ItemCounter {
|
||||
*/
|
||||
private Context context;
|
||||
|
||||
/**
|
||||
* This field is used to hold singular instance of a class.
|
||||
* Singleton pattern is used but this class should be
|
||||
* refactored to modern DSpace approach (injectible service).
|
||||
*/
|
||||
|
||||
private static ItemCounter instance;
|
||||
|
||||
protected ItemService itemService;
|
||||
protected ConfigurationService configurationService;
|
||||
|
||||
private boolean showStrengths;
|
||||
private boolean useCache;
|
||||
|
||||
/**
|
||||
* Construct a new item counter which will use the given DSpace Context
|
||||
*
|
||||
@@ -63,21 +75,42 @@ public class ItemCounter {
|
||||
this.dao = ItemCountDAOFactory.getInstance(this.context);
|
||||
this.itemService = ContentServiceFactory.getInstance().getItemService();
|
||||
this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
this.showStrengths = configurationService.getBooleanProperty("webui.strengths.show", false);
|
||||
this.useCache = configurationService.getBooleanProperty("webui.strengths.cache", true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the singular instance of a class.
|
||||
* It creates a new instance at the first usage of this method.
|
||||
*
|
||||
* @return instance af a class
|
||||
* @throws ItemCountException when error occurs
|
||||
*/
|
||||
public static ItemCounter getInstance() throws ItemCountException {
|
||||
if (instance == null) {
|
||||
instance = new ItemCounter(ContextUtil.obtainCurrentRequestContext());
|
||||
}
|
||||
return instance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the count of the items in the given container. If the configuration
|
||||
* value webui.strengths.cache is equal to 'true' this will return the
|
||||
* cached value if it exists. If it is equal to 'false' it will count
|
||||
* the number of items in the container in real time.
|
||||
* value webui.strengths.show is equal to 'true' this method will return all
|
||||
* archived items. If the configuration value webui.strengths.show is equal to
|
||||
* 'false' this method will return -1.
|
||||
* If the configuration value webui.strengths.cache
|
||||
* is equal to 'true' this will return the cached value if it exists.
|
||||
* If it is equal to 'false' it will count the number of items
|
||||
* in the container in real time.
|
||||
*
|
||||
* @param dso DSpaceObject
|
||||
* @return count
|
||||
* @throws ItemCountException when error occurs
|
||||
*/
|
||||
public int getCount(DSpaceObject dso) throws ItemCountException {
|
||||
boolean useCache = configurationService.getBooleanProperty(
|
||||
"webui.strengths.cache", true);
|
||||
if (!showStrengths) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (useCache) {
|
||||
return dao.getCount(dso);
|
||||
|
@@ -8,17 +8,17 @@
|
||||
package org.dspace.browse;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.solr.client.solrj.util.ClientUtils;
|
||||
import org.dspace.authorize.factory.AuthorizeServiceFactory;
|
||||
import org.dspace.authorize.service.AuthorizeService;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.discovery.DiscoverFacetField;
|
||||
@@ -30,6 +30,8 @@ import org.dspace.discovery.DiscoverResult.SearchDocument;
|
||||
import org.dspace.discovery.IndexableObject;
|
||||
import org.dspace.discovery.SearchService;
|
||||
import org.dspace.discovery.SearchServiceException;
|
||||
import org.dspace.discovery.SearchUtils;
|
||||
import org.dspace.discovery.configuration.DiscoveryConfiguration;
|
||||
import org.dspace.discovery.configuration.DiscoveryConfigurationParameters;
|
||||
import org.dspace.discovery.indexobject.IndexableItem;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
@@ -123,9 +125,9 @@ public class SolrBrowseDAO implements BrowseDAO {
|
||||
private String containerIDField = null;
|
||||
|
||||
/**
|
||||
* the database id of the container we are constraining to
|
||||
* the container we are constraining to
|
||||
*/
|
||||
private UUID containerID = null;
|
||||
private DSpaceObject container = null;
|
||||
|
||||
/**
|
||||
* the column that we are sorting results by
|
||||
@@ -175,7 +177,7 @@ public class SolrBrowseDAO implements BrowseDAO {
|
||||
if (sResponse == null) {
|
||||
DiscoverQuery query = new DiscoverQuery();
|
||||
addLocationScopeFilter(query);
|
||||
addStatusFilter(query);
|
||||
addDefaultFilterQueries(query);
|
||||
if (distinct) {
|
||||
DiscoverFacetField dff;
|
||||
if (StringUtils.isNotBlank(startsWith)) {
|
||||
@@ -206,7 +208,8 @@ public class SolrBrowseDAO implements BrowseDAO {
|
||||
query.addFilterQueries("{!field f=" + facetField + "_partial}" + value);
|
||||
}
|
||||
if (StringUtils.isNotBlank(startsWith) && orderField != null) {
|
||||
query.addFilterQueries("bi_" + orderField + "_sort:" + startsWith + "*");
|
||||
query.addFilterQueries(
|
||||
"bi_" + orderField + "_sort:" + ClientUtils.escapeQueryChars(startsWith) + "*");
|
||||
}
|
||||
// filter on item to be sure to don't include any other object
|
||||
// indexed in the Discovery Search core
|
||||
@@ -225,26 +228,19 @@ public class SolrBrowseDAO implements BrowseDAO {
|
||||
return sResponse;
|
||||
}
|
||||
|
||||
private void addStatusFilter(DiscoverQuery query) {
|
||||
try {
|
||||
if (!authorizeService.isAdmin(context)
|
||||
&& (authorizeService.isCommunityAdmin(context)
|
||||
|| authorizeService.isCollectionAdmin(context))) {
|
||||
query.addFilterQueries(searcher.createLocationQueryForAdministrableItems(context));
|
||||
private void addLocationScopeFilter(DiscoverQuery query) {
|
||||
if (container != null) {
|
||||
if (containerIDField.startsWith("collection")) {
|
||||
query.addFilterQueries("location.coll:" + container.getID());
|
||||
} else if (containerIDField.startsWith("community")) {
|
||||
query.addFilterQueries("location.comm:" + container.getID());
|
||||
}
|
||||
} catch (SQLException ex) {
|
||||
log.error("Error looking up authorization rights of current user", ex);
|
||||
}
|
||||
}
|
||||
|
||||
private void addLocationScopeFilter(DiscoverQuery query) {
|
||||
if (containerID != null) {
|
||||
if (containerIDField.startsWith("collection")) {
|
||||
query.addFilterQueries("location.coll:" + containerID);
|
||||
} else if (containerIDField.startsWith("community")) {
|
||||
query.addFilterQueries("location.comm:" + containerID);
|
||||
}
|
||||
}
|
||||
private void addDefaultFilterQueries(DiscoverQuery query) {
|
||||
DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, container);
|
||||
discoveryConfiguration.getDefaultFilterQueries().forEach(query::addFilterQueries);
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -335,7 +331,7 @@ public class SolrBrowseDAO implements BrowseDAO {
|
||||
throws BrowseException {
|
||||
DiscoverQuery query = new DiscoverQuery();
|
||||
addLocationScopeFilter(query);
|
||||
addStatusFilter(query);
|
||||
addDefaultFilterQueries(query);
|
||||
query.setMaxResults(0);
|
||||
query.addFilterQueries("search.resourcetype:" + IndexableItem.TYPE);
|
||||
|
||||
@@ -396,8 +392,8 @@ public class SolrBrowseDAO implements BrowseDAO {
|
||||
* @see org.dspace.browse.BrowseDAO#getContainerID()
|
||||
*/
|
||||
@Override
|
||||
public UUID getContainerID() {
|
||||
return containerID;
|
||||
public DSpaceObject getContainer() {
|
||||
return container;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -559,8 +555,8 @@ public class SolrBrowseDAO implements BrowseDAO {
|
||||
* @see org.dspace.browse.BrowseDAO#setContainerID(int)
|
||||
*/
|
||||
@Override
|
||||
public void setContainerID(UUID containerID) {
|
||||
this.containerID = containerID;
|
||||
public void setContainer(DSpaceObject container) {
|
||||
this.container = container;
|
||||
|
||||
}
|
||||
|
||||
|
@@ -245,7 +245,7 @@ public final class CheckerCommand {
|
||||
info.setProcessStartDate(new Date());
|
||||
|
||||
try {
|
||||
Map checksumMap = bitstreamStorageService.computeChecksum(context, info.getBitstream());
|
||||
Map<String, Object> checksumMap = bitstreamStorageService.computeChecksum(context, info.getBitstream());
|
||||
if (MapUtils.isNotEmpty(checksumMap)) {
|
||||
info.setBitstreamFound(true);
|
||||
if (checksumMap.containsKey("checksum")) {
|
||||
@@ -255,10 +255,16 @@ public final class CheckerCommand {
|
||||
if (checksumMap.containsKey("checksum_algorithm")) {
|
||||
info.setChecksumAlgorithm(checksumMap.get("checksum_algorithm").toString());
|
||||
}
|
||||
}
|
||||
|
||||
// compare new checksum to previous checksum
|
||||
info.setChecksumResult(compareChecksums(info.getExpectedChecksum(), info.getCurrentChecksum()));
|
||||
|
||||
} else {
|
||||
info.setCurrentChecksum("");
|
||||
info.setChecksumResult(getChecksumResultByCode(ChecksumResultCode.BITSTREAM_NOT_FOUND));
|
||||
info.setToBeProcessed(false);
|
||||
}
|
||||
|
||||
} catch (IOException e) {
|
||||
// bitstream located, but file missing from asset store
|
||||
info.setChecksumResult(getChecksumResultByCode(ChecksumResultCode.BITSTREAM_NOT_FOUND));
|
||||
|
@@ -74,7 +74,8 @@ public class ChecksumHistoryServiceImpl implements ChecksumHistoryService {
|
||||
if (mostRecentChecksum.getBitstream().isDeleted()) {
|
||||
checksumResult = checksumResultService.findByCode(context, ChecksumResultCode.BITSTREAM_MARKED_DELETED);
|
||||
} else {
|
||||
checksumResult = checksumResultService.findByCode(context, ChecksumResultCode.CHECKSUM_MATCH);
|
||||
checksumResult = checksumResultService.findByCode(context,
|
||||
mostRecentChecksum.getChecksumResult().getResultCode());
|
||||
}
|
||||
|
||||
checksumHistory.setResult(checksumResult);
|
||||
|
@@ -152,6 +152,7 @@ public class SimpleReporterServiceImpl implements SimpleReporterService {
|
||||
|
||||
osw.write("\n");
|
||||
osw.write(msg("bitstream-not-found-report"));
|
||||
osw.write(" ");
|
||||
osw.write(applyDateFormatShort(startDate));
|
||||
osw.write(" ");
|
||||
osw.write(msg("date-range-to"));
|
||||
@@ -230,6 +231,7 @@ public class SimpleReporterServiceImpl implements SimpleReporterService {
|
||||
|
||||
osw.write("\n");
|
||||
osw.write(msg("unchecked-bitstream-report"));
|
||||
osw.write(" ");
|
||||
osw.write(applyDateFormatShort(new Date()));
|
||||
osw.write("\n\n\n");
|
||||
|
||||
|
@@ -92,8 +92,8 @@ public class MostRecentChecksumDAOImpl extends AbstractHibernateDAO<MostRecentCh
|
||||
criteriaQuery.where(criteriaBuilder.and(
|
||||
criteriaBuilder.equal(mostRecentResult.get(ChecksumResult_.resultCode), resultCode),
|
||||
criteriaBuilder.lessThanOrEqualTo(
|
||||
mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), startDate),
|
||||
criteriaBuilder.greaterThan(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), endDate)
|
||||
mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), endDate),
|
||||
criteriaBuilder.greaterThan(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), startDate)
|
||||
)
|
||||
);
|
||||
List<Order> orderList = new LinkedList<>();
|
||||
|
@@ -332,8 +332,8 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Bitstream> findDeletedBitstreams(Context context) throws SQLException {
|
||||
return bitstreamDAO.findDeletedBitstreams(context);
|
||||
public List<Bitstream> findDeletedBitstreams(Context context, int limit, int offset) throws SQLException {
|
||||
return bitstreamDAO.findDeletedBitstreams(context, limit, offset);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@@ -8,6 +8,7 @@
|
||||
package org.dspace.content;
|
||||
|
||||
import static org.dspace.core.Constants.ADD;
|
||||
import static org.dspace.core.Constants.READ;
|
||||
import static org.dspace.core.Constants.REMOVE;
|
||||
import static org.dspace.core.Constants.WRITE;
|
||||
|
||||
@@ -34,6 +35,7 @@ import org.dspace.content.service.ItemService;
|
||||
import org.dspace.core.Constants;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.LogHelper;
|
||||
import org.dspace.eperson.Group;
|
||||
import org.dspace.event.Event;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
@@ -173,6 +175,39 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
|
||||
// copy authorization policies from bundle to bitstream
|
||||
// FIXME: multiple inclusion is affected by this...
|
||||
authorizeService.inheritPolicies(context, bundle, bitstream);
|
||||
// The next logic is a bit overly cautious but ensures that if there are any future start dates
|
||||
// on the item or bitstream read policies, that we'll skip inheriting anything from the owning collection
|
||||
// just in case. In practice, the item install process would overwrite these anyway but it may satisfy
|
||||
// some other bitstream creation methods and integration tests
|
||||
boolean isEmbargoed = false;
|
||||
for (ResourcePolicy resourcePolicy : authorizeService.getPoliciesActionFilter(context, owningItem, READ)) {
|
||||
if (!resourcePolicyService.isDateValid(resourcePolicy)) {
|
||||
isEmbargoed = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (owningItem != null && !isEmbargoed) {
|
||||
// Resolve owning collection
|
||||
Collection owningCollection = owningItem.getOwningCollection();
|
||||
if (owningCollection != null) {
|
||||
// Get DEFAULT_BITSTREAM_READ policy from the collection
|
||||
List<Group> defaultBitstreamReadGroups =
|
||||
authorizeService.getAuthorizedGroups(context, owningCollection,
|
||||
Constants.DEFAULT_BITSTREAM_READ);
|
||||
log.info(defaultBitstreamReadGroups.size());
|
||||
// If this collection is configured with a DEFAULT_BITSTREAM_READ group, overwrite the READ policy
|
||||
// inherited from the bundle with this policy.
|
||||
if (!defaultBitstreamReadGroups.isEmpty()) {
|
||||
// Remove read policies from the bitstream
|
||||
authorizeService.removePoliciesActionFilter(context, bitstream, Constants.READ);
|
||||
for (Group defaultBitstreamReadGroup : defaultBitstreamReadGroups) {
|
||||
// Inherit this policy as READ, directly from the collection roles
|
||||
authorizeService.addPolicy(context, bitstream,
|
||||
Constants.READ, defaultBitstreamReadGroup, ResourcePolicy.TYPE_INHERITED);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
bitstreamService.update(context, bitstream);
|
||||
}
|
||||
|
||||
|
@@ -29,6 +29,7 @@ import javax.persistence.Table;
|
||||
import javax.persistence.Transient;
|
||||
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.browse.ItemCountException;
|
||||
import org.dspace.content.comparator.NameAscendingComparator;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.CollectionService;
|
||||
@@ -336,4 +337,17 @@ public class Collection extends DSpaceObject implements DSpaceObjectLegacySuppor
|
||||
return collectionService;
|
||||
}
|
||||
|
||||
/**
|
||||
* return count of the collection items
|
||||
*
|
||||
* @return int
|
||||
*/
|
||||
public int countArchivedItems() {
|
||||
try {
|
||||
return collectionService.countArchivedItems(this);
|
||||
} catch (ItemCountException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
@@ -31,6 +31,8 @@ import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.authorize.ResourcePolicy;
|
||||
import org.dspace.authorize.service.AuthorizeService;
|
||||
import org.dspace.authorize.service.ResourcePolicyService;
|
||||
import org.dspace.browse.ItemCountException;
|
||||
import org.dspace.browse.ItemCounter;
|
||||
import org.dspace.content.dao.CollectionDAO;
|
||||
import org.dspace.content.service.BitstreamService;
|
||||
import org.dspace.content.service.CollectionService;
|
||||
@@ -43,6 +45,7 @@ import org.dspace.core.I18nUtil;
|
||||
import org.dspace.core.LogHelper;
|
||||
import org.dspace.core.service.LicenseService;
|
||||
import org.dspace.discovery.DiscoverQuery;
|
||||
import org.dspace.discovery.DiscoverQuery.SORT_ORDER;
|
||||
import org.dspace.discovery.DiscoverResult;
|
||||
import org.dspace.discovery.IndexableObject;
|
||||
import org.dspace.discovery.SearchService;
|
||||
@@ -735,7 +738,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
||||
collection.getID(), collection.getHandle(), getIdentifiers(context, collection)));
|
||||
|
||||
// remove subscriptions - hmm, should this be in Subscription.java?
|
||||
subscribeService.deleteByCollection(context, collection);
|
||||
subscribeService.deleteByDspaceObject(context, collection);
|
||||
|
||||
// Remove Template Item
|
||||
removeTemplateItem(context, collection);
|
||||
@@ -946,6 +949,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
||||
discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE);
|
||||
discoverQuery.setStart(offset);
|
||||
discoverQuery.setMaxResults(limit);
|
||||
discoverQuery.setSortField(SOLR_SORT_FIELD, SORT_ORDER.asc);
|
||||
DiscoverResult resp = retrieveCollectionsWithSubmit(context, discoverQuery, null, community, q);
|
||||
for (IndexableObject solrCollections : resp.getIndexableObjects()) {
|
||||
Collection c = ((IndexableCollection) solrCollections).getIndexedObject();
|
||||
@@ -1080,6 +1084,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
||||
discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE);
|
||||
discoverQuery.setStart(offset);
|
||||
discoverQuery.setMaxResults(limit);
|
||||
discoverQuery.setSortField(SOLR_SORT_FIELD, SORT_ORDER.asc);
|
||||
DiscoverResult resp = retrieveCollectionsWithSubmit(context, discoverQuery,
|
||||
entityType, community, q);
|
||||
for (IndexableObject solrCollections : resp.getIndexableObjects()) {
|
||||
@@ -1099,4 +1104,35 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
||||
return (int) resp.getTotalSearchResults();
|
||||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("rawtypes")
|
||||
public List<Collection> findAllCollectionsByEntityType(Context context, String entityType)
|
||||
throws SearchServiceException {
|
||||
List<Collection> collectionList = new ArrayList<>();
|
||||
|
||||
DiscoverQuery discoverQuery = new DiscoverQuery();
|
||||
discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE);
|
||||
discoverQuery.addFilterQueries("dspace.entity.type:" + entityType);
|
||||
|
||||
DiscoverResult discoverResult = searchService.search(context, discoverQuery);
|
||||
List<IndexableObject> solrIndexableObjects = discoverResult.getIndexableObjects();
|
||||
|
||||
for (IndexableObject solrCollection : solrIndexableObjects) {
|
||||
Collection c = ((IndexableCollection) solrCollection).getIndexedObject();
|
||||
collectionList.add(c);
|
||||
}
|
||||
return collectionList;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns total collection archived items
|
||||
*
|
||||
* @param collection Collection
|
||||
* @return total collection archived items
|
||||
* @throws ItemCountException
|
||||
*/
|
||||
@Override
|
||||
public int countArchivedItems(Collection collection) throws ItemCountException {
|
||||
return ItemCounter.getInstance().getCount(collection);
|
||||
}
|
||||
}
|
||||
|
@@ -25,6 +25,7 @@ import javax.persistence.Table;
|
||||
import javax.persistence.Transient;
|
||||
|
||||
import org.apache.commons.lang3.builder.HashCodeBuilder;
|
||||
import org.dspace.browse.ItemCountException;
|
||||
import org.dspace.content.comparator.NameAscendingComparator;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.CommunityService;
|
||||
@@ -264,4 +265,16 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
|
||||
return communityService;
|
||||
}
|
||||
|
||||
/**
|
||||
* return count of the community items
|
||||
*
|
||||
* @return int
|
||||
*/
|
||||
public int countArchivedItems() {
|
||||
try {
|
||||
return communityService.countArchivedItems(this);
|
||||
} catch (ItemCountException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -24,6 +24,8 @@ import org.dspace.authorize.AuthorizeConfiguration;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.authorize.ResourcePolicy;
|
||||
import org.dspace.authorize.service.AuthorizeService;
|
||||
import org.dspace.browse.ItemCountException;
|
||||
import org.dspace.browse.ItemCounter;
|
||||
import org.dspace.content.dao.CommunityDAO;
|
||||
import org.dspace.content.service.BitstreamService;
|
||||
import org.dspace.content.service.CollectionService;
|
||||
@@ -36,6 +38,7 @@ import org.dspace.core.I18nUtil;
|
||||
import org.dspace.core.LogHelper;
|
||||
import org.dspace.eperson.Group;
|
||||
import org.dspace.eperson.service.GroupService;
|
||||
import org.dspace.eperson.service.SubscribeService;
|
||||
import org.dspace.event.Event;
|
||||
import org.dspace.identifier.IdentifierException;
|
||||
import org.dspace.identifier.service.IdentifierService;
|
||||
@@ -73,10 +76,11 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
|
||||
protected SiteService siteService;
|
||||
@Autowired(required = true)
|
||||
protected IdentifierService identifierService;
|
||||
@Autowired(required = true)
|
||||
protected SubscribeService subscribeService;
|
||||
|
||||
protected CommunityServiceImpl() {
|
||||
super();
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -549,6 +553,8 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
|
||||
context.addEvent(new Event(Event.DELETE, Constants.COMMUNITY, community.getID(), community.getHandle(),
|
||||
getIdentifiers(context, community)));
|
||||
|
||||
subscribeService.deleteByDspaceObject(context, community);
|
||||
|
||||
// Remove collections
|
||||
Iterator<Collection> collections = community.getCollections().iterator();
|
||||
|
||||
@@ -704,4 +710,16 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
|
||||
public int countTotal(Context context) throws SQLException {
|
||||
return communityDAO.countRows(context);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns total community archived items
|
||||
*
|
||||
* @param community Community
|
||||
* @return total community archived items
|
||||
* @throws ItemCountException
|
||||
*/
|
||||
@Override
|
||||
public int countArchivedItems(Community community) throws ItemCountException {
|
||||
return ItemCounter.getInstance().getCount(community);
|
||||
}
|
||||
}
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user