Merge branch 'main' of https://github.com/DSpace/DSpace into CST-11298
```diff
@@ -4,13 +4,6 @@
 # Can be validated via instructions at:
 # https://docs.codecov.io/docs/codecov-yaml#validate-your-repository-yaml

-# Tell Codecov not to send a coverage notification until (at least) 2 builds are completed
-# Since we run Unit & Integration tests in parallel, this lets Codecov know that coverage
-# needs to be merged across those builds
-codecov:
-  notify:
-    after_n_builds: 2
-
 # Settings related to code coverage analysis
 coverage:
   status:
```
```diff
@@ -6,6 +6,5 @@ dspace/modules/*/target/
 Dockerfile.*
 dspace/src/main/docker/dspace-postgres-pgcrypto
 dspace/src/main/docker/dspace-postgres-pgcrypto-curl
-dspace/src/main/docker/solr
 dspace/src/main/docker/README.md
 dspace/src/main/docker-compose/
```
```diff
@@ -1,26 +0,0 @@
-# This workflow runs whenever a new pull request is created
-# TEMPORARILY DISABLED. Unfortunately this doesn't work for PRs created from forked repositories (which is how we tend to create PRs).
-# There is no known workaround yet. See https://github.community/t/how-to-use-github-token-for-prs-from-forks/16818
-name: Pull Request opened
-
-# Only run for newly opened PRs against the "main" branch
-on:
-  pull_request:
-    types: [opened]
-    branches:
-      - main
-
-jobs:
-  automation:
-    runs-on: ubuntu-latest
-    steps:
-      # Assign the PR to whomever created it. This is useful for visualizing assignments on project boards
-      # See https://github.com/marketplace/actions/pull-request-assigner
-      - name: Assign PR to creator
-        uses: thomaseizinger/assign-pr-creator-action@v1.0.0
-        # Note, this authentication token is created automatically
-        # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token
-        with:
-          repo-token: ${{ secrets.GITHUB_TOKEN }}
-        # Ignore errors. It is possible the PR was created by someone who cannot be assigned
-        continue-on-error: true
```
.github/pull_request_template.md (10 changes)
```diff
@@ -1,7 +1,7 @@
 ## References
 _Add references/links to any related issues or PRs. These may include:_
-* Fixes #[issue-number]
+* Fixes #`issue-number` (if this fixes an issue ticket)
-* Related to [REST Contract](https://github.com/DSpace/Rest7Contract)
+* Related to DSpace/RestContract#`pr-number` (if a corresponding REST Contract PR exists)

 ## Description
 Short summary of changes (1-2 sentences).
@@ -22,5 +22,7 @@ _This checklist provides a reminder of what we are going to look for when review
 - [ ] My PR passes Checkstyle validation based on the [Code Style Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Style+Guide).
 - [ ] My PR includes Javadoc for _all new (or modified) public methods and classes_. It also includes Javadoc for large or complex private methods.
 - [ ] My PR passes all tests and includes new/updated Unit or Integration Tests based on the [Code Testing Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Testing+Guide).
-- [ ] If my PR includes new, third-party dependencies (in any `pom.xml`), I've made sure their licenses align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation.
+- [ ] If my PR includes new libraries/dependencies (in any `pom.xml`), I've made sure their licenses align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation.
-- [ ] If my PR modifies the REST API, I've opened a separate [REST Contract](https://github.com/DSpace/RestContract/blob/main/README.md) PR related to this change.
+- [ ] If my PR modifies REST API endpoints, I've opened a separate [REST Contract](https://github.com/DSpace/RestContract/blob/main/README.md) PR related to this change.
+- [ ] If my PR includes new configurations, I've provided basic technical documentation in the PR itself.
+- [ ] If my PR fixes an issue ticket, I've [linked them together](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue).
```
.github/workflows/build.yml (37 changes)
```diff
@@ -79,6 +79,39 @@ jobs:
           name: ${{ matrix.type }} results
           path: ${{ matrix.resultsdir }}

-      # https://github.com/codecov/codecov-action
+      # Upload code coverage report to artifact, so that it can be shared with the 'codecov' job (see below)
+      - name: Upload code coverage report to Artifact
+        uses: actions/upload-artifact@v3
+        with:
+          name: ${{ matrix.type }} coverage report
+          path: 'dspace/target/site/jacoco-aggregate/jacoco.xml'
+          retention-days: 14
+
+  # Codecov upload is a separate job in order to allow us to restart this separate from the entire build/test
+  # job above. This is necessary because Codecov uploads seem to randomly fail at times.
+  # See https://community.codecov.com/t/upload-issues-unable-to-locate-build-via-github-actions-api/3954
+  codecov:
+    # Must run after 'tests' job above
+    needs: tests
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+
+      # Download artifacts from previous 'tests' job
+      - name: Download coverage artifacts
+        uses: actions/download-artifact@v3
+
+      # Now attempt upload to Codecov using its action.
+      # NOTE: We use a retry action to retry the Codecov upload if it fails the first time.
+      #
+      # Retry action: https://github.com/marketplace/actions/retry-action
+      # Codecov action: https://github.com/codecov/codecov-action
       - name: Upload coverage to Codecov.io
-        uses: codecov/codecov-action@v3
+        uses: Wandalen/wretry.action@v1.0.36
+        with:
+          action: codecov/codecov-action@v3
+          # Try upload 5 times max
+          attempt_limit: 5
+          # Run again in 30 seconds
+          attempt_delay: 30000
```
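The new `codecov` job above relies on two mechanisms that are easy to miss when reading the diff: artifacts are the hand-off between the `tests` matrix job and the `codecov` job, and `needs: tests` forces the ordering. Below is a minimal sketch of that hand-off pattern; the job names, echo command, and file names are placeholders for illustration, not part of this commit.

```yaml
# Minimal sketch (assumed names/paths) of the upload/download artifact hand-off used above.
jobs:
  tests:
    runs-on: ubuntu-latest
    steps:
      - run: echo "<coverage/>" > coverage.xml   # stand-in for the real build + tests
      - uses: actions/upload-artifact@v3
        with:
          name: unit coverage report
          path: coverage.xml
  codecov:
    needs: tests        # run only after every 'tests' matrix job finishes
    runs-on: ubuntu-latest
    steps:
      # With no 'name' input, download-artifact@v3 fetches all artifacts from this run,
      # so each coverage report lands in the workspace under its artifact name.
      - uses: actions/download-artifact@v3
```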
.github/workflows/codescan.yml (new file, 63 lines)
```diff
@@ -0,0 +1,63 @@
+# DSpace CodeQL code scanning configuration for GitHub
+# https://docs.github.com/en/code-security/code-scanning
+#
+# NOTE: Code scanning must be run separate from our default build.yml
+# because CodeQL requires a fresh build with all tests *disabled*.
+name: "Code Scanning"
+
+# Run this code scan for all pushes / PRs to main or maintenance branches. Also run once a week.
+on:
+  push:
+    branches:
+      - main
+      - 'dspace-**'
+  pull_request:
+    branches:
+      - main
+      - 'dspace-**'
+    # Don't run if PR is only updating static documentation
+    paths-ignore:
+      - '**/*.md'
+      - '**/*.txt'
+  schedule:
+    - cron: "37 0 * * 1"
+
+jobs:
+  analyze:
+    name: Analyze Code
+    runs-on: ubuntu-latest
+    # Limit permissions of this GitHub action. Can only write to security-events
+    permissions:
+      actions: read
+      contents: read
+      security-events: write
+
+    steps:
+      # https://github.com/actions/checkout
+      - name: Checkout repository
+        uses: actions/checkout@v3
+
+      # https://github.com/actions/setup-java
+      - name: Install JDK
+        uses: actions/setup-java@v3
+        with:
+          java-version: 11
+          distribution: 'temurin'
+
+      # Initializes the CodeQL tools for scanning.
+      # https://github.com/github/codeql-action
+      - name: Initialize CodeQL
+        uses: github/codeql-action/init@v2
+        with:
+          # Codescan Javascript as well since a few JS files exist in REST API's interface
+          languages: java, javascript
+
+      # Autobuild attempts to build any compiled languages
+      # NOTE: Based on testing, this autobuild process works well for DSpace. A custom
+      # DSpace build w/caching (like in build.yml) was about the same speed as autobuild.
+      - name: Autobuild
+        uses: github/codeql-action/autobuild@v2
+
+      # Perform GitHub Code Scanning.
+      - name: Perform CodeQL Analysis
+        uses: github/codeql-action/analyze@v2
```
.github/workflows/docker.yml (326 changes)
```diff
@@ -15,30 +15,36 @@ on:
 permissions:
   contents: read # to fetch code (actions/checkout)

+# Define shared environment variables for all jobs below
+env:
+  # Define tags to use for Docker images based on Git tags/branches (for docker/metadata-action)
+  # For a new commit on default branch (main), use the literal tag 'latest' on Docker image.
+  # For a new commit on other branches, use the branch name as the tag for Docker image.
+  # For a new tag, copy that tag name as the tag for Docker image.
+  IMAGE_TAGS: |
+    type=raw,value=latest,enable=${{ endsWith(github.ref, github.event.repository.default_branch) }}
+    type=ref,event=branch,enable=${{ !endsWith(github.ref, github.event.repository.default_branch) }}
+    type=ref,event=tag
+  # Define default tag "flavor" for docker/metadata-action per
+  # https://github.com/docker/metadata-action#flavor-input
+  # We manage the 'latest' tag ourselves to the 'main' branch (see settings above)
+  TAGS_FLAVOR: |
+    latest=false
+  # Architectures / Platforms for which we will build Docker images
+  # If this is a PR, we ONLY build for AMD64. For PRs we only do a sanity check test to ensure Docker builds work.
+  # If this is NOT a PR (e.g. a tag or merge commit), also build for ARM64. NOTE: The ARM64 build takes MUCH
+  # longer (around 45mins or so) which is why we only run it when pushing a new Docker image.
+  PLATFORMS: linux/amd64${{ github.event_name != 'pull_request' && ', linux/arm64' || '' }}
+
 jobs:
-  docker:
+  ####################################################
+  # Build/Push the 'dspace/dspace-dependencies' image.
+  # This image is used by all other jobs.
+  ####################################################
+  dspace-dependencies:
     # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
     if: github.repository == 'dspace/dspace'
     runs-on: ubuntu-latest
-    env:
-      # Define tags to use for Docker images based on Git tags/branches (for docker/metadata-action)
-      # For a new commit on default branch (main), use the literal tag 'dspace-7_x' on Docker image.
-      # For a new commit on other branches, use the branch name as the tag for Docker image.
-      # For a new tag, copy that tag name as the tag for Docker image.
-      IMAGE_TAGS: |
-        type=raw,value=dspace-7_x,enable=${{ endsWith(github.ref, github.event.repository.default_branch) }}
-        type=ref,event=branch,enable=${{ !endsWith(github.ref, github.event.repository.default_branch) }}
-        type=ref,event=tag
-      # Define default tag "flavor" for docker/metadata-action per
-      # https://github.com/docker/metadata-action#flavor-input
-      # We turn off 'latest' tag by default.
-      TAGS_FLAVOR: |
-        latest=false
-      # Architectures / Platforms for which we will build Docker images
-      # If this is a PR, we ONLY build for AMD64. For PRs we only do a sanity check test to ensure Docker builds work.
-      # If this is NOT a PR (e.g. a tag or merge commit), also build for ARM64. NOTE: The ARM64 build takes MUCH
-      # longer (around 45mins or so) which is why we only run it when pushing a new Docker image.
-      PLATFORMS: linux/amd64${{ github.event_name != 'pull_request' && ', linux/arm64' || '' }}

     steps:
       # https://github.com/actions/checkout
```
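For readers skimming the new shared `env` block: each job below feeds `IMAGE_TAGS` and `TAGS_FLAVOR` into `docker/metadata-action`, which resolves them into concrete Docker tags. The sketch below shows one consuming step with example outcomes in comments; the tag values are illustrative assumptions derived from the comments in the env block, not output copied from a real run.

```yaml
# How a job consumes the shared env above (step shape mirrors the jobs later in this file).
# Roughly, per the comments in the env block:
#   push to 'main'             -> 'latest'       (type=raw,value=latest is enabled)
#   push to branch dspace-7_x  -> 'dspace-7_x'   (type=ref,event=branch)
#   push of git tag dspace-7.6 -> 'dspace-7.6'   (type=ref,event=tag)
# PLATFORMS resolves to 'linux/amd64' on pull requests and 'linux/amd64, linux/arm64' otherwise.
- name: Sync metadata (tags, labels) from GitHub to Docker
  uses: docker/metadata-action@v4
  with:
    images: dspace/dspace-dependencies
    tags: ${{ env.IMAGE_TAGS }}
    flavor: ${{ env.TAGS_FLAVOR }}
```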
```diff
@@ -62,9 +68,6 @@ jobs:
           username: ${{ secrets.DOCKER_USERNAME }}
           password: ${{ secrets.DOCKER_ACCESS_TOKEN }}

-      ####################################################
-      # Build/Push the 'dspace/dspace-dependencies' image
-      ####################################################
       # https://github.com/docker/metadata-action
       # Get Metadata for docker_build_deps step below
       - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-dependencies' image
@@ -78,7 +81,7 @@ jobs:
       # https://github.com/docker/build-push-action
       - name: Build and push 'dspace-dependencies' image
         id: docker_build_deps
-        uses: docker/build-push-action@v3
+        uses: docker/build-push-action@v4
         with:
           context: .
           file: ./Dockerfile.dependencies
@@ -90,9 +93,38 @@ jobs:
           tags: ${{ steps.meta_build_deps.outputs.tags }}
           labels: ${{ steps.meta_build_deps.outputs.labels }}

   #######################################
   # Build/Push the 'dspace/dspace' image
   #######################################
+  dspace:
+    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
+    if: github.repository == 'dspace/dspace'
+    # Must run after 'dspace-dependencies' job above
+    needs: dspace-dependencies
+    runs-on: ubuntu-latest
+
+    steps:
+      # https://github.com/actions/checkout
+      - name: Checkout codebase
+        uses: actions/checkout@v3
+
+      # https://github.com/docker/setup-buildx-action
+      - name: Setup Docker Buildx
+        uses: docker/setup-buildx-action@v2
+
+      # https://github.com/docker/setup-qemu-action
+      - name: Set up QEMU emulation to build for multiple architectures
+        uses: docker/setup-qemu-action@v2
+
+      # https://github.com/docker/login-action
+      - name: Login to DockerHub
+        # Only login if not a PR, as PRs only trigger a Docker build and not a push
+        if: github.event_name != 'pull_request'
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
+
       # Get Metadata for docker_build step below
       - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace' image
         id: meta_build
@@ -104,7 +136,7 @@ jobs:

       - name: Build and push 'dspace' image
         id: docker_build
-        uses: docker/build-push-action@v3
+        uses: docker/build-push-action@v4
         with:
           context: .
           file: ./Dockerfile
@@ -116,9 +148,38 @@ jobs:
           tags: ${{ steps.meta_build.outputs.tags }}
           labels: ${{ steps.meta_build.outputs.labels }}

-  #####################################################
+  #############################################################
   # Build/Push the 'dspace/dspace' image ('-test' tag)
-  #####################################################
+  #############################################################
+  dspace-test:
+    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
+    if: github.repository == 'dspace/dspace'
+    # Must run after 'dspace-dependencies' job above
+    needs: dspace-dependencies
+    runs-on: ubuntu-latest
+
+    steps:
+      # https://github.com/actions/checkout
+      - name: Checkout codebase
+        uses: actions/checkout@v3
+
+      # https://github.com/docker/setup-buildx-action
+      - name: Setup Docker Buildx
+        uses: docker/setup-buildx-action@v2
+
+      # https://github.com/docker/setup-qemu-action
+      - name: Set up QEMU emulation to build for multiple architectures
+        uses: docker/setup-qemu-action@v2
+
+      # https://github.com/docker/login-action
+      - name: Login to DockerHub
+        # Only login if not a PR, as PRs only trigger a Docker build and not a push
+        if: github.event_name != 'pull_request'
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
+
       # Get Metadata for docker_build_test step below
       - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-test' image
         id: meta_build_test
@@ -133,7 +194,7 @@ jobs:

       - name: Build and push 'dspace-test' image
         id: docker_build_test
-        uses: docker/build-push-action@v3
+        uses: docker/build-push-action@v4
         with:
           context: .
           file: ./Dockerfile.test
@@ -145,9 +206,38 @@ jobs:
           tags: ${{ steps.meta_build_test.outputs.tags }}
           labels: ${{ steps.meta_build_test.outputs.labels }}

   ###########################################
   # Build/Push the 'dspace/dspace-cli' image
   ###########################################
+  dspace-cli:
+    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
+    if: github.repository == 'dspace/dspace'
+    # Must run after 'dspace-dependencies' job above
+    needs: dspace-dependencies
+    runs-on: ubuntu-latest
+
+    steps:
+      # https://github.com/actions/checkout
+      - name: Checkout codebase
+        uses: actions/checkout@v3
+
+      # https://github.com/docker/setup-buildx-action
+      - name: Setup Docker Buildx
+        uses: docker/setup-buildx-action@v2
+
+      # https://github.com/docker/setup-qemu-action
+      - name: Set up QEMU emulation to build for multiple architectures
+        uses: docker/setup-qemu-action@v2
+
+      # https://github.com/docker/login-action
+      - name: Login to DockerHub
+        # Only login if not a PR, as PRs only trigger a Docker build and not a push
+        if: github.event_name != 'pull_request'
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
+
       # Get Metadata for docker_build_test step below
       - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-cli' image
         id: meta_build_cli
@@ -159,7 +249,7 @@ jobs:

       - name: Build and push 'dspace-cli' image
         id: docker_build_cli
-        uses: docker/build-push-action@v3
+        uses: docker/build-push-action@v4
         with:
           context: .
           file: ./Dockerfile.cli
@@ -170,3 +260,167 @@ jobs:
           # Use tags / labels provided by 'docker/metadata-action' above
           tags: ${{ steps.meta_build_cli.outputs.tags }}
           labels: ${{ steps.meta_build_cli.outputs.labels }}
+
+  ###########################################
+  # Build/Push the 'dspace/dspace-solr' image
+  ###########################################
+  dspace-solr:
+    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
+    if: github.repository == 'dspace/dspace'
+    runs-on: ubuntu-latest
+
+    steps:
+      # https://github.com/actions/checkout
+      - name: Checkout codebase
+        uses: actions/checkout@v3
+
+      # https://github.com/docker/setup-buildx-action
+      - name: Setup Docker Buildx
+        uses: docker/setup-buildx-action@v2
+
+      # https://github.com/docker/setup-qemu-action
+      - name: Set up QEMU emulation to build for multiple architectures
+        uses: docker/setup-qemu-action@v2
+
+      # https://github.com/docker/login-action
+      - name: Login to DockerHub
+        # Only login if not a PR, as PRs only trigger a Docker build and not a push
+        if: github.event_name != 'pull_request'
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
+
+      # Get Metadata for docker_build_solr step below
+      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-solr' image
+        id: meta_build_solr
+        uses: docker/metadata-action@v4
+        with:
+          images: dspace/dspace-solr
+          tags: ${{ env.IMAGE_TAGS }}
+          flavor: ${{ env.TAGS_FLAVOR }}
+
+      - name: Build and push 'dspace-solr' image
+        id: docker_build_solr
+        uses: docker/build-push-action@v4
+        with:
+          context: .
+          file: ./dspace/src/main/docker/dspace-solr/Dockerfile
+          platforms: ${{ env.PLATFORMS }}
+          # For pull requests, we run the Docker build (to ensure no PR changes break the build),
+          # but we ONLY do an image push to DockerHub if it's NOT a PR
+          push: ${{ github.event_name != 'pull_request' }}
+          # Use tags / labels provided by 'docker/metadata-action' above
+          tags: ${{ steps.meta_build_solr.outputs.tags }}
+          labels: ${{ steps.meta_build_solr.outputs.labels }}
+
+  ###########################################################
+  # Build/Push the 'dspace/dspace-postgres-pgcrypto' image
+  ###########################################################
+  dspace-postgres-pgcrypto:
+    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
+    if: github.repository == 'dspace/dspace'
+    runs-on: ubuntu-latest
+
+    steps:
+      # https://github.com/actions/checkout
+      - name: Checkout codebase
+        uses: actions/checkout@v3
+
+      # https://github.com/docker/setup-buildx-action
+      - name: Setup Docker Buildx
+        uses: docker/setup-buildx-action@v2
+
+      # https://github.com/docker/setup-qemu-action
+      - name: Set up QEMU emulation to build for multiple architectures
+        uses: docker/setup-qemu-action@v2
+
+      # https://github.com/docker/login-action
+      - name: Login to DockerHub
+        # Only login if not a PR, as PRs only trigger a Docker build and not a push
+        if: github.event_name != 'pull_request'
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
+
+      # Get Metadata for docker_build_postgres step below
+      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-postgres-pgcrypto' image
+        id: meta_build_postgres
+        uses: docker/metadata-action@v4
+        with:
+          images: dspace/dspace-postgres-pgcrypto
+          tags: ${{ env.IMAGE_TAGS }}
+          flavor: ${{ env.TAGS_FLAVOR }}
+
+      - name: Build and push 'dspace-postgres-pgcrypto' image
+        id: docker_build_postgres
+        uses: docker/build-push-action@v4
+        with:
+          # Must build out of subdirectory to have access to install script for pgcrypto
+          context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/
+          dockerfile: Dockerfile
+          platforms: ${{ env.PLATFORMS }}
+          # For pull requests, we run the Docker build (to ensure no PR changes break the build),
+          # but we ONLY do an image push to DockerHub if it's NOT a PR
+          push: ${{ github.event_name != 'pull_request' }}
+          # Use tags / labels provided by 'docker/metadata-action' above
+          tags: ${{ steps.meta_build_postgres.outputs.tags }}
+          labels: ${{ steps.meta_build_postgres.outputs.labels }}
+
+  ########################################################################
+  # Build/Push the 'dspace/dspace-postgres-pgcrypto' image (-loadsql tag)
+  ########################################################################
+  dspace-postgres-pgcrypto-loadsql:
+    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
+    if: github.repository == 'dspace/dspace'
+    runs-on: ubuntu-latest
+
+    steps:
+      # https://github.com/actions/checkout
+      - name: Checkout codebase
+        uses: actions/checkout@v3
+
+      # https://github.com/docker/setup-buildx-action
+      - name: Setup Docker Buildx
+        uses: docker/setup-buildx-action@v2
+
+      # https://github.com/docker/setup-qemu-action
+      - name: Set up QEMU emulation to build for multiple architectures
+        uses: docker/setup-qemu-action@v2
+
+      # https://github.com/docker/login-action
+      - name: Login to DockerHub
+        # Only login if not a PR, as PRs only trigger a Docker build and not a push
+        if: github.event_name != 'pull_request'
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
+
+      # Get Metadata for docker_build_postgres_loadsql step below
+      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-postgres-pgcrypto-loadsql' image
+        id: meta_build_postgres_loadsql
+        uses: docker/metadata-action@v4
+        with:
+          images: dspace/dspace-postgres-pgcrypto
+          tags: ${{ env.IMAGE_TAGS }}
+          # Suffix all tags with "-loadsql". Otherwise, it uses the same
+          # tagging logic as the primary 'dspace/dspace-postgres-pgcrypto' image above.
+          flavor: ${{ env.TAGS_FLAVOR }}
+            suffix=-loadsql
+
+      - name: Build and push 'dspace-postgres-pgcrypto-loadsql' image
+        id: docker_build_postgres_loadsql
+        uses: docker/build-push-action@v4
+        with:
+          # Must build out of subdirectory to have access to install script for pgcrypto
+          context: ./dspace/src/main/docker/dspace-postgres-pgcrypto-curl/
+          dockerfile: Dockerfile
+          platforms: ${{ env.PLATFORMS }}
+          # For pull requests, we run the Docker build (to ensure no PR changes break the build),
+          # but we ONLY do an image push to DockerHub if it's NOT a PR
+          push: ${{ github.event_name != 'pull_request' }}
+          # Use tags / labels provided by 'docker/metadata-action' above
+          tags: ${{ steps.meta_build_postgres_loadsql.outputs.tags }}
+          labels: ${{ steps.meta_build_postgres_loadsql.outputs.labels }}
```
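One detail of the `-loadsql` job worth spelling out: once `${{ env.TAGS_FLAVOR }}` expands, the effective `flavor` input is the shared `latest=false` plus the extra `suffix=-loadsql` line, so every tag produced from `IMAGE_TAGS` gets the suffix appended. A sketch of that expanded value follows; the example tag names in the comments are illustrative assumptions.

```yaml
# Effective flavor input for the '-loadsql' job after the shared env has expanded.
# With this, the triggers that would tag the primary image 'latest' or 'dspace-7_x'
# tag this image 'latest-loadsql' or 'dspace-7_x-loadsql' instead.
flavor: |
  latest=false
  suffix=-loadsql
```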
.github/workflows/issue_opened.yml (2 changes)
```diff
@@ -16,7 +16,7 @@ jobs:
       # Only add to project board if issue is flagged as "needs triage" or has no labels
       # NOTE: By default we flag new issues as "needs triage" in our issue template
       if: (contains(github.event.issue.labels.*.name, 'needs triage') || join(github.event.issue.labels.*.name) == '')
-      uses: actions/add-to-project@v0.3.0
+      uses: actions/add-to-project@v0.5.0
       # Note, the authentication token below is an ORG level Secret.
       # It must be created/recreated manually via a personal access token with admin:org, project, public_repo permissions
       # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token#permissions-for-the-github_token
```
.github/workflows/label_merge_conflicts.yml (11 changes)
```diff
@@ -1,11 +1,12 @@
 # This workflow checks open PRs for merge conflicts and labels them when conflicts are found
 name: Check for merge conflicts

-# Run whenever the "main" branch is updated
-# NOTE: This means merge conflicts are only checked for when a PR is merged to main.
+# Run this for all pushes (i.e. merges) to 'main' or maintenance branches
 on:
   push:
-    branches: [ main ]
+    branches:
+      - main
+      - 'dspace-**'
   # So that the `conflict_label_name` is removed if conflicts are resolved,
   # we allow this to run for `pull_request_target` so that github secrets are available.
   pull_request_target:
@@ -23,7 +24,9 @@ jobs:
     steps:
       # See: https://github.com/prince-chrismc/label-merge-conflicts-action
       - name: Auto-label PRs with merge conflicts
-        uses: prince-chrismc/label-merge-conflicts-action@v2
+        uses: prince-chrismc/label-merge-conflicts-action@v3
+        # Ignore any failures -- may occur (randomly?) for older, outdated PRs.
+        continue-on-error: true
         # Add "merge conflict" label if a merge conflict is detected. Remove it when resolved.
         # Note, the authentication token is created automatically
         # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token
```
.github/workflows/port_merged_pull_request.yml (new file, 44 lines)
```diff
@@ -0,0 +1,44 @@
+# This workflow will attempt to port a merged pull request to
+# the branch specified in a "port to" label (if exists)
+name: Port merged Pull Request
+
+# Only run for merged PRs against the "main" or maintenance branches
+# We allow this to run for `pull_request_target` so that github secrets are available
+# (This is required when the PR comes from a forked repo)
+on:
+  pull_request_target:
+    types: [ closed ]
+    branches:
+      - main
+      - 'dspace-**'
+
+permissions:
+  contents: write # so action can add comments
+  pull-requests: write # so action can create pull requests
+
+jobs:
+  port_pr:
+    runs-on: ubuntu-latest
+    # Don't run on closed *unmerged* pull requests
+    if: github.event.pull_request.merged
+    steps:
+      # Checkout code
+      - uses: actions/checkout@v3
+      # Port PR to other branch (ONLY if labeled with "port to")
+      # See https://github.com/korthout/backport-action
+      - name: Create backport pull requests
+        uses: korthout/backport-action@v1
+        with:
+          # Trigger based on a "port to [branch]" label on PR
+          # (This label must specify the branch name to port to)
+          label_pattern: '^port to ([^ ]+)$'
+          # Title to add to the (newly created) port PR
+          pull_title: '[Port ${target_branch}] ${pull_title}'
+          # Description to add to the (newly created) port PR
+          pull_description: 'Port of #${pull_number} by @${pull_author} to `${target_branch}`.'
+          # Copy all labels from original PR to (newly created) port PR
+          # NOTE: The labels matching 'label_pattern' are automatically excluded
+          copy_labels_pattern: '.*'
+          # Use a personal access token (PAT) to create PR as 'dspace-bot' user.
+          # A PAT is required in order for the new PR to trigger its own actions (for CI checks)
+          github_token: ${{ secrets.PR_PORT_TOKEN }}
```
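To make the label-driven flow of this new workflow concrete, here is a worked example written as comments around the key input. The PR number, author, and branch are hypothetical, purely for illustration.

```yaml
# Hypothetical example: merged PR #1234 by @example-user carries the label 'port to dspace-7_x'.
# That label matches label_pattern below, so backport-action would open a new PR against
# 'dspace-7_x' titled '[Port dspace-7_x] <original PR title>' with the description
# 'Port of #1234 by @example-user to `dspace-7_x`.', copying the PR's other labels across.
label_pattern: '^port to ([^ ]+)$'
```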
.github/workflows/pull_request_opened.yml (new file, 24 lines)
```diff
@@ -0,0 +1,24 @@
+# This workflow runs whenever a new pull request is created
+name: Pull Request opened
+
+# Only run for newly opened PRs against the "main" or maintenance branches
+# We allow this to run for `pull_request_target` so that github secrets are available
+# (This is required to assign a PR back to the creator when the PR comes from a forked repo)
+on:
+  pull_request_target:
+    types: [ opened ]
+    branches:
+      - main
+      - 'dspace-**'
+
+permissions:
+  pull-requests: write
+
+jobs:
+  automation:
+    runs-on: ubuntu-latest
+    steps:
+      # Assign the PR to whomever created it. This is useful for visualizing assignments on project boards
+      # See https://github.com/toshimaru/auto-author-assign
+      - name: Assign PR to creator
+        uses: toshimaru/auto-author-assign@v1.6.2
```
CONTRIBUTING.md (new file, 45 lines)
```diff
@@ -0,0 +1,45 @@
+# How to Contribute
+
+DSpace is a community built and supported project. We do not have a centralized development or support team, but have a dedicated group of volunteers who help us improve the software, documentation, resources, etc.
+
+* [Contribute new code via a Pull Request](#contribute-new-code-via-a-pull-request)
+* [Contribute documentation](#contribute-documentation)
+* [Help others on mailing lists or Slack](#help-others-on-mailing-lists-or-slack)
+* [Join a working or interest group](#join-a-working-or-interest-group)
+
+## Contribute new code via a Pull Request
+
+We accept [GitHub Pull Requests (PRs)](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork) at any time from anyone.
+Contributors to each release are recognized in our [Release Notes](https://wiki.lyrasis.org/display/DSDOC7x/Release+Notes).
+
+Code Contribution Checklist
+- [ ] PRs _should_ be smaller in size (ideally less than 1,000 lines of code, not including comments & tests)
+- [ ] PRs **must** pass Checkstyle validation based on our [Code Style Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Style+Guide).
+- [ ] PRs **must** include Javadoc for _all new/modified public methods and classes_. Larger private methods should also have Javadoc
+- [ ] PRs **must** pass all automated tests and include new/updated Unit or Integration tests based on our [Code Testing Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Testing+Guide).
+- [ ] If a PR includes new libraries/dependencies (in any `pom.xml`), then their software licenses **must** align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation.
+- [ ] Basic technical documentation _should_ be provided for any new features or changes to the REST API. REST API changes should be documented in our [Rest Contract](https://github.com/DSpace/RestContract).
+- [ ] If a PR fixes an issue ticket, please [link them together](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue).
+
+Additional details on the code contribution process can be found in our [Code Contribution Guidelines](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines)
+
+## Contribute documentation
+
+DSpace Documentation is a collaborative effort in a shared Wiki. The latest documentation is at https://wiki.lyrasis.org/display/DSDOC7x
+
+If you find areas of the DSpace Documentation which you wish to improve, please request a Wiki account by emailing wikihelp@lyrasis.org.
+Once you have an account setup, contact @tdonohue (via [Slack](https://wiki.lyrasis.org/display/DSPACE/Slack) or email) for access to edit our Documentation.
+
+## Help others on mailing lists or Slack
+
+DSpace has our own [Slack](https://wiki.lyrasis.org/display/DSPACE/Slack) community and [Mailing Lists](https://wiki.lyrasis.org/display/DSPACE/Mailing+Lists) where discussions take place and questions are answered.
+Anyone is welcome to join and help others. We just ask you to follow our [Code of Conduct](https://www.lyrasis.org/about/Pages/Code-of-Conduct.aspx) (adopted via LYRASIS).
+
+## Join a working or interest group
+
+Most of the work in building/improving DSpace comes via [Working Groups](https://wiki.lyrasis.org/display/DSPACE/DSpace+Working+Groups) or [Interest Groups](https://wiki.lyrasis.org/display/DSPACE/DSpace+Interest+Groups).
+
+All working/interest groups are open to anyone to join and participate. A few key groups to be aware of include:
+
+* [DSpace 7 Working Group](https://wiki.lyrasis.org/display/DSPACE/DSpace+7+Working+Group) - This is the main (mostly volunteer) development team. We meet weekly to review our current development [project board](https://github.com/orgs/DSpace/projects), assigning tickets and/or PRs.
+* [DSpace Community Advisory Team (DCAT)](https://wiki.lyrasis.org/display/cmtygp/DSpace+Community+Advisory+Team) - This is an interest group for repository managers/administrators. We meet monthly to discuss DSpace, share tips & provide feedback back to developers.
```
```diff
@@ -1,14 +1,15 @@
 # This image will be published as dspace/dspace
 # See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
 #
-# - note: default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x
+# - note: default tag for branch: dspace/dspace: dspace/dspace:latest

 # This Dockerfile uses JDK11 by default, but has also been tested with JDK17.
 # To build with JDK17, use "--build-arg JDK_VERSION=17"
 ARG JDK_VERSION=11
+ARG DSPACE_VERSION=latest

 # Step 1 - Run Maven Build
-FROM dspace/dspace-dependencies:dspace-7_x as build
+FROM dspace/dspace-dependencies:${DSPACE_VERSION} as build
 ARG TARGET_DIR=dspace-installer
 WORKDIR /app
 # The dspace-installer directory will be written to /install
@@ -31,7 +32,7 @@ ARG TARGET_DIR=dspace-installer
 COPY --from=build /install /dspace-src
 WORKDIR /dspace-src
 # Create the initial install deployment using ANT
-ENV ANT_VERSION 1.10.12
+ENV ANT_VERSION 1.10.13
 ENV ANT_HOME /tmp/ant-$ANT_VERSION
 ENV PATH $ANT_HOME/bin:$PATH
 # Need wget to install ant
```
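The new `DSPACE_VERSION` build argument above defaults to `latest`, which matches the retagged `dspace/dspace-dependencies:latest` base image. A hedged sketch of overriding it (for example when building a maintenance branch) via a Docker Compose build section follows; the service name and compose layout are assumptions for illustration, not something added by this commit.

```yaml
# Hypothetical compose override showing how the Dockerfile's build args could be pinned.
services:
  dspace:
    build:
      context: .
      dockerfile: Dockerfile
      args:
        JDK_VERSION: "17"            # override documented in the Dockerfile comments
        DSPACE_VERSION: "dspace-7_x" # pulls dspace/dspace-dependencies:dspace-7_x instead of :latest
```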
```diff
@@ -1,14 +1,15 @@
 # This image will be published as dspace/dspace-cli
 # See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
 #
-# - note: default tag for branch: dspace/dspace-cli: dspace/dspace-cli:dspace-7_x
+# - note: default tag for branch: dspace/dspace-cli: dspace/dspace-cli:latest

 # This Dockerfile uses JDK11 by default, but has also been tested with JDK17.
 # To build with JDK17, use "--build-arg JDK_VERSION=17"
 ARG JDK_VERSION=11
+ARG DSPACE_VERSION=latest

 # Step 1 - Run Maven Build
-FROM dspace/dspace-dependencies:dspace-7_x as build
+FROM dspace/dspace-dependencies:${DSPACE_VERSION} as build
 ARG TARGET_DIR=dspace-installer
 WORKDIR /app
 # The dspace-installer directory will be written to /install
@@ -30,12 +31,12 @@ ARG TARGET_DIR=dspace-installer
 COPY --from=build /install /dspace-src
 WORKDIR /dspace-src
 # Create the initial install deployment using ANT
-ENV ANT_VERSION 1.10.12
+ENV ANT_VERSION 1.10.13
 ENV ANT_HOME /tmp/ant-$ANT_VERSION
 ENV PATH $ANT_HOME/bin:$PATH
-# Need wget to install ant
+# Need wget to install ant, and unzip for managing AIPs
 RUN apt-get update \
-    && apt-get install -y --no-install-recommends wget \
+    && apt-get install -y --no-install-recommends wget unzip \
     && apt-get purge -y --auto-remove \
     && rm -rf /var/lib/apt/lists/*
 # Download and install 'ant'
```
```diff
@@ -1,16 +1,17 @@
 # This image will be published as dspace/dspace
 # See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details
 #
-# - note: default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x-test
+# - note: default tag for branch: dspace/dspace: dspace/dspace:latest-test
 #
 # This image is meant for TESTING/DEVELOPMENT ONLY as it deploys the old v6 REST API under HTTP (not HTTPS)

 # This Dockerfile uses JDK11 by default, but has also been tested with JDK17.
 # To build with JDK17, use "--build-arg JDK_VERSION=17"
 ARG JDK_VERSION=11
+ARG DSPACE_VERSION=latest

 # Step 1 - Run Maven Build
-FROM dspace/dspace-dependencies:dspace-7_x as build
+FROM dspace/dspace-dependencies:${DSPACE_VERSION} as build
 ARG TARGET_DIR=dspace-installer
 WORKDIR /app
 # The dspace-installer directory will be written to /install
```
@@ -26,20 +26,20 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
|
|||||||
* AWS Java SDK for Amazon S3 (com.amazonaws:aws-java-sdk-s3:1.12.261 - https://aws.amazon.com/sdkforjava)
|
* AWS Java SDK for Amazon S3 (com.amazonaws:aws-java-sdk-s3:1.12.261 - https://aws.amazon.com/sdkforjava)
|
||||||
* JMES Path Query library (com.amazonaws:jmespath-java:1.12.261 - https://aws.amazon.com/sdkforjava)
|
* JMES Path Query library (com.amazonaws:jmespath-java:1.12.261 - https://aws.amazon.com/sdkforjava)
|
||||||
* HPPC Collections (com.carrotsearch:hppc:0.8.1 - http://labs.carrotsearch.com/hppc.html/hppc)
|
* HPPC Collections (com.carrotsearch:hppc:0.8.1 - http://labs.carrotsearch.com/hppc.html/hppc)
|
||||||
* com.drewnoakes:metadata-extractor (com.drewnoakes:metadata-extractor:2.16.0 - https://drewnoakes.com/code/exif/)
|
* com.drewnoakes:metadata-extractor (com.drewnoakes:metadata-extractor:2.18.0 - https://drewnoakes.com/code/exif/)
|
||||||
* parso (com.epam:parso:2.0.14 - https://github.com/epam/parso)
|
* parso (com.epam:parso:2.0.14 - https://github.com/epam/parso)
|
||||||
* Esri Geometry API for Java (com.esri.geometry:esri-geometry-api:2.2.0 - https://github.com/Esri/geometry-api-java)
|
* Esri Geometry API for Java (com.esri.geometry:esri-geometry-api:2.2.0 - https://github.com/Esri/geometry-api-java)
|
||||||
* ClassMate (com.fasterxml:classmate:1.3.0 - http://github.com/cowtowncoder/java-classmate)
|
* ClassMate (com.fasterxml:classmate:1.3.0 - http://github.com/cowtowncoder/java-classmate)
|
||||||
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.12.6 - http://github.com/FasterXML/jackson)
|
* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.13.4 - http://github.com/FasterXML/jackson)
|
||||||
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.12.6 - https://github.com/FasterXML/jackson-core)
|
* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.13.4 - https://github.com/FasterXML/jackson-core)
|
||||||
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.12.6.1 - http://github.com/FasterXML/jackson)
|
* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.13.4.2 - http://github.com/FasterXML/jackson)
|
||||||
* Jackson dataformat: CBOR (com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.12.6 - http://github.com/FasterXML/jackson-dataformats-binary)
|
* Jackson dataformat: CBOR (com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.12.6 - http://github.com/FasterXML/jackson-dataformats-binary)
|
||||||
* Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.12.3 - http://github.com/FasterXML/jackson-dataformats-binary)
|
* Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.13.3 - http://github.com/FasterXML/jackson-dataformats-binary)
|
||||||
* Jackson-dataformat-YAML (com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1 - https://github.com/FasterXML/jackson-dataformats-text)
|
* Jackson-dataformat-YAML (com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1 - https://github.com/FasterXML/jackson-dataformats-text)
|
||||||
* Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8)
|
* Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.13.5 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8)
|
||||||
* Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.11.1 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310)
|
* Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.11.1 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310)
|
||||||
* Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310)
|
* Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.13.5 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310)
|
||||||
* Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.13.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names)
|
* Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.13.5 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names)
|
||||||
* Java UUID Generator (com.fasterxml.uuid:java-uuid-generator:4.0.1 - https://github.com/cowtowncoder/java-uuid-generator)
|
* Java UUID Generator (com.fasterxml.uuid:java-uuid-generator:4.0.1 - https://github.com/cowtowncoder/java-uuid-generator)
|
||||||
* Woodstox (com.fasterxml.woodstox:woodstox-core:6.2.4 - https://github.com/FasterXML/woodstox)
|
* Woodstox (com.fasterxml.woodstox:woodstox-core:6.2.4 - https://github.com/FasterXML/woodstox)
|
||||||
* zjsonpatch (com.flipkart.zjsonpatch:zjsonpatch:0.4.6 - https://github.com/flipkart-incubator/zjsonpatch/)
|
* zjsonpatch (com.flipkart.zjsonpatch:zjsonpatch:0.4.6 - https://github.com/flipkart-incubator/zjsonpatch/)
|
||||||
@@ -56,19 +56,19 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
 * Google Analytics API v3-rev145-1.23.0 (com.google.apis:google-api-services-analytics:v3-rev145-1.23.0 - http://nexus.sonatype.org/oss-repository-hosting.html/google-api-services-analytics)
 * FindBugs-jsr305 (com.google.code.findbugs:jsr305:3.0.1 - http://findbugs.sourceforge.net/)
 * Gson (com.google.code.gson:gson:2.9.0 - https://github.com/google/gson/gson)
-* error-prone annotations (com.google.errorprone:error_prone_annotations:2.7.1 - http://nexus.sonatype.org/oss-repository-hosting.html/error_prone_parent/error_prone_annotations)
+* error-prone annotations (com.google.errorprone:error_prone_annotations:2.18.0 - https://errorprone.info/error_prone_annotations)
 * Guava InternalFutureFailureAccess and InternalFutures (com.google.guava:failureaccess:1.0.1 - https://github.com/google/guava/failureaccess)
-* Guava: Google Core Libraries for Java (com.google.guava:guava:31.0.1-jre - https://github.com/google/guava)
+* Guava: Google Core Libraries for Java (com.google.guava:guava:32.0.0-jre - https://github.com/google/guava)
 * Guava: Google Core Libraries for Java (JDK5 Backport) (com.google.guava:guava-jdk5:17.0 - http://code.google.com/p/guava-libraries/guava-jdk5)
 * Guava ListenableFuture only (com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava - https://github.com/google/guava/listenablefuture)
 * Google HTTP Client Library for Java (com.google.http-client:google-http-client:1.23.0 - https://github.com/google/google-http-java-client/google-http-client)
 * GSON extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-gson:1.41.7 - https://github.com/googleapis/google-http-java-client/google-http-client-gson)
 * Jackson 2 extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-jackson2:1.23.0 - https://github.com/google/google-http-java-client/google-http-client-jackson2)
-* J2ObjC Annotations (com.google.j2objc:j2objc-annotations:1.3 - https://github.com/google/j2objc/)
+* J2ObjC Annotations (com.google.j2objc:j2objc-annotations:2.8 - https://github.com/google/j2objc/)
 * Google OAuth Client Library for Java (com.google.oauth-client:google-oauth-client:1.33.3 - https://github.com/googleapis/google-oauth-java-client/google-oauth-client)
 * ConcurrentLinkedHashMap (com.googlecode.concurrentlinkedhashmap:concurrentlinkedhashmap-lru:1.4.2 - http://code.google.com/p/concurrentlinkedhashmap)
 * libphonenumber (com.googlecode.libphonenumber:libphonenumber:8.11.1 - https://github.com/google/libphonenumber/)
-* Jackcess (com.healthmarketscience.jackcess:jackcess:4.0.1 - https://jackcess.sourceforge.io)
+* Jackcess (com.healthmarketscience.jackcess:jackcess:4.0.2 - https://jackcess.sourceforge.io)
 * Jackcess Encrypt (com.healthmarketscience.jackcess:jackcess-encrypt:4.0.1 - http://jackcessencrypt.sf.net)
 * project ':json-path' (com.jayway.jsonpath:json-path:2.6.0 - https://github.com/jayway/JsonPath)
 * project ':json-path-assert' (com.jayway.jsonpath:json-path-assert:2.6.0 - https://github.com/jayway/JsonPath)
@@ -79,11 +79,20 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
 * Nimbus JOSE+JWT (com.nimbusds:nimbus-jose-jwt:7.9 - https://bitbucket.org/connect2id/nimbus-jose-jwt)
 * opencsv (com.opencsv:opencsv:5.6 - http://opencsv.sf.net)
 * java-libpst (com.pff:java-libpst:0.9.3 - https://github.com/rjohnsondev/java-libpst)
-* rome (com.rometools:rome:1.18.0 - http://rometools.com/rome)
+* rome (com.rometools:rome:1.19.0 - http://rometools.com/rome)
-* rome-modules (com.rometools:rome-modules:1.18.0 - http://rometools.com/rome-modules)
+* rome-modules (com.rometools:rome-modules:1.19.0 - http://rometools.com/rome-modules)
-* rome-utils (com.rometools:rome-utils:1.18.0 - http://rometools.com/rome-utils)
+* rome-utils (com.rometools:rome-utils:1.19.0 - http://rometools.com/rome-utils)
 * fastinfoset (com.sun.xml.fastinfoset:FastInfoset:1.2.15 - http://fi.java.net)
 * T-Digest (com.tdunning:t-digest:3.1 - https://github.com/tdunning/t-digest)
+* config (com.typesafe:config:1.3.3 - https://github.com/lightbend/config)
+* ssl-config-core (com.typesafe:ssl-config-core_2.13:0.3.8 - https://github.com/lightbend/ssl-config)
+* akka-actor (com.typesafe.akka:akka-actor_2.13:2.5.31 - https://akka.io/)
+* akka-http-core (com.typesafe.akka:akka-http-core_2.13:10.1.12 - https://akka.io)
+* akka-http (com.typesafe.akka:akka-http_2.13:10.1.12 - https://akka.io)
+* akka-parsing (com.typesafe.akka:akka-parsing_2.13:10.1.12 - https://akka.io)
+* akka-protobuf (com.typesafe.akka:akka-protobuf_2.13:2.5.31 - https://akka.io/)
+* akka-stream (com.typesafe.akka:akka-stream_2.13:2.5.31 - https://akka.io/)
+* scala-logging (com.typesafe.scala-logging:scala-logging_2.13:3.9.2 - https://github.com/lightbend/scala-logging)
 * JSON library from Android SDK (com.vaadin.external.google:android-json:0.0.20131108.vaadin1 - http://developer.android.com/sdk)
 * SparseBitSet (com.zaxxer:SparseBitSet:1.2 - https://github.com/brettwooldridge/SparseBitSet)
 * Apache Commons BeanUtils (commons-beanutils:commons-beanutils:1.9.4 - https://commons.apache.org/proper/commons-beanutils/)
@@ -91,20 +100,19 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
 * Apache Commons Codec (commons-codec:commons-codec:1.10 - http://commons.apache.org/proper/commons-codec/)
 * Apache Commons Collections (commons-collections:commons-collections:3.2.2 - http://commons.apache.org/collections/)
 * Commons Digester (commons-digester:commons-digester:1.8.1 - http://commons.apache.org/digester/)
-* Apache Commons FileUpload (commons-fileupload:commons-fileupload:1.3.3 - http://commons.apache.org/proper/commons-fileupload/)
+* Apache Commons FileUpload (commons-fileupload:commons-fileupload:1.5 - https://commons.apache.org/proper/commons-fileupload/)
 * Apache Commons IO (commons-io:commons-io:2.7 - https://commons.apache.org/proper/commons-io/)
 * Commons Lang (commons-lang:commons-lang:2.6 - http://commons.apache.org/lang/)
 * Apache Commons Logging (commons-logging:commons-logging:1.2 - http://commons.apache.org/proper/commons-logging/)
 * Apache Commons Validator (commons-validator:commons-validator:1.5.0 - http://commons.apache.org/proper/commons-validator/)
 * GeoJson POJOs for Jackson (de.grundid.opendatalab:geojson-jackson:1.14 - https://github.com/opendatalab-de/geojson-jackson)
-* Boilerpipe -- Boilerplate Removal and Fulltext Extraction from HTML pages (de.l3s.boilerpipe:boilerpipe:1.1.0 - http://code.google.com/p/boilerpipe/)
 * OpenAIRE Funders Model (eu.openaire:funders-model:2.0.0 - https://api.openaire.eu)
 * Metrics Core (io.dropwizard.metrics:metrics-core:4.1.5 - https://metrics.dropwizard.io/metrics-core)
 * Graphite Integration for Metrics (io.dropwizard.metrics:metrics-graphite:4.1.5 - https://metrics.dropwizard.io/metrics-graphite)
 * Metrics Integration for Jetty 9.3 and higher (io.dropwizard.metrics:metrics-jetty9:4.1.5 - https://metrics.dropwizard.io/metrics-jetty9)
 * Metrics Integration with JMX (io.dropwizard.metrics:metrics-jmx:4.1.5 - https://metrics.dropwizard.io/metrics-jmx)
 * JVM Integration for Metrics (io.dropwizard.metrics:metrics-jvm:4.1.5 - https://metrics.dropwizard.io/metrics-jvm)
-* micrometer-core (io.micrometer:micrometer-core:1.8.6 - https://github.com/micrometer-metrics/micrometer)
+* micrometer-core (io.micrometer:micrometer-core:1.9.11 - https://github.com/micrometer-metrics/micrometer)
 * Netty/Buffer (io.netty:netty-buffer:4.1.68.Final - https://netty.io/netty-buffer/)
 * Netty/Codec (io.netty:netty-codec:4.1.68.Final - https://netty.io/netty-codec/)
 * Netty/Codec/HTTP (io.netty:netty-codec-http:4.1.53.Final - https://netty.io/netty-codec-http/)
@@ -151,7 +159,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
 * I18N Libraries (org.apache.abdera:abdera-i18n:1.1.3 - http://abdera.apache.org)
 * Apache Ant Core (org.apache.ant:ant:1.10.11 - https://ant.apache.org/)
 * Apache Ant Launcher (org.apache.ant:ant-launcher:1.10.11 - https://ant.apache.org/)
-* Apache Commons BCEL (org.apache.bcel:bcel:6.4.0 - https://commons.apache.org/proper/commons-bcel)
+* Apache Commons BCEL (org.apache.bcel:bcel:6.6.0 - https://commons.apache.org/proper/commons-bcel)
 * Calcite Core (org.apache.calcite:calcite-core:1.27.0 - https://calcite.apache.org)
 * Calcite Linq4j (org.apache.calcite:calcite-linq4j:1.27.0 - https://calcite.apache.org)
 * Apache Calcite Avatica (org.apache.calcite.avatica:avatica-core:1.18.0 - https://calcite.apache.org/avatica)
@@ -159,12 +167,12 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
 * Apache Commons Compress (org.apache.commons:commons-compress:1.21 - https://commons.apache.org/proper/commons-compress/)
 * Apache Commons Configuration (org.apache.commons:commons-configuration2:2.8.0 - https://commons.apache.org/proper/commons-configuration/)
 * Apache Commons CSV (org.apache.commons:commons-csv:1.9.0 - https://commons.apache.org/proper/commons-csv/)
-* Apache Commons DBCP (org.apache.commons:commons-dbcp2:2.8.0 - https://commons.apache.org/dbcp/)
+* Apache Commons DBCP (org.apache.commons:commons-dbcp2:2.9.0 - https://commons.apache.org/dbcp/)
 * Apache Commons Exec (org.apache.commons:commons-exec:1.3 - http://commons.apache.org/proper/commons-exec/)
 * Apache Commons Lang (org.apache.commons:commons-lang3:3.12.0 - https://commons.apache.org/proper/commons-lang/)
 * Apache Commons Math (org.apache.commons:commons-math3:3.6.1 - http://commons.apache.org/proper/commons-math/)
-* Apache Commons Pool (org.apache.commons:commons-pool2:2.9.0 - https://commons.apache.org/proper/commons-pool/)
+* Apache Commons Pool (org.apache.commons:commons-pool2:2.11.1 - https://commons.apache.org/proper/commons-pool/)
-* Apache Commons Text (org.apache.commons:commons-text:1.9 - https://commons.apache.org/proper/commons-text)
+* Apache Commons Text (org.apache.commons:commons-text:1.10.0 - https://commons.apache.org/proper/commons-text)
 * Curator Client (org.apache.curator:curator-client:2.13.0 - http://curator.apache.org/curator-client)
 * Curator Framework (org.apache.curator:curator-framework:2.13.0 - http://curator.apache.org/curator-framework)
 * Curator Recipes (org.apache.curator:curator-recipes:2.13.0 - http://curator.apache.org/curator-recipes)
@@ -188,88 +196,87 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
 * Kerby-kerb Util (org.apache.kerby:kerb-util:1.0.1 - http://directory.apache.org/kerby/kerby-kerb/kerb-util)
 * Kerby ASN1 Project (org.apache.kerby:kerby-asn1:1.0.1 - http://directory.apache.org/kerby/kerby-common/kerby-asn1)
 * Kerby PKIX Project (org.apache.kerby:kerby-pkix:1.0.1 - http://directory.apache.org/kerby/kerby-pkix)
-* Apache Log4j 1.x Compatibility API (org.apache.logging.log4j:log4j-1.2-api:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-1.2-api/)
+* Apache Log4j 1.x Compatibility API (org.apache.logging.log4j:log4j-1.2-api:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-1.2-api/)
-* Apache Log4j API (org.apache.logging.log4j:log4j-api:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-api/)
+* Apache Log4j API (org.apache.logging.log4j:log4j-api:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-api/)
-* Apache Log4j Core (org.apache.logging.log4j:log4j-core:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-core/)
+* Apache Log4j Core (org.apache.logging.log4j:log4j-core:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-core/)
-* Apache Log4j JUL Adapter (org.apache.logging.log4j:log4j-jul:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-jul/)
+* Apache Log4j JUL Adapter (org.apache.logging.log4j:log4j-jul:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-jul/)
-* Apache Log4j Layout for JSON template (org.apache.logging.log4j:log4j-layout-template-json:2.16.0 - https://logging.apache.org/log4j/2.x/log4j-layout-template-json/)
+* Apache Log4j Layout for JSON template (org.apache.logging.log4j:log4j-layout-template-json:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-layout-template-json/)
-* Apache Log4j SLF4J Binding (org.apache.logging.log4j:log4j-slf4j-impl:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-slf4j-impl/)
+* Apache Log4j SLF4J Binding (org.apache.logging.log4j:log4j-slf4j-impl:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-slf4j-impl/)
-* Apache Log4j Web (org.apache.logging.log4j:log4j-web:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-web/)
+* Apache Log4j Web (org.apache.logging.log4j:log4j-web:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-web/)
-* Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-common)
+* Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-common)
-* Lucene ICU Analysis Components (org.apache.lucene:lucene-analyzers-icu:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-icu)
+* Lucene ICU Analysis Components (org.apache.lucene:lucene-analyzers-icu:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-icu)
-* Lucene Kuromoji Japanese Morphological Analyzer (org.apache.lucene:lucene-analyzers-kuromoji:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-kuromoji)
+* Lucene Kuromoji Japanese Morphological Analyzer (org.apache.lucene:lucene-analyzers-kuromoji:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-kuromoji)
-* Lucene Nori Korean Morphological Analyzer (org.apache.lucene:lucene-analyzers-nori:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-nori)
+* Lucene Nori Korean Morphological Analyzer (org.apache.lucene:lucene-analyzers-nori:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-nori)
-* Lucene Phonetic Filters (org.apache.lucene:lucene-analyzers-phonetic:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-phonetic)
+* Lucene Phonetic Filters (org.apache.lucene:lucene-analyzers-phonetic:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-phonetic)
-* Lucene Smart Chinese Analyzer (org.apache.lucene:lucene-analyzers-smartcn:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-smartcn)
+* Lucene Smart Chinese Analyzer (org.apache.lucene:lucene-analyzers-smartcn:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-smartcn)
-* Lucene Stempel Analyzer (org.apache.lucene:lucene-analyzers-stempel:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-stempel)
+* Lucene Stempel Analyzer (org.apache.lucene:lucene-analyzers-stempel:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-stempel)
-* Lucene Memory (org.apache.lucene:lucene-backward-codecs:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-backward-codecs)
+* Lucene Memory (org.apache.lucene:lucene-backward-codecs:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-backward-codecs)
-* Lucene Classification (org.apache.lucene:lucene-classification:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-classification)
+* Lucene Classification (org.apache.lucene:lucene-classification:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-classification)
-* Lucene codecs (org.apache.lucene:lucene-codecs:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-codecs)
+* Lucene codecs (org.apache.lucene:lucene-codecs:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-codecs)
-* Lucene Core (org.apache.lucene:lucene-core:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-core)
+* Lucene Core (org.apache.lucene:lucene-core:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-core)
-* Lucene Expressions (org.apache.lucene:lucene-expressions:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-expressions)
+* Lucene Expressions (org.apache.lucene:lucene-expressions:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-expressions)
-* Lucene Grouping (org.apache.lucene:lucene-grouping:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-grouping)
+* Lucene Grouping (org.apache.lucene:lucene-grouping:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-grouping)
-* Lucene Highlighter (org.apache.lucene:lucene-highlighter:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-highlighter)
+* Lucene Highlighter (org.apache.lucene:lucene-highlighter:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-highlighter)
-* Lucene Join (org.apache.lucene:lucene-join:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-join)
+* Lucene Join (org.apache.lucene:lucene-join:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-join)
-* Lucene Memory (org.apache.lucene:lucene-memory:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-memory)
+* Lucene Memory (org.apache.lucene:lucene-memory:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-memory)
-* Lucene Miscellaneous (org.apache.lucene:lucene-misc:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-misc)
+* Lucene Miscellaneous (org.apache.lucene:lucene-misc:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-misc)
-* Lucene Queries (org.apache.lucene:lucene-queries:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-queries)
+* Lucene Queries (org.apache.lucene:lucene-queries:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-queries)
-* Lucene QueryParsers (org.apache.lucene:lucene-queryparser:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-queryparser)
+* Lucene QueryParsers (org.apache.lucene:lucene-queryparser:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-queryparser)
-* Lucene Sandbox (org.apache.lucene:lucene-sandbox:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-sandbox)
+* Lucene Sandbox (org.apache.lucene:lucene-sandbox:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-sandbox)
-* Lucene Spatial Extras (org.apache.lucene:lucene-spatial-extras:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-spatial-extras)
+* Lucene Spatial Extras (org.apache.lucene:lucene-spatial-extras:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-spatial-extras)
-* Lucene Spatial 3D (org.apache.lucene:lucene-spatial3d:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-spatial3d)
+* Lucene Spatial 3D (org.apache.lucene:lucene-spatial3d:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-spatial3d)
-* Lucene Suggest (org.apache.lucene:lucene-suggest:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-suggest)
+* Lucene Suggest (org.apache.lucene:lucene-suggest:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-suggest)
-* Apache FontBox (org.apache.pdfbox:fontbox:2.0.24 - http://pdfbox.apache.org/)
+* Apache FontBox (org.apache.pdfbox:fontbox:2.0.28 - http://pdfbox.apache.org/)
-* PDFBox JBIG2 ImageIO plugin (org.apache.pdfbox:jbig2-imageio:3.0.3 - https://www.apache.org/jbig2-imageio/)
+* PDFBox JBIG2 ImageIO plugin (org.apache.pdfbox:jbig2-imageio:3.0.4 - https://www.apache.org/jbig2-imageio/)
-* Apache JempBox (org.apache.pdfbox:jempbox:1.8.16 - http://www.apache.org/pdfbox-parent/jempbox/)
+* Apache JempBox (org.apache.pdfbox:jempbox:1.8.17 - http://www.apache.org/pdfbox-parent/jempbox/)
-* Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.24 - https://www.apache.org/pdfbox-parent/pdfbox/)
+* Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.28 - https://www.apache.org/pdfbox-parent/pdfbox/)
-* Apache PDFBox Debugger (org.apache.pdfbox:pdfbox-debugger:2.0.25 - https://www.apache.org/pdfbox-parent/pdfbox-debugger/)
-* Apache PDFBox tools (org.apache.pdfbox:pdfbox-tools:2.0.25 - https://www.apache.org/pdfbox-parent/pdfbox-tools/)
-* Apache XmpBox (org.apache.pdfbox:xmpbox:2.0.25 - https://www.apache.org/pdfbox-parent/xmpbox/)
-* Apache POI - Common (org.apache.poi:poi:5.2.0 - https://poi.apache.org/)
-* Apache POI - API based on OPC and OOXML schemas (org.apache.poi:poi-ooxml:5.2.0 - https://poi.apache.org/)
-* Apache POI (org.apache.poi:poi-ooxml-lite:5.2.0 - https://poi.apache.org/)
-* Apache POI (org.apache.poi:poi-scratchpad:5.2.0 - https://poi.apache.org/)
-* Apache Solr Core (org.apache.solr:solr-core:8.11.1 - https://lucene.apache.org/solr-parent/solr-core)
-* Apache Solr Solrj (org.apache.solr:solr-solrj:8.11.1 - https://lucene.apache.org/solr-parent/solr-solrj)
+* Apache PDFBox tools (org.apache.pdfbox:pdfbox-tools:2.0.27 - https://www.apache.org/pdfbox-parent/pdfbox-tools/)
+* Apache XmpBox (org.apache.pdfbox:xmpbox:2.0.27 - https://www.apache.org/pdfbox-parent/xmpbox/)
+* Apache POI - Common (org.apache.poi:poi:5.2.3 - https://poi.apache.org/)
+* Apache POI - API based on OPC and OOXML schemas (org.apache.poi:poi-ooxml:5.2.3 - https://poi.apache.org/)
+* Apache POI (org.apache.poi:poi-ooxml-lite:5.2.3 - https://poi.apache.org/)
+* Apache POI (org.apache.poi:poi-scratchpad:5.2.3 - https://poi.apache.org/)
+* Apache Solr Core (org.apache.solr:solr-core:8.11.2 - https://lucene.apache.org/solr-parent/solr-core)
+* Apache Solr Solrj (org.apache.solr:solr-solrj:8.11.2 - https://lucene.apache.org/solr-parent/solr-solrj)
 * Apache Standard Taglib Implementation (org.apache.taglibs:taglibs-standard-impl:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-impl)
 * Apache Standard Taglib Specification API (org.apache.taglibs:taglibs-standard-spec:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-spec)
 * Apache Thrift (org.apache.thrift:libthrift:0.9.2 - http://thrift.apache.org)
-* Apache Tika core (org.apache.tika:tika-core:2.3.0 - https://tika.apache.org/)
+* Apache Tika core (org.apache.tika:tika-core:2.5.0 - https://tika.apache.org/)
-* Apache Tika Apple parser module (org.apache.tika:tika-parser-apple-module:2.3.0 - https://tika.apache.org/tika-parser-apple-module/)
+* Apache Tika Apple parser module (org.apache.tika:tika-parser-apple-module:2.5.0 - https://tika.apache.org/tika-parser-apple-module/)
-* Apache Tika audiovideo parser module (org.apache.tika:tika-parser-audiovideo-module:2.3.0 - https://tika.apache.org/tika-parser-audiovideo-module/)
+* Apache Tika audiovideo parser module (org.apache.tika:tika-parser-audiovideo-module:2.5.0 - https://tika.apache.org/tika-parser-audiovideo-module/)
-* Apache Tika cad parser module (org.apache.tika:tika-parser-cad-module:2.3.0 - https://tika.apache.org/tika-parser-cad-module/)
+* Apache Tika cad parser module (org.apache.tika:tika-parser-cad-module:2.5.0 - https://tika.apache.org/tika-parser-cad-module/)
-* Apache Tika code parser module (org.apache.tika:tika-parser-code-module:2.3.0 - https://tika.apache.org/tika-parser-code-module/)
+* Apache Tika code parser module (org.apache.tika:tika-parser-code-module:2.5.0 - https://tika.apache.org/tika-parser-code-module/)
-* Apache Tika crypto parser module (org.apache.tika:tika-parser-crypto-module:2.3.0 - https://tika.apache.org/tika-parser-crypto-module/)
+* Apache Tika crypto parser module (org.apache.tika:tika-parser-crypto-module:2.5.0 - https://tika.apache.org/tika-parser-crypto-module/)
-* Apache Tika digest commons (org.apache.tika:tika-parser-digest-commons:2.3.0 - https://tika.apache.org/tika-parser-digest-commons/)
+* Apache Tika digest commons (org.apache.tika:tika-parser-digest-commons:2.5.0 - https://tika.apache.org/tika-parser-digest-commons/)
-* Apache Tika font parser module (org.apache.tika:tika-parser-font-module:2.3.0 - https://tika.apache.org/tika-parser-font-module/)
+* Apache Tika font parser module (org.apache.tika:tika-parser-font-module:2.5.0 - https://tika.apache.org/tika-parser-font-module/)
-* Apache Tika html commons (org.apache.tika:tika-parser-html-commons:2.3.0 - https://tika.apache.org/tika-parser-html-commons/)
-* Apache Tika html parser module (org.apache.tika:tika-parser-html-module:2.3.0 - https://tika.apache.org/tika-parser-html-module/)
-* Apache Tika image parser module (org.apache.tika:tika-parser-image-module:2.3.0 - https://tika.apache.org/tika-parser-image-module/)
-* Apache Tika mail commons (org.apache.tika:tika-parser-mail-commons:2.3.0 - https://tika.apache.org/tika-parser-mail-commons/)
-* Apache Tika mail parser module (org.apache.tika:tika-parser-mail-module:2.3.0 - https://tika.apache.org/tika-parser-mail-module/)
-* Apache Tika Microsoft parser module (org.apache.tika:tika-parser-microsoft-module:2.3.0 - https://tika.apache.org/tika-parser-microsoft-module/)
-* Apache Tika miscellaneous office format parser module (org.apache.tika:tika-parser-miscoffice-module:2.3.0 - https://tika.apache.org/tika-parser-miscoffice-module/)
-* Apache Tika news parser module (org.apache.tika:tika-parser-news-module:2.3.0 - https://tika.apache.org/tika-parser-news-module/)
-* Apache Tika OCR parser module (org.apache.tika:tika-parser-ocr-module:2.3.0 - https://tika.apache.org/tika-parser-ocr-module/)
-* Apache Tika PDF parser module (org.apache.tika:tika-parser-pdf-module:2.3.0 - https://tika.apache.org/tika-parser-pdf-module/)
-* Apache Tika package parser module (org.apache.tika:tika-parser-pkg-module:2.3.0 - https://tika.apache.org/tika-parser-pkg-module/)
-* Apache Tika text parser module (org.apache.tika:tika-parser-text-module:2.3.0 - https://tika.apache.org/tika-parser-text-module/)
+* Apache Tika html parser module (org.apache.tika:tika-parser-html-module:2.5.0 - https://tika.apache.org/tika-parser-html-module/)
+* Apache Tika image parser module (org.apache.tika:tika-parser-image-module:2.5.0 - https://tika.apache.org/tika-parser-image-module/)
+* Apache Tika mail commons (org.apache.tika:tika-parser-mail-commons:2.5.0 - https://tika.apache.org/tika-parser-mail-commons/)
+* Apache Tika mail parser module (org.apache.tika:tika-parser-mail-module:2.5.0 - https://tika.apache.org/tika-parser-mail-module/)
+* Apache Tika Microsoft parser module (org.apache.tika:tika-parser-microsoft-module:2.5.0 - https://tika.apache.org/tika-parser-microsoft-module/)
+* Apache Tika miscellaneous office format parser module (org.apache.tika:tika-parser-miscoffice-module:2.5.0 - https://tika.apache.org/tika-parser-miscoffice-module/)
+* Apache Tika news parser module (org.apache.tika:tika-parser-news-module:2.5.0 - https://tika.apache.org/tika-parser-news-module/)
+* Apache Tika OCR parser module (org.apache.tika:tika-parser-ocr-module:2.5.0 - https://tika.apache.org/tika-parser-ocr-module/)
+* Apache Tika PDF parser module (org.apache.tika:tika-parser-pdf-module:2.5.0 - https://tika.apache.org/tika-parser-pdf-module/)
+* Apache Tika package parser module (org.apache.tika:tika-parser-pkg-module:2.5.0 - https://tika.apache.org/tika-parser-pkg-module/)
+* Apache Tika text parser module (org.apache.tika:tika-parser-text-module:2.5.0 - https://tika.apache.org/tika-parser-text-module/)
+* Apache Tika WARC parser module (org.apache.tika:tika-parser-webarchive-module:2.5.0 - https://tika.apache.org/tika-parser-webarchive-module/)
-* Apache Tika XML parser module (org.apache.tika:tika-parser-xml-module:2.3.0 - https://tika.apache.org/tika-parser-xml-module/)
+* Apache Tika XML parser module (org.apache.tika:tika-parser-xml-module:2.5.0 - https://tika.apache.org/tika-parser-xml-module/)
-* Apache Tika XMP commons (org.apache.tika:tika-parser-xmp-commons:2.3.0 - https://tika.apache.org/tika-parser-xmp-commons/)
+* Apache Tika XMP commons (org.apache.tika:tika-parser-xmp-commons:2.5.0 - https://tika.apache.org/tika-parser-xmp-commons/)
-* Apache Tika ZIP commons (org.apache.tika:tika-parser-zip-commons:2.3.0 - https://tika.apache.org/tika-parser-zip-commons/)
+* Apache Tika ZIP commons (org.apache.tika:tika-parser-zip-commons:2.5.0 - https://tika.apache.org/tika-parser-zip-commons/)
-* Apache Tika standard parser package (org.apache.tika:tika-parsers-standard-package:2.3.0 - https://tika.apache.org/tika-parsers/tika-parsers-standard/tika-parsers-standard-package/)
+* Apache Tika standard parser package (org.apache.tika:tika-parsers-standard-package:2.5.0 - https://tika.apache.org/tika-parsers/tika-parsers-standard/tika-parsers-standard-package/)
-* tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:9.0.63 - https://tomcat.apache.org/)
+* tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:9.0.75 - https://tomcat.apache.org/)
-* tomcat-embed-el (org.apache.tomcat.embed:tomcat-embed-el:9.0.63 - https://tomcat.apache.org/)
+* tomcat-embed-el (org.apache.tomcat.embed:tomcat-embed-el:9.0.75 - https://tomcat.apache.org/)
-* tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:9.0.63 - https://tomcat.apache.org/)
+* tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:9.0.75 - https://tomcat.apache.org/)
 * Apache Velocity - Engine (org.apache.velocity:velocity-engine-core:2.3 - http://velocity.apache.org/engine/devel/velocity-engine-core/)
 * Apache Velocity - JSR 223 Scripting (org.apache.velocity:velocity-engine-scripting:2.2 - http://velocity.apache.org/engine/devel/velocity-engine-scripting/)
 * Axiom API (org.apache.ws.commons.axiom:axiom-api:1.2.22 - http://ws.apache.org/axiom/)
 * Abdera Model (FOM) Implementation (org.apache.ws.commons.axiom:fom-impl:1.2.22 - http://ws.apache.org/axiom/implementations/fom-impl/)
-* XmlBeans (org.apache.xmlbeans:xmlbeans:5.0.3 - https://xmlbeans.apache.org/)
+* XmlBeans (org.apache.xmlbeans:xmlbeans:5.1.1 - https://xmlbeans.apache.org/)
 * Apache ZooKeeper - Server (org.apache.zookeeper:zookeeper:3.6.2 - http://zookeeper.apache.org/zookeeper)
 * Apache ZooKeeper - Jute (org.apache.zookeeper:zookeeper-jute:3.6.2 - http://zookeeper.apache.org/zookeeper-jute)
 * org.apiguardian:apiguardian-api (org.apiguardian:apiguardian-api:1.1.0 - https://github.com/apiguardian-team/apiguardian)
-* AssertJ fluent assertions (org.assertj:assertj-core:3.21.0 - https://assertj.github.io/doc/assertj-core/)
+* AssertJ fluent assertions (org.assertj:assertj-core:3.22.0 - https://assertj.github.io/doc/assertj-core/)
 * Evo Inflector (org.atteo:evo-inflector:1.3 - http://atteo.org/static/evo-inflector)
 * jose4j (org.bitbucket.b_c:jose4j:0.6.5 - https://bitbucket.org/b_c/jose4j/)
 * TagSoup (org.ccil.cowan.tagsoup:tagsoup:1.2.1 - http://home.ccil.org/~cowan/XML/tagsoup/)
@@ -279,34 +286,34 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
 * Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - http://tomcat.apache.org/taglibs/standard/)
 * Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client)
 * Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client)
-* Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server)
+* Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server)
 * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
-* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
+* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
 * Jetty :: Servlet Annotations (org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty)
 * Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-client)
 * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-continuation)
-* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-continuation)
+* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-continuation)
-* Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-deploy)
+* Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-deploy)
-* Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-http)
+* Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-http)
-* Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-io)
+* Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-io)
 * Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-jmx)
 * Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty)
 * Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty)
 * Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-rewrite)
 * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-security)
-* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-security)
+* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-security)
-* Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-server)
+* Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-server)
-* Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlet)
+* Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlet)
-* Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-servlets)
+* Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlets)
-* Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util)
+* Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-util)
-* Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-util-ajax)
+* Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-util-ajax)
-* Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-webapp)
+* Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-webapp)
-* Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.48.v20220622 - https://eclipse.org/jetty/jetty-xml)
+* Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-xml)
 * Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-client)
-* Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-common)
+* Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-common)
 * Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-hpack)
 * Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport)
-* Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.48.v20220622 - https://eclipse.org/jetty/http2-parent/http2-server)
+* Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-server)
 * Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas)
 * Ehcache (org.ehcache:ehcache:3.4.0 - http://ehcache.org)
 * flyway-core (org.flywaydb:flyway-core:8.4.4 - https://flywaydb.org/flyway-core)
@@ -315,8 +322,10 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
 * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
 * jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
 * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
-* Hibernate Validator Engine (org.hibernate.validator:hibernate-validator:6.0.23.Final - http://hibernate.org/validator/hibernate-validator)
+* Hibernate Validator Engine (org.hibernate.validator:hibernate-validator:6.2.5.Final - http://hibernate.org/validator/hibernate-validator)
-* Hibernate Validator Portable Extension (org.hibernate.validator:hibernate-validator-cdi:6.0.23.Final - http://hibernate.org/validator/hibernate-validator-cdi)
+* Hibernate Validator Portable Extension (org.hibernate.validator:hibernate-validator-cdi:6.2.5.Final - http://hibernate.org/validator/hibernate-validator-cdi)
+* leveldb (org.iq80.leveldb:leveldb:0.12 - http://github.com/dain/leveldb/leveldb)
+* leveldb-api (org.iq80.leveldb:leveldb-api:0.12 - http://github.com/dain/leveldb/leveldb-api)
 * Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/)
 * Java Annotation Indexer (org.jboss:jandex:2.4.2.Final - http://www.jboss.org/jandex)
 * JBoss Logging 3 (org.jboss.logging:jboss-logging:3.4.3.Final - http://www.jboss.org)
@@ -337,59 +346,67 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
 * Jetty Servlet Tester (org.mortbay.jetty:jetty-servlet-tester:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-servlet-tester)
 * Jetty Utilities (org.mortbay.jetty:jetty-util:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-util)
 * Servlet Specification API (org.mortbay.jetty:servlet-api:2.5-20081211 - http://jetty.mortbay.org/servlet-api)
+* jwarc (org.netpreserve:jwarc:0.19.0 - https://github.com/iipc/jwarc)
 * Objenesis (org.objenesis:objenesis:3.2 - http://objenesis.org/objenesis)
 * parboiled-core (org.parboiled:parboiled-core:1.3.1 - http://parboiled.org)
 * parboiled-java (org.parboiled:parboiled-java:1.3.1 - http://parboiled.org)
 * RRD4J (org.rrd4j:rrd4j:3.5 - https://github.com/rrd4j/rrd4j/)
-* JSONassert (org.skyscreamer:jsonassert:1.5.0 - https://github.com/skyscreamer/JSONassert)
-* Spring AOP (org.springframework:spring-aop:5.3.20 - https://github.com/spring-projects/spring-framework)
-* Spring Beans (org.springframework:spring-beans:5.3.20 - https://github.com/spring-projects/spring-framework)
-* Spring Context (org.springframework:spring-context:5.3.20 - https://github.com/spring-projects/spring-framework)
-* Spring Context Support (org.springframework:spring-context-support:5.3.20 - https://github.com/spring-projects/spring-framework)
-* Spring Core (org.springframework:spring-core:5.3.20 - https://github.com/spring-projects/spring-framework)
-* Spring Expression Language (SpEL) (org.springframework:spring-expression:5.3.20 - https://github.com/spring-projects/spring-framework)
-* Spring Commons Logging Bridge (org.springframework:spring-jcl:5.3.20 - https://github.com/spring-projects/spring-framework)
-* Spring JDBC (org.springframework:spring-jdbc:5.3.20 - https://github.com/spring-projects/spring-framework)
-* Spring Object/Relational Mapping (org.springframework:spring-orm:5.3.20 - https://github.com/spring-projects/spring-framework)
-* Spring TestContext Framework (org.springframework:spring-test:5.3.20 - https://github.com/spring-projects/spring-framework)
-* Spring Transaction (org.springframework:spring-tx:5.3.20 - https://github.com/spring-projects/spring-framework)
-* Spring Web (org.springframework:spring-web:5.3.20 - https://github.com/spring-projects/spring-framework)
-* Spring Web MVC (org.springframework:spring-webmvc:5.3.20 - https://github.com/spring-projects/spring-framework)
-* spring-boot (org.springframework.boot:spring-boot:2.6.8 - https://spring.io/projects/spring-boot)
-* spring-boot-actuator (org.springframework.boot:spring-boot-actuator:2.6.8 - https://spring.io/projects/spring-boot)
-* spring-boot-actuator-autoconfigure (org.springframework.boot:spring-boot-actuator-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot)
-* spring-boot-autoconfigure (org.springframework.boot:spring-boot-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot)
+* Scala Library (org.scala-lang:scala-library:2.13.9 - https://www.scala-lang.org/)
+* Scala Compiler (org.scala-lang:scala-reflect:2.13.0 - https://www.scala-lang.org/)
+* scala-collection-compat (org.scala-lang.modules:scala-collection-compat_2.13:2.1.6 - http://www.scala-lang.org/)
+* scala-java8-compat (org.scala-lang.modules:scala-java8-compat_2.13:0.9.0 - http://www.scala-lang.org/)
+* scala-parser-combinators (org.scala-lang.modules:scala-parser-combinators_2.13:1.1.2 - http://www.scala-lang.org/)
+* scala-xml (org.scala-lang.modules:scala-xml_2.13:1.3.0 - http://www.scala-lang.org/)
+* JSONassert (org.skyscreamer:jsonassert:1.5.1 - https://github.com/skyscreamer/JSONassert)
+* JCL 1.2 implemented over SLF4J (org.slf4j:jcl-over-slf4j:1.7.36 - http://www.slf4j.org)
+* Spring AOP (org.springframework:spring-aop:5.3.27 - https://github.com/spring-projects/spring-framework)
+* Spring Beans (org.springframework:spring-beans:5.3.27 - https://github.com/spring-projects/spring-framework)
+* Spring Context (org.springframework:spring-context:5.3.27 - https://github.com/spring-projects/spring-framework)
+* Spring Context Support (org.springframework:spring-context-support:5.3.27 - https://github.com/spring-projects/spring-framework)
+* Spring Core (org.springframework:spring-core:5.3.27 - https://github.com/spring-projects/spring-framework)
+* Spring Expression Language (SpEL) (org.springframework:spring-expression:5.3.27 - https://github.com/spring-projects/spring-framework)
+* Spring Commons Logging Bridge (org.springframework:spring-jcl:5.3.27 - https://github.com/spring-projects/spring-framework)
+* Spring JDBC (org.springframework:spring-jdbc:5.3.27 - https://github.com/spring-projects/spring-framework)
+* Spring Object/Relational Mapping (org.springframework:spring-orm:5.3.27 - https://github.com/spring-projects/spring-framework)
+* Spring TestContext Framework (org.springframework:spring-test:5.3.27 - https://github.com/spring-projects/spring-framework)
+* Spring Transaction (org.springframework:spring-tx:5.3.27 - https://github.com/spring-projects/spring-framework)
+* Spring Web (org.springframework:spring-web:5.3.27 - https://github.com/spring-projects/spring-framework)
+* Spring Web MVC (org.springframework:spring-webmvc:5.3.27 - https://github.com/spring-projects/spring-framework)
+* spring-boot (org.springframework.boot:spring-boot:2.7.12 - https://spring.io/projects/spring-boot)
+* spring-boot-actuator (org.springframework.boot:spring-boot-actuator:2.7.12 - https://spring.io/projects/spring-boot)
+* spring-boot-actuator-autoconfigure (org.springframework.boot:spring-boot-actuator-autoconfigure:2.7.12 - https://spring.io/projects/spring-boot)
+* spring-boot-autoconfigure (org.springframework.boot:spring-boot-autoconfigure:2.7.12 - https://spring.io/projects/spring-boot)
 * Spring Boot Configuration Processor (org.springframework.boot:spring-boot-configuration-processor:2.0.0.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-tools/spring-boot-configuration-processor)
-* spring-boot-starter (org.springframework.boot:spring-boot-starter:2.6.8 - https://spring.io/projects/spring-boot)
+* spring-boot-starter (org.springframework.boot:spring-boot-starter:2.7.12 - https://spring.io/projects/spring-boot)
-* spring-boot-starter-actuator (org.springframework.boot:spring-boot-starter-actuator:2.6.8 - https://spring.io/projects/spring-boot)
+* spring-boot-starter-actuator (org.springframework.boot:spring-boot-starter-actuator:2.7.12 - https://spring.io/projects/spring-boot)
-* spring-boot-starter-aop (org.springframework.boot:spring-boot-starter-aop:2.6.8 - https://spring.io/projects/spring-boot)
+* spring-boot-starter-aop (org.springframework.boot:spring-boot-starter-aop:2.7.12 - https://spring.io/projects/spring-boot)
-* spring-boot-starter-cache (org.springframework.boot:spring-boot-starter-cache:2.6.8 - https://spring.io/projects/spring-boot)
+* spring-boot-starter-cache (org.springframework.boot:spring-boot-starter-cache:2.7.12 - https://spring.io/projects/spring-boot)
-* spring-boot-starter-data-rest (org.springframework.boot:spring-boot-starter-data-rest:2.6.8 - https://spring.io/projects/spring-boot)
+* spring-boot-starter-data-rest (org.springframework.boot:spring-boot-starter-data-rest:2.7.12 - https://spring.io/projects/spring-boot)
-* spring-boot-starter-json (org.springframework.boot:spring-boot-starter-json:2.6.8 - https://spring.io/projects/spring-boot)
+* spring-boot-starter-json (org.springframework.boot:spring-boot-starter-json:2.7.12 - https://spring.io/projects/spring-boot)
-* spring-boot-starter-log4j2 (org.springframework.boot:spring-boot-starter-log4j2:2.6.8 - https://spring.io/projects/spring-boot)
+* spring-boot-starter-log4j2 (org.springframework.boot:spring-boot-starter-log4j2:2.7.12 - https://spring.io/projects/spring-boot)
-* spring-boot-starter-security (org.springframework.boot:spring-boot-starter-security:2.6.8 - https://spring.io/projects/spring-boot)
+* spring-boot-starter-security (org.springframework.boot:spring-boot-starter-security:2.7.12 - https://spring.io/projects/spring-boot)
-* spring-boot-starter-test (org.springframework.boot:spring-boot-starter-test:2.6.8 - https://spring.io/projects/spring-boot)
+* spring-boot-starter-test (org.springframework.boot:spring-boot-starter-test:2.7.12 - https://spring.io/projects/spring-boot)
-* spring-boot-starter-tomcat (org.springframework.boot:spring-boot-starter-tomcat:2.6.8 - https://spring.io/projects/spring-boot)
+* spring-boot-starter-tomcat (org.springframework.boot:spring-boot-starter-tomcat:2.7.12 - https://spring.io/projects/spring-boot)
-* spring-boot-starter-web (org.springframework.boot:spring-boot-starter-web:2.6.8 - https://spring.io/projects/spring-boot)
+* spring-boot-starter-web (org.springframework.boot:spring-boot-starter-web:2.7.12 - https://spring.io/projects/spring-boot)
-* spring-boot-test (org.springframework.boot:spring-boot-test:2.6.8 - https://spring.io/projects/spring-boot)
+* spring-boot-test (org.springframework.boot:spring-boot-test:2.7.12 - https://spring.io/projects/spring-boot)
-* spring-boot-test-autoconfigure (org.springframework.boot:spring-boot-test-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot)
+* spring-boot-test-autoconfigure (org.springframework.boot:spring-boot-test-autoconfigure:2.7.12 - https://spring.io/projects/spring-boot)
-* Spring Data Core (org.springframework.data:spring-data-commons:2.6.4 - https://www.spring.io/spring-data/spring-data-commons)
+* Spring Data Core (org.springframework.data:spring-data-commons:2.7.12 - https://www.spring.io/spring-data/spring-data-commons)
-* Spring Data REST - Core (org.springframework.data:spring-data-rest-core:3.6.4 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-core)
+* Spring Data REST - Core (org.springframework.data:spring-data-rest-core:3.7.12 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-core)
* Spring Data REST - WebMVC (org.springframework.data:spring-data-rest-webmvc:3.6.4 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-webmvc)
|
* Spring Data REST - WebMVC (org.springframework.data:spring-data-rest-webmvc:3.7.12 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-webmvc)
|
||||||
* Spring HATEOAS (org.springframework.hateoas:spring-hateoas:1.4.2 - https://github.com/spring-projects/spring-hateoas)
|
* Spring HATEOAS (org.springframework.hateoas:spring-hateoas:1.5.4 - https://github.com/spring-projects/spring-hateoas)
|
||||||
* Spring Plugin - Core (org.springframework.plugin:spring-plugin-core:2.0.0.RELEASE - https://github.com/spring-projects/spring-plugin/spring-plugin-core)
|
* Spring Plugin - Core (org.springframework.plugin:spring-plugin-core:2.0.0.RELEASE - https://github.com/spring-projects/spring-plugin/spring-plugin-core)
|
||||||
* spring-security-config (org.springframework.security:spring-security-config:5.6.5 - https://spring.io/projects/spring-security)
|
* spring-security-config (org.springframework.security:spring-security-config:5.7.8 - https://spring.io/projects/spring-security)
|
||||||
* spring-security-core (org.springframework.security:spring-security-core:5.6.5 - https://spring.io/projects/spring-security)
|
* spring-security-core (org.springframework.security:spring-security-core:5.7.8 - https://spring.io/projects/spring-security)
|
||||||
* spring-security-crypto (org.springframework.security:spring-security-crypto:5.6.5 - https://spring.io/projects/spring-security)
|
* spring-security-crypto (org.springframework.security:spring-security-crypto:5.7.8 - https://spring.io/projects/spring-security)
|
||||||
* spring-security-test (org.springframework.security:spring-security-test:5.6.5 - https://spring.io/projects/spring-security)
|
* spring-security-test (org.springframework.security:spring-security-test:5.7.8 - https://spring.io/projects/spring-security)
|
||||||
* spring-security-web (org.springframework.security:spring-security-web:5.6.5 - https://spring.io/projects/spring-security)
|
* spring-security-web (org.springframework.security:spring-security-web:5.7.8 - https://spring.io/projects/spring-security)
|
||||||
* SWORD v2 :: Common Server Library (org.swordapp:sword2-server:1.0 - http://www.swordapp.org/)
|
* SWORD v2 :: Common Server Library (org.swordapp:sword2-server:1.0 - http://www.swordapp.org/)
|
||||||
* snappy-java (org.xerial.snappy:snappy-java:1.1.7.6 - https://github.com/xerial/snappy-java)
|
* snappy-java (org.xerial.snappy:snappy-java:1.1.7.6 - https://github.com/xerial/snappy-java)
|
||||||
* xml-matchers (org.xmlmatchers:xml-matchers:0.10 - http://code.google.com/p/xml-matchers/)
|
* xml-matchers (org.xmlmatchers:xml-matchers:0.10 - http://code.google.com/p/xml-matchers/)
|
||||||
* org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.8.0 - https://www.xmlunit.org/)
|
* org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.8.0 - https://www.xmlunit.org/)
|
||||||
* org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.8.4 - https://www.xmlunit.org/)
|
* org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.9.1 - https://www.xmlunit.org/)
|
||||||
* org.xmlunit:xmlunit-placeholders (org.xmlunit:xmlunit-placeholders:2.8.0 - https://www.xmlunit.org/xmlunit-placeholders/)
|
* org.xmlunit:xmlunit-placeholders (org.xmlunit:xmlunit-placeholders:2.8.0 - https://www.xmlunit.org/xmlunit-placeholders/)
|
||||||
* SnakeYAML (org.yaml:snakeyaml:1.29 - http://www.snakeyaml.org)
|
* SnakeYAML (org.yaml:snakeyaml:1.30 - https://bitbucket.org/snakeyaml/snakeyaml)
|
||||||
* software.amazon.ion:ion-java (software.amazon.ion:ion-java:1.0.2 - https://github.com/amznlabs/ion-java/)
|
* software.amazon.ion:ion-java (software.amazon.ion:ion-java:1.0.2 - https://github.com/amznlabs/ion-java/)
|
||||||
* Xalan Java Serializer (xalan:serializer:2.7.2 - http://xml.apache.org/xalan-j/)
|
* Xalan Java Serializer (xalan:serializer:2.7.2 - http://xml.apache.org/xalan-j/)
|
||||||
* xalan (xalan:xalan:2.7.0 - no url defined)
|
* xalan (xalan:xalan:2.7.0 - no url defined)
|
||||||
@@ -404,7 +421,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* coverity-escapers (com.coverity.security:coverity-escapers:1.1.1 - http://coverity.com/security)
* Java Advanced Imaging Image I/O Tools API core (standalone) (com.github.jai-imageio:jai-imageio-core:1.4.0 - https://github.com/jai-imageio/jai-imageio-core)
* JSONLD Java :: Core (com.github.jsonld-java:jsonld-java:0.5.1 - http://github.com/jsonld-java/jsonld-java/jsonld-java/)
* curvesapi (com.github.virtuald:curvesapi): 1.06 → 1.07 - https://github.com/virtuald/curvesapi
* Protocol Buffers [Core] (com.google.protobuf:protobuf-java:3.11.0 - https://developers.google.com/protocol-buffers/protobuf-java/)
* JZlib (com.jcraft:jzlib:1.1.3 - http://www.jcraft.com/jzlib/)
* dnsjava (dnsjava:dnsjava:2.1.7 - http://www.dnsjava.org)

@@ -426,11 +443,15 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* asm-commons (org.ow2.asm:asm-commons:8.0.1 - http://asm.ow2.io/)
* asm-tree (org.ow2.asm:asm-tree:7.1 - http://asm.ow2.org/)
* asm-util (org.ow2.asm:asm-util:7.1 - http://asm.ow2.org/)
* PostgreSQL JDBC Driver (org.postgresql:postgresql): 42.4.1 → 42.6.0 - https://jdbc.postgresql.org
* Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections)
* JMatIO (org.tallison:jmatio:1.5 - https://github.com/tballison/jmatio)
* XMLUnit for Java (xmlunit:xmlunit:1.3 - http://xmlunit.sourceforge.net/)

+ CC0:
+
+ * reactive-streams (org.reactivestreams:reactive-streams:1.0.2 - http://www.reactive-streams.org/)

Common Development and Distribution License (CDDL):

* istack common utility code runtime (com.sun.istack:istack-commons-runtime:3.0.7 - http://java.net/istack-commons/istack-commons-runtime/)

@@ -446,7 +467,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Java Servlet API (javax.servlet:javax.servlet-api:3.1.0 - http://servlet-spec.java.net)
* javax.transaction API (javax.transaction:javax.transaction-api:1.3 - http://jta-spec.java.net)
* jaxb-api (javax.xml.bind:jaxb-api:2.3.1 - https://github.com/javaee/jaxb-spec/jaxb-api)
* JHighlight (org.codelibs:jhighlight): 1.0.3 → 1.1.0 - https://github.com/codelibs/jhighlight
* HK2 API module (org.glassfish.hk2:hk2-api:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api)
* ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator)
* HK2 Implementation Utilities (org.glassfish.hk2:hk2-utils:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-utils)

@@ -489,34 +510,34 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - http://tomcat.apache.org/taglibs/standard/)
* Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client)
* Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client)
* Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server): 9.4.48.v20220622 → 9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server)
* Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server): 9.4.48.v20220622 → 9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server
* Jetty :: Servlet Annotations (org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-client)
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-continuation)
* Jetty :: Continuation (org.eclipse.jetty:jetty-continuation): 9.4.48.v20220622 → 9.4.51.v20230217 - https://eclipse.org/jetty/jetty-continuation
* Jetty :: Deployers (org.eclipse.jetty:jetty-deploy): 9.4.48.v20220622 → 9.4.51.v20230217 - https://eclipse.org/jetty/jetty-deploy
* Jetty :: Http Utility (org.eclipse.jetty:jetty-http): 9.4.48.v20220622 → 9.4.51.v20230217 - https://eclipse.org/jetty/jetty-http
* Jetty :: IO Utility (org.eclipse.jetty:jetty-io): 9.4.48.v20220622 → 9.4.51.v20230217 - https://eclipse.org/jetty/jetty-io
* Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-jmx)
* Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty)
* Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-rewrite)
* Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-security)
* Jetty :: Security (org.eclipse.jetty:jetty-security): 9.4.48.v20220622 → 9.4.51.v20230217 - https://eclipse.org/jetty/jetty-security
* Jetty :: Server Core (org.eclipse.jetty:jetty-server): 9.4.48.v20220622 → 9.4.51.v20230217 - https://eclipse.org/jetty/jetty-server
* Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet): 9.4.48.v20220622 → 9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlet
* Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets): 9.4.48.v20220622 → 9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlets
* Jetty :: Utilities (org.eclipse.jetty:jetty-util): 9.4.48.v20220622 → 9.4.51.v20230217 - https://eclipse.org/jetty/jetty-util
* Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax): 9.4.48.v20220622 → 9.4.51.v20230217 - https://eclipse.org/jetty/jetty-util-ajax
* Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp): 9.4.48.v20220622 → 9.4.51.v20230217 - https://eclipse.org/jetty/jetty-webapp
* Jetty :: XML utilities (org.eclipse.jetty:jetty-xml): 9.4.48.v20220622 → 9.4.51.v20230217 - https://eclipse.org/jetty/jetty-xml
* Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-client)
* Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common): 9.4.48.v20220622 → 9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-common
* Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-hpack)
* Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport)
* Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server): 9.4.48.v20220622 → 9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-server
* Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas)
* HK2 API module (org.glassfish.hk2:hk2-api:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api)
* ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator)

@@ -542,10 +563,10 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* msg-simple (com.github.java-json-tools:msg-simple:1.2 - https://github.com/java-json-tools/msg-simple)
* uri-template (com.github.java-json-tools:uri-template:0.10 - https://github.com/java-json-tools/uri-template)
* FindBugs-Annotations (com.google.code.findbugs:annotations:3.0.1u2 - http://findbugs.sourceforge.net/)
* JHighlight (org.codelibs:jhighlight): 1.0.3 → 1.1.0 - https://github.com/codelibs/jhighlight
* Hibernate ORM - hibernate-core (org.hibernate:hibernate-core): 5.6.5.Final → 5.6.15.Final - https://hibernate.org/orm
* Hibernate ORM - hibernate-jcache (org.hibernate:hibernate-jcache): 5.6.5.Final → 5.6.15.Final - https://hibernate.org/orm
* Hibernate ORM - hibernate-jpamodelgen (org.hibernate:hibernate-jpamodelgen): 5.6.5.Final → 5.6.15.Final - https://hibernate.org/orm
* Hibernate Commons Annotations (org.hibernate.common:hibernate-commons-annotations:5.1.2.Final - http://hibernate.org)
* im4java (org.im4java:im4java:1.4.0 - http://sourceforge.net/projects/im4java/)
* Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/)

@@ -562,9 +583,11 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines

MIT License:

+ * better-files (com.github.pathikrit:better-files_2.13:3.9.1 - https://github.com/pathikrit/better-files)
* Java SemVer (com.github.zafarkhaja:java-semver:0.9.0 - https://github.com/zafarkhaja/jsemver)
* dd-plist (com.googlecode.plist:dd-plist): 1.23 → 1.25 - http://www.github.com/3breadt/dd-plist
* DigitalCollections: IIIF API Library (de.digitalcollections.iiif:iiif-apis:0.3.9 - https://github.com/dbmdz/iiif-apis)
+ * s3mock (io.findify:s3mock_2.13:0.2.6 - https://github.com/findify/s3mock)
* JOpt Simple (net.sf.jopt-simple:jopt-simple:5.0.4 - http://jopt-simple.github.io/jopt-simple)
* Bouncy Castle S/MIME API (org.bouncycastle:bcmail-jdk15on:1.70 - https://www.bouncycastle.org/java.html)
* Bouncy Castle PKIX, CMS, EAC, TSP, PKCS, OCSP, CMP, and CRMF APIs (org.bouncycastle:bcpkix-jdk15on:1.70 - https://www.bouncycastle.org/java.html)

@@ -572,15 +595,14 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Bouncy Castle ASN.1 Extension and Utility APIs (org.bouncycastle:bcutil-jdk15on:1.70 - https://www.bouncycastle.org/java.html)
* org.brotli:dec (org.brotli:dec:0.1.2 - http://brotli.org/dec)
* Checker Qual (org.checkerframework:checker-qual:3.10.0 - https://checkerframework.org)
* Checker Qual (org.checkerframework:checker-qual): 3.5.0 → 3.31.0 - https://checkerframework.org
* jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* mockito-core (org.mockito:mockito-core:3.12.4 - https://github.com/mockito/mockito)
* mockito-inline (org.mockito:mockito-inline:3.12.4 - https://github.com/mockito/mockito)
* ORCID - Model (org.orcid:orcid-model:3.0.2 - http://github.com/ORCID/orcid-model)
- * JCL 1.2 implemented over SLF4J (org.slf4j:jcl-over-slf4j:1.7.25 - http://www.slf4j.org)
* JUL to SLF4J bridge (org.slf4j:jul-to-slf4j): 1.7.25 → 1.7.36 - http://www.slf4j.org
* SLF4J API Module (org.slf4j:slf4j-api): 1.7.25 → 1.7.36 - http://www.slf4j.org
* SLF4J Extensions Module (org.slf4j:slf4j-ext:1.7.28 - http://www.slf4j.org)
* HAL Browser (org.webjars:hal-browser:ad9b865 - http://webjars.org)
* toastr (org.webjars.bowergithub.codeseven:toastr:2.1.4 - http://webjars.org)

@@ -589,7 +611,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* jquery (org.webjars.bowergithub.jquery:jquery-dist:3.6.0 - https://www.webjars.org)
* urijs (org.webjars.bowergithub.medialize:uri.js:1.19.10 - https://www.webjars.org)
* bootstrap (org.webjars.bowergithub.twbs:bootstrap:4.6.1 - https://www.webjars.org)
* core-js (org.webjars.npm:core-js): 3.25.2 → 3.30.1 - https://www.webjars.org
* @json-editor/json-editor (org.webjars.npm:json-editor__json-editor:2.6.1 - https://www.webjars.org)

Mozilla Public License:

@@ -606,17 +628,14 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common)
* jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2)
* HdrHistogram (org.hdrhistogram:HdrHistogram:2.1.12 - http://hdrhistogram.github.io/HdrHistogram/)
+ * JSON in Java (org.json:json:20230227 - https://github.com/douglascrockford/JSON-java)
* LatencyUtils (org.latencyutils:LatencyUtils:2.0.3 - http://latencyutils.github.io/LatencyUtils/)
* Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections)
* XZ for Java (org.tukaani:xz:1.9 - https://tukaani.org/xz/java.html)

- The JSON License:
-
- * JSON in Java (org.json:json:20180130 - https://github.com/douglascrockford/JSON-java)

UnRar License:

* Java Unrar (com.github.junrar:junrar): 7.4.1 → 7.5.3 - https://github.com/junrar/junrar

Unicode/ICU License:
README.md
@@ -48,18 +48,7 @@ See [Running DSpace 7 with Docker Compose](dspace/src/main/docker-compose/README

 ## Contributing

-DSpace is a community built and supported project. We do not have a centralized development or support team,
-but have a dedicated group of volunteers who help us improve the software, documentation, resources, etc.
-
-We welcome contributions of any type. Here's a few basic guides that provide suggestions for contributing to DSpace:
-* [How to Contribute to DSpace](https://wiki.lyrasis.org/display/DSPACE/How+to+Contribute+to+DSpace): How to contribute in general (via code, documentation, bug reports, expertise, etc)
-* [Code Contribution Guidelines](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines): How to give back code or contribute features, bug fixes, etc.
-* [DSpace Community Advisory Team (DCAT)](https://wiki.lyrasis.org/display/cmtygp/DSpace+Community+Advisory+Team): If you are not a developer, we also have an interest group specifically for repository managers. The DCAT group meets virtually, once a month, and sends open invitations to join their meetings via the [DCAT mailing list](https://groups.google.com/d/forum/DSpaceCommunityAdvisoryTeam).
-
-We also encourage GitHub Pull Requests (PRs) at any time. Please see our [Development with Git](https://wiki.lyrasis.org/display/DSPACE/Development+with+Git) guide for more info.
-
-In addition, a listing of all known contributors to DSpace software can be
-found online at: https://wiki.lyrasis.org/display/DSPACE/DSpaceContributors
+See [Contributing documentation](CONTRIBUTING.md)

 ## Getting Help
|
@@ -92,9 +92,7 @@ For more information on CheckStyle configurations below, see: http://checkstyle.
         <!-- Requirements for Javadocs for methods -->
         <module name="JavadocMethod">
             <!-- All public methods MUST HAVE Javadocs -->
-            <!-- <property name="scope" value="public"/> -->
-            <!-- TODO: Above rule has been disabled because of large amount of missing public method Javadocs -->
-            <property name="scope" value="nothing"/>
+            <property name="scope" value="public"/>
             <!-- Allow params, throws and return tags to be optional -->
             <property name="allowMissingParamTags" value="true"/>
             <property name="allowMissingReturnTag" value="true"/>
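With `scope` back to `public`, Checkstyle now flags any public method that has no Javadoc comment at all, while the two `allowMissing*` properties keep `@param`/`@return` tags optional. A minimal illustration of what passes and fails under this configuration; the class and methods below are hypothetical, not DSpace code:

// Hypothetical example used only to illustrate the re-enabled rule; not part of the DSpace code base.
public class ItemTitleFormatter {

    /**
     * Trims the title and collapses runs of whitespace.
     * No @param or @return tags are required, because allowMissingParamTags
     * and allowMissingReturnTag are both set to true.
     */
    public String format(String title) {
        return title == null ? "" : title.trim().replaceAll("\\s+", " ");
    }

    // Violation under the re-enabled rule: a public method with no Javadoc comment at all.
    public boolean isBlank(String title) {
        return title == null || title.trim().isEmpty();
    }
}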
@@ -2,7 +2,7 @@ version: "3.7"

 services:
   dspace-cli:
-    image: "${DOCKER_OWNER:-dspace}/dspace-cli:${DSPACE_VER:-dspace-7_x}"
+    image: "${DOCKER_OWNER:-dspace}/dspace-cli:${DSPACE_VER:-latest}"
     container_name: dspace-cli
     build:
       context: .
@@ -28,7 +28,7 @@ services:
       # proxies.trusted.ipranges: This setting is required for a REST API running in Docker to trust requests
       # from the host machine. This IP range MUST correspond to the 'dspacenet' subnet defined above.
       proxies__P__trusted__P__ipranges: '172.23.0'
-    image: "${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-dspace-7_x-test}"
+    image: "${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-latest-test}"
     build:
       context: .
       dockerfile: Dockerfile.test
@@ -62,13 +62,17 @@ services:
         while (!</dev/tcp/dspacedb/5432) > /dev/null 2>&1; do sleep 1; done;
         /dspace/bin/dspace database migrate
         catalina.sh run
-  # DSpace database container
+  # DSpace PostgreSQL database container
   dspacedb:
     container_name: dspacedb
+    # Uses a custom Postgres image with pgcrypto installed
+    image: "${DOCKER_OWNER:-dspace}/dspace-postgres-pgcrypto:${DSPACE_VER:-latest}"
+    build:
+      # Must build out of subdirectory to have access to install script for pgcrypto
+      context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/
     environment:
       PGDATA: /pgdata
-    # Uses a custom Postgres image with pgcrypto installed
-    image: dspace/dspace-postgres-pgcrypto
+      POSTGRES_PASSWORD: dspace
     networks:
       dspacenet:
     ports:
@@ -77,12 +81,17 @@ services:
     stdin_open: true
     tty: true
     volumes:
+      # Keep Postgres data directory between reboots
       - pgdata:/pgdata
   # DSpace Solr container
   dspacesolr:
     container_name: dspacesolr
-    # Uses official Solr image at https://hub.docker.com/_/solr/
-    image: solr:8.11-slim
+    image: "${DOCKER_OWNER:-dspace}/dspace-solr:${DSPACE_VER:-latest}"
+    build:
+      context: .
+      dockerfile: ./dspace/src/main/docker/dspace-solr/Dockerfile
+      args:
+        SOLR_VERSION: "${SOLR_VER:-8.11}"
     networks:
       dspacenet:
     ports:
@@ -92,30 +101,25 @@ services:
     tty: true
     working_dir: /var/solr/data
     volumes:
-      # Mount our local Solr core configs so that they are available as Solr configsets on container
-      - ./dspace/solr/authority:/opt/solr/server/solr/configsets/authority
-      - ./dspace/solr/oai:/opt/solr/server/solr/configsets/oai
-      - ./dspace/solr/search:/opt/solr/server/solr/configsets/search
-      - ./dspace/solr/statistics:/opt/solr/server/solr/configsets/statistics
       # Keep Solr data directory between reboots
       - solr_data:/var/solr/data
-    # Initialize all DSpace Solr cores using the mounted local configsets (see above), then start Solr
+    # Initialize all DSpace Solr cores then start Solr:
     # * First, run precreate-core to create the core (if it doesn't yet exist). If exists already, this is a no-op
-    # * Second, copy updated configs from mounted configsets to this core. If it already existed, this updates core
-    #   to the latest configs. If it's a newly created core, this is a no-op.
+    # * Second, copy configsets to this core:
+    #   Updates to Solr configs require the container to be rebuilt/restarted: `docker compose -p d7 up -d --build dspacesolr`
     entrypoint:
       - /bin/bash
       - '-c'
       - |
         init-var-solr
         precreate-core authority /opt/solr/server/solr/configsets/authority
-        cp -r -u /opt/solr/server/solr/configsets/authority/* authority
+        cp -r /opt/solr/server/solr/configsets/authority/* authority
         precreate-core oai /opt/solr/server/solr/configsets/oai
-        cp -r -u /opt/solr/server/solr/configsets/oai/* oai
+        cp -r /opt/solr/server/solr/configsets/oai/* oai
         precreate-core search /opt/solr/server/solr/configsets/search
-        cp -r -u /opt/solr/server/solr/configsets/search/* search
+        cp -r /opt/solr/server/solr/configsets/search/* search
         precreate-core statistics /opt/solr/server/solr/configsets/statistics
-        cp -r -u /opt/solr/server/solr/configsets/statistics/* statistics
+        cp -r /opt/solr/server/solr/configsets/statistics/* statistics
         exec solr -f
 volumes:
   assetstore:
@@ -12,7 +12,7 @@
     <parent>
         <groupId>org.dspace</groupId>
         <artifactId>dspace-parent</artifactId>
-        <version>7.5-SNAPSHOT</version>
+        <version>8.0-SNAPSHOT</version>
         <relativePath>..</relativePath>
     </parent>

@@ -492,12 +492,6 @@
         <dependency>
             <groupId>jaxen</groupId>
             <artifactId>jaxen</artifactId>
-            <exclusions>
-                <exclusion>
-                    <artifactId>xom</artifactId>
-                    <groupId>xom</groupId>
-                </exclusion>
-            </exclusions>
         </dependency>
         <dependency>
             <groupId>org.jdom</groupId>
@@ -632,7 +626,7 @@
         <dependency>
             <groupId>dnsjava</groupId>
             <artifactId>dnsjava</artifactId>
-            <version>2.1.7</version>
+            <version>2.1.9</version>
         </dependency>

         <dependency>
@@ -668,7 +662,7 @@
         <dependency>
             <groupId>org.flywaydb</groupId>
             <artifactId>flyway-core</artifactId>
-            <version>8.4.4</version>
+            <version>8.5.13</version>
         </dependency>

         <!-- Google Analytics -->
@@ -776,7 +770,7 @@
         <dependency>
             <groupId>org.json</groupId>
             <artifactId>json</artifactId>
-            <version>20180130</version>
+            <version>20230227</version>
         </dependency>

         <!-- Useful for testing command-line tools -->
@@ -791,7 +785,7 @@
         <dependency>
             <groupId>com.opencsv</groupId>
             <artifactId>opencsv</artifactId>
-            <version>5.6</version>
+            <version>5.7.1</version>
         </dependency>

         <!-- Email templating -->
@@ -806,10 +800,11 @@
             <scope>test</scope>
         </dependency>

         <dependency>
             <groupId>org.apache.bcel</groupId>
             <artifactId>bcel</artifactId>
-            <version>6.4.0</version>
+            <version>6.7.0</version>
+            <scope>test</scope>
         </dependency>

         <!-- required for openaire api integration -->
@@ -817,6 +812,13 @@
             <groupId>eu.openaire</groupId>
             <artifactId>funders-model</artifactId>
             <version>2.0.0</version>
+            <exclusions>
+                <!-- Newer version pulled in via Jersey below -->
+                <exclusion>
+                    <groupId>org.javassist</groupId>
+                    <artifactId>javassist</artifactId>
+                </exclusion>
+            </exclusions>
         </dependency>

         <dependency>
@@ -838,37 +840,60 @@
             </exclusion>
         </exclusions>
     </dependency>

+    <dependency>
+        <groupId>io.findify</groupId>
+        <artifactId>s3mock_2.13</artifactId>
+        <version>0.2.6</version>
+        <scope>test</scope>
+        <exclusions>
+            <exclusion>
+                <groupId>com.amazonawsl</groupId>
+                <artifactId>aws-java-sdk-s3</artifactId>
+            </exclusion>
+            <exclusion>
+                <groupId>com.amazonaws</groupId>
+                <artifactId>aws-java-sdk-s3</artifactId>
+            </exclusion>
+        </exclusions>
+    </dependency>
+
 </dependencies>

 <dependencyManagement>
     <dependencies>
         <!-- for mockserver -->
-        <!-- Solve dependency convergence issues related to
+        <!-- Solve dependency convergence issues related to Solr and
              'mockserver-junit-rule' by selecting the versions we want to use. -->
         <dependency>
             <groupId>io.netty</groupId>
             <artifactId>netty-buffer</artifactId>
-            <version>4.1.68.Final</version>
+            <version>4.1.94.Final</version>
         </dependency>
         <dependency>
             <groupId>io.netty</groupId>
             <artifactId>netty-transport</artifactId>
-            <version>4.1.68.Final</version>
+            <version>4.1.94.Final</version>
         </dependency>
+        <dependency>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-transport-native-unix-common</artifactId>
+            <version>4.1.94.Final</version>
+        </dependency>
         <dependency>
             <groupId>io.netty</groupId>
             <artifactId>netty-common</artifactId>
-            <version>4.1.68.Final</version>
+            <version>4.1.94.Final</version>
         </dependency>
         <dependency>
             <groupId>io.netty</groupId>
             <artifactId>netty-handler</artifactId>
-            <version>4.1.68.Final</version>
+            <version>4.1.94.Final</version>
         </dependency>
         <dependency>
             <groupId>io.netty</groupId>
             <artifactId>netty-codec</artifactId>
-            <version>4.1.68.Final</version>
+            <version>4.1.94.Final</version>
         </dependency>
         <dependency>
             <groupId>org.apache.velocity</groupId>
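For context on the io.findify:s3mock_2.13 test dependency added earlier in this hunk: it runs an in-memory, S3-compatible endpoint that integration tests can point an ordinary AWS SDK client at (the aws-java-sdk-s3 artifacts it pulls in are excluded so the version DSpace already manages wins). The sketch below follows the pattern from the s3mock project README; the builder methods, port, and bucket name are assumptions for illustration, not code from this pull request:

// Hypothetical test sketch; the S3Mock builder calls follow the upstream s3mock README
// and are not taken from this pull request.
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.AnonymousAWSCredentials;
import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import io.findify.s3mock.S3Mock;

public class S3MockSketch {
    public static void main(String[] args) {
        // Start an in-memory S3-compatible endpoint on an arbitrary local port.
        S3Mock api = new S3Mock.Builder().withPort(8001).withInMemoryBackend().build();
        api.start();

        // Point a plain AWS SDK v1 client at the mock endpoint.
        AmazonS3 client = AmazonS3ClientBuilder.standard()
                .withPathStyleAccessEnabled(true)
                .withEndpointConfiguration(
                        new AwsClientBuilder.EndpointConfiguration("http://localhost:8001", "us-east-1"))
                .withCredentials(new AWSStaticCredentialsProvider(new AnonymousAWSCredentials()))
                .build();

        client.createBucket("assetstore-test");
        client.putObject("assetstore-test", "hello.txt", "bitstream content");
        System.out.println(client.getObjectAsString("assetstore-test", "hello.txt"));

        api.shutdown();
    }
}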
@@ -901,6 +926,12 @@
             <artifactId>swagger-core</artifactId>
             <version>1.6.2</version>
         </dependency>
+        <dependency>
+            <groupId>org.scala-lang</groupId>
+            <artifactId>scala-library</artifactId>
+            <version>2.13.11</version>
+            <scope>test</scope>
+        </dependency>
     </dependencies>
 </dependencyManagement>

@@ -7,33 +7,16 @@
  */
 package org.dspace.administer;

-import java.sql.SQLException;
-
 import org.apache.commons.cli.Options;
-import org.dspace.authorize.service.AuthorizeService;
-import org.dspace.core.Context;
 import org.dspace.scripts.configuration.ScriptConfiguration;
-import org.springframework.beans.factory.annotation.Autowired;

 /**
  * The {@link ScriptConfiguration} for the {@link ProcessCleaner} script.
  */
 public class ProcessCleanerConfiguration<T extends ProcessCleaner> extends ScriptConfiguration<T> {

-    @Autowired
-    private AuthorizeService authorizeService;
-
     private Class<T> dspaceRunnableClass;

-    @Override
-    public boolean isAllowedToExecute(Context context) {
-        try {
-            return authorizeService.isAdmin(context);
-        } catch (SQLException e) {
-            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
-        }
-    }
-
     @Override
     public Options getOptions() {
         if (options == null) {
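Net effect of this hunk, pieced together from the lines that remain: the configuration class no longer performs its own admin check and keeps only the runnable-class plumbing and the option definitions. A rough sketch of the resulting class; the getOptions() body is cut off in this hunk, so the single option and the getDspaceRunnableClass/setDspaceRunnableClass accessors below are assumptions taken from the usual ScriptConfiguration pattern, not from this diff:

// Sketch only; the option shown is a placeholder, not the script's real option set.
package org.dspace.administer;

import org.apache.commons.cli.Options;
import org.dspace.scripts.configuration.ScriptConfiguration;

public class ProcessCleanerConfiguration<T extends ProcessCleaner> extends ScriptConfiguration<T> {

    private Class<T> dspaceRunnableClass;

    @Override
    public Options getOptions() {
        if (options == null) {
            Options options = new Options();
            options.addOption("h", "help", false, "help");   // placeholder option for illustration
            this.options = options;
        }
        return options;
    }

    @Override
    public Class<T> getDspaceRunnableClass() {
        return dspaceRunnableClass;
    }

    @Override
    public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
        this.dspaceRunnableClass = dspaceRunnableClass;
    }
}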
|
@@ -0,0 +1,54 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.alerts;

/**
 * Enum representing the options for allowing sessions:
 *  ALLOW_ALL_SESSIONS - Will allow all users to log in and continue their sessions
 *  ALLOW_CURRENT_SESSIONS_ONLY - Will prevent non admin users from logging in, however logged-in users
 *  will remain logged in
 *  ALLOW_ADMIN_SESSIONS_ONLY - Only admin users can log in, non admin sessions will be interrupted
 *
 * NOTE: This functionality can be stored in the database, but no support is present right now to interrupt and prevent
 * sessions.
 */
public enum AllowSessionsEnum {
    ALLOW_ALL_SESSIONS("all"),
    ALLOW_CURRENT_SESSIONS_ONLY("current"),
    ALLOW_ADMIN_SESSIONS_ONLY("admin");

    private String allowSessionsType;

    AllowSessionsEnum(String allowSessionsType) {
        this.allowSessionsType = allowSessionsType;
    }

    public String getValue() {
        return allowSessionsType;
    }

    public static AllowSessionsEnum fromString(String alertAllowSessionType) {
        if (alertAllowSessionType == null) {
            return AllowSessionsEnum.ALLOW_ALL_SESSIONS;
        }

        switch (alertAllowSessionType) {
            case "all":
                return AllowSessionsEnum.ALLOW_ALL_SESSIONS;
            case "current":
                return AllowSessionsEnum.ALLOW_CURRENT_SESSIONS_ONLY;
            case "admin":
                return AllowSessionsEnum.ALLOW_ADMIN_SESSIONS_ONLY;
            default:
                throw new IllegalArgumentException("No corresponding enum value for provided string: "
                    + alertAllowSessionType);
        }
    }

}
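A small usage sketch for the enum above, showing the round trip through its backing string, which is what the SystemWideAlert entity below stores in its allow_sessions column; the wrapper class is hypothetical:

// Illustrative round trip between the enum constants and their persisted string form.
import org.dspace.alerts.AllowSessionsEnum;

public class AllowSessionsEnumExample {
    public static void main(String[] args) {
        AllowSessionsEnum restriction = AllowSessionsEnum.ALLOW_ADMIN_SESSIONS_ONLY;

        String stored = restriction.getValue();                    // "admin", the value kept in allow_sessions
        AllowSessionsEnum restored = AllowSessionsEnum.fromString(stored);
        System.out.println(restored);                              // ALLOW_ADMIN_SESSIONS_ONLY

        // null falls back to ALLOW_ALL_SESSIONS; an unknown string throws IllegalArgumentException.
        System.out.println(AllowSessionsEnum.fromString(null));    // ALLOW_ALL_SESSIONS
    }
}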
dspace-api/src/main/java/org/dspace/alerts/SystemWideAlert.java (new file, 179 lines)
@@ -0,0 +1,179 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.alerts;

import java.util.Date;
import javax.persistence.Cacheable;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;

import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.dspace.core.ReloadableEntity;
import org.hibernate.annotations.CacheConcurrencyStrategy;

/**
 * Database object representing system-wide alerts
 */
@Entity
@Cacheable
@org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, include = "non-lazy")
@Table(name = "systemwidealert")
public class SystemWideAlert implements ReloadableEntity<Integer> {

    @Id
    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "alert_id_seq")
    @SequenceGenerator(name = "alert_id_seq", sequenceName = "alert_id_seq", allocationSize = 1)
    @Column(name = "alert_id", unique = true, nullable = false)
    private Integer alertId;

    @Column(name = "message", nullable = false)
    private String message;

    @Column(name = "allow_sessions")
    private String allowSessions;

    @Column(name = "countdown_to")
    @Temporal(TemporalType.TIMESTAMP)
    private Date countdownTo;

    @Column(name = "active")
    private boolean active;

    protected SystemWideAlert() {
    }

    /**
     * This method returns the ID that the system-wide alert holds within the database
     *
     * @return The ID that the system-wide alert holds within the database
     */
    @Override
    public Integer getID() {
        return alertId;
    }

    /**
     * Set the ID for the system-wide alert
     *
     * @param alertID The ID to set
     */
    public void setID(final Integer alertID) {
        this.alertId = alertID;
    }

    /**
     * Retrieve the message of the system-wide alert
     *
     * @return the message of the system-wide alert
     */
    public String getMessage() {
        return message;
    }

    /**
     * Set the message of the system-wide alert
     *
     * @param message The message to set
     */
    public void setMessage(final String message) {
        this.message = message;
    }

    /**
     * Retrieve what kind of sessions are allowed while the system-wide alert is active
     *
     * @return what kind of sessions are allowed while the system-wide alert is active
     */
    public AllowSessionsEnum getAllowSessions() {
        return AllowSessionsEnum.fromString(allowSessions);
    }

    /**
     * Set what kind of sessions are allowed while the system-wide alert is active
     *
     * @param allowSessions Integer representing what kind of sessions are allowed
     */
    public void setAllowSessions(AllowSessionsEnum allowSessions) {
        this.allowSessions = allowSessions.getValue();
    }

    /**
     * Retrieve the date to which will be count down when the system-wide alert is active
     *
     * @return the date to which will be count down when the system-wide alert is active
     */
    public Date getCountdownTo() {
        return countdownTo;
    }

    /**
     * Set the date to which will be count down when the system-wide alert is active
     *
     * @param countdownTo The date to which will be count down
     */
    public void setCountdownTo(final Date countdownTo) {
        this.countdownTo = countdownTo;
    }

    /**
     * Retrieve whether the system-wide alert is active
     *
     * @return whether the system-wide alert is active
     */
    public boolean isActive() {
        return active;
    }

    /**
     * Set whether the system-wide alert is active
     *
     * @param active Whether the system-wide alert is active
     */
    public void setActive(final boolean active) {
        this.active = active;
    }

    /**
     * Return <code>true</code> if <code>other</code> is the same SystemWideAlert
     * as this object, <code>false</code> otherwise
     *
     * @param other object to compare to
     * @return <code>true</code> if object passed in represents the same
     * system-wide alert as this object
     */
    @Override
    public boolean equals(Object other) {
        return (other instanceof SystemWideAlert &&
            new EqualsBuilder().append(this.getID(), ((SystemWideAlert) other).getID())
                               .append(this.getMessage(), ((SystemWideAlert) other).getMessage())
                               .append(this.getAllowSessions(), ((SystemWideAlert) other).getAllowSessions())
                               .append(this.getCountdownTo(), ((SystemWideAlert) other).getCountdownTo())
                               .append(this.isActive(), ((SystemWideAlert) other).isActive())
                               .isEquals());
    }

    @Override
    public int hashCode() {
        return new HashCodeBuilder(17, 37)
            .append(this.getID())
            .append(this.getMessage())
            .append(this.getAllowSessions())
            .append(this.getCountdownTo())
            .append(this.isActive())
            .toHashCode();
    }

}
@@ -0,0 +1,129 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.alerts;

import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.List;

import org.apache.logging.log4j.Logger;
import org.dspace.alerts.dao.SystemWideAlertDAO;
import org.dspace.alerts.service.SystemWideAlertService;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.core.LogHelper;
import org.dspace.eperson.EPerson;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * The implementation for the {@link SystemWideAlertService} class
 */
public class SystemWideAlertServiceImpl implements SystemWideAlertService {

    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SystemWideAlertServiceImpl.class);

    @Autowired
    private SystemWideAlertDAO systemWideAlertDAO;

    @Autowired
    private AuthorizeService authorizeService;

    @Override
    public SystemWideAlert create(final Context context, final String message,
                                  final AllowSessionsEnum allowSessionsType,
                                  final Date countdownTo, final boolean active) throws SQLException,
            AuthorizeException {
        if (!authorizeService.isAdmin(context)) {
            throw new AuthorizeException(
                    "Only administrators can create a system-wide alert");
        }
        SystemWideAlert systemWideAlert = new SystemWideAlert();
        systemWideAlert.setMessage(message);
        systemWideAlert.setAllowSessions(allowSessionsType);
        systemWideAlert.setCountdownTo(countdownTo);
        systemWideAlert.setActive(active);

        SystemWideAlert createdAlert = systemWideAlertDAO.create(context, systemWideAlert);
        log.info(LogHelper.getHeader(context, "system_wide_alert_create",
                                     "System Wide Alert has been created with message: '" + message + "' and ID "
                                             + createdAlert.getID() + " and allowSessionsType " + allowSessionsType +
                                             " and active set to " + active));

        return createdAlert;
    }

    @Override
    public SystemWideAlert find(final Context context, final int alertId) throws SQLException {
        return systemWideAlertDAO.findByID(context, SystemWideAlert.class, alertId);
    }

    @Override
    public List<SystemWideAlert> findAll(final Context context) throws SQLException {
        return systemWideAlertDAO.findAll(context, SystemWideAlert.class);
    }

    @Override
    public List<SystemWideAlert> findAll(final Context context, final int limit, final int offset)
            throws SQLException {
        return systemWideAlertDAO.findAll(context, limit, offset);
    }

    @Override
    public List<SystemWideAlert> findAllActive(final Context context, final int limit, final int offset)
            throws SQLException {
        return systemWideAlertDAO.findAllActive(context, limit, offset);
    }

    @Override
    public void delete(final Context context, final SystemWideAlert systemWideAlert)
            throws SQLException, IOException, AuthorizeException {
        if (!authorizeService.isAdmin(context)) {
            throw new AuthorizeException(
                    "Only administrators can delete a system-wide alert");
        }
        systemWideAlertDAO.delete(context, systemWideAlert);
        log.info(LogHelper.getHeader(context, "system_wide_alert_delete",
                                     "System Wide Alert with ID " + systemWideAlert.getID() + " has been deleted"));

    }

    @Override
    public void update(final Context context, final SystemWideAlert systemWideAlert)
            throws SQLException, AuthorizeException {
        if (!authorizeService.isAdmin(context)) {
            throw new AuthorizeException(
                    "Only administrators can update a system-wide alert");
        }
        systemWideAlertDAO.save(context, systemWideAlert);

    }

    @Override
    public boolean canNonAdminUserLogin(Context context) throws SQLException {
        List<SystemWideAlert> active = findAllActive(context, 1, 0);
        if (active == null || active.isEmpty()) {
            return true;
        }
        return active.get(0).getAllowSessions() == AllowSessionsEnum.ALLOW_ALL_SESSIONS;
    }

    @Override
    public boolean canUserMaintainSession(Context context, EPerson ePerson) throws SQLException {
        if (authorizeService.isAdmin(context, ePerson)) {
            return true;
        }
        List<SystemWideAlert> active = findAllActive(context, 1, 0);
        if (active == null || active.isEmpty()) {
            return true;
        }
        return active.get(0).getAllowSessions() != AllowSessionsEnum.ALLOW_ADMIN_SESSIONS_ONLY;
    }
}
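For reviewers, a minimal usage sketch of the service above. It is not part of the commit: the service-manager lookup style (getServicesByType) and the Spring wiring are assumptions; only the create/canNonAdminUserLogin calls and the AllowSessionsEnum constants come from the code in this diff.

// Illustrative sketch only -- not part of this commit.
import java.util.Date;

import org.dspace.alerts.AllowSessionsEnum;
import org.dspace.alerts.SystemWideAlert;
import org.dspace.alerts.service.SystemWideAlertService;
import org.dspace.core.Context;
import org.dspace.utils.DSpace;

public class SystemWideAlertUsageSketch {

    public void announceMaintenance(Context context) throws Exception {
        // Assumption: the implementation is registered with the DSpace service manager.
        SystemWideAlertService alertService = new DSpace().getServiceManager()
                .getServicesByType(SystemWideAlertService.class).get(0);

        // Create an active alert that counts down to one hour from now and
        // only lets administrators keep their sessions.
        Date inOneHour = new Date(System.currentTimeMillis() + 60 * 60 * 1000);
        SystemWideAlert alert = alertService.create(context, "Maintenance starts soon",
                AllowSessionsEnum.ALLOW_ADMIN_SESSIONS_ONLY, inOneHour, true);

        // While this alert is active, regular users cannot start new sessions.
        boolean regularLoginAllowed = alertService.canNonAdminUserLogin(context);
        System.out.println("Alert " + alert.getID() + ", non-admin login allowed: " + regularLoginAllowed);
    }
}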
@@ -0,0 +1,45 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.alerts.dao;

import java.sql.SQLException;
import java.util.List;

import org.dspace.alerts.SystemWideAlert;
import org.dspace.core.Context;
import org.dspace.core.GenericDAO;

/**
 * This is the Data Access Object for the {@link SystemWideAlert} object
 */
public interface SystemWideAlertDAO extends GenericDAO<SystemWideAlert> {

    /**
     * Returns a list of all SystemWideAlert objects in the database
     *
     * @param context The relevant DSpace context
     * @param limit   The limit for the amount of SystemWideAlerts returned
     * @param offset  The offset for the SystemWideAlerts to be returned
     * @return The list of all SystemWideAlert objects in the database
     * @throws SQLException If something goes wrong
     */
    List<SystemWideAlert> findAll(Context context, int limit, int offset) throws SQLException;

    /**
     * Returns a list of all active SystemWideAlert objects in the database
     *
     * @param context The relevant DSpace context
     * @param limit   The limit for the amount of SystemWideAlerts returned
     * @param offset  The offset for the SystemWideAlerts to be returned
     * @return The list of all active SystemWideAlert objects in the database
     * @throws SQLException If something goes wrong
     */
    List<SystemWideAlert> findAllActive(Context context, int limit, int offset) throws SQLException;

}
@@ -0,0 +1,48 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.alerts.dao.impl;

import java.sql.SQLException;
import java.util.List;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;

import org.dspace.alerts.SystemWideAlert;
import org.dspace.alerts.SystemWideAlert_;
import org.dspace.alerts.dao.SystemWideAlertDAO;
import org.dspace.core.AbstractHibernateDAO;
import org.dspace.core.Context;

/**
 * Implementation class for the {@link SystemWideAlertDAO}
 */
public class SystemWideAlertDAOImpl extends AbstractHibernateDAO<SystemWideAlert> implements SystemWideAlertDAO {

    public List<SystemWideAlert> findAll(final Context context, final int limit, final int offset)
            throws SQLException {
        CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
        CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, SystemWideAlert.class);
        Root<SystemWideAlert> alertRoot = criteriaQuery.from(SystemWideAlert.class);
        criteriaQuery.select(alertRoot);

        return list(context, criteriaQuery, false, SystemWideAlert.class, limit, offset);
    }

    public List<SystemWideAlert> findAllActive(final Context context, final int limit, final int offset)
            throws SQLException {
        CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
        CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, SystemWideAlert.class);
        Root<SystemWideAlert> alertRoot = criteriaQuery.from(SystemWideAlert.class);
        criteriaQuery.select(alertRoot);
        criteriaQuery.where(criteriaBuilder.equal(alertRoot.get(SystemWideAlert_.active), true));

        return list(context, criteriaQuery, false, SystemWideAlert.class, limit, offset);
    }

}
@@ -0,0 +1,118 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.alerts.service;

import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.List;

import org.dspace.alerts.AllowSessionsEnum;
import org.dspace.alerts.SystemWideAlert;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;

/**
 * An interface for the SystemWideAlertService with methods regarding the SystemWideAlert workload
 */
public interface SystemWideAlertService {

    /**
     * This method will create a SystemWideAlert object in the database
     *
     * @param context           The relevant DSpace context
     * @param message           The message of the system-wide alert
     * @param allowSessionsType Which sessions need to be allowed for the system-wide alert
     * @param countdownTo       The date to count down to while the system-wide alert is active
     * @param active            Whether the system-wide alert is active
     * @return The created SystemWideAlert object
     * @throws SQLException       If something goes wrong
     * @throws AuthorizeException If the current user is not an administrator
     */
    SystemWideAlert create(Context context, String message, AllowSessionsEnum allowSessionsType,
                           Date countdownTo, boolean active
    ) throws SQLException, AuthorizeException;

    /**
     * This method will retrieve a SystemWideAlert object from the database with the given ID
     *
     * @param context The relevant DSpace context
     * @param alertId The alert id to search for in the database
     * @return The system-wide alert that holds the given alert id
     * @throws SQLException If something goes wrong
     */
    SystemWideAlert find(Context context, int alertId) throws SQLException;

    /**
     * Returns a list of all SystemWideAlert objects in the database
     *
     * @param context The relevant DSpace context
     * @return The list of all SystemWideAlert objects in the database
     * @throws SQLException If something goes wrong
     */
    List<SystemWideAlert> findAll(Context context) throws SQLException;

    /**
     * Returns a list of all SystemWideAlert objects in the database
     *
     * @param context The relevant DSpace context
     * @param limit   The limit for the amount of system-wide alerts returned
     * @param offset  The offset for the system-wide alerts to be returned
     * @return The list of all SystemWideAlert objects in the database
     * @throws SQLException If something goes wrong
     */
    List<SystemWideAlert> findAll(Context context, int limit, int offset) throws SQLException;


    /**
     * Returns a list of all active SystemWideAlert objects in the database
     *
     * @param context The relevant DSpace context
     * @param limit   The limit for the amount of system-wide alerts returned
     * @param offset  The offset for the system-wide alerts to be returned
     * @return The list of all active SystemWideAlert objects in the database
     * @throws SQLException If something goes wrong
     */
    List<SystemWideAlert> findAllActive(Context context, int limit, int offset) throws SQLException;

    /**
     * This method will delete the given SystemWideAlert object from the database
     *
     * @param context         The relevant DSpace context
     * @param systemWideAlert The SystemWideAlert object to be deleted
     * @throws SQLException If something goes wrong
     */
    void delete(Context context, SystemWideAlert systemWideAlert)
            throws SQLException, IOException, AuthorizeException;


    /**
     * This method will be used to update the given SystemWideAlert object in the database
     *
     * @param context         The relevant DSpace context
     * @param systemWideAlert The SystemWideAlert object to be updated
     * @throws SQLException If something goes wrong
     */
    void update(Context context, SystemWideAlert systemWideAlert) throws SQLException, AuthorizeException;


    /**
     * Verifies if the user connected to the current context can retain their session
     *
     * @param context The relevant DSpace context
     * @param ePerson The EPerson to check
     * @return if the user connected to the current context can retain their session
     */
    boolean canUserMaintainSession(Context context, EPerson ePerson) throws SQLException;


    /**
     * Verifies if a non-admin user can log in
     *
     * @param context The relevant DSpace context
     * @return if a non-admin user can log in
     */
    boolean canNonAdminUserLogin(Context context) throws SQLException;
}
@@ -0,0 +1,689 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.bulkaccesscontrol;

import static org.apache.commons.collections4.CollectionUtils.isEmpty;
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
import static org.dspace.authorize.ResourcePolicy.TYPE_CUSTOM;
import static org.dspace.authorize.ResourcePolicy.TYPE_INHERITED;
import static org.dspace.core.Constants.CONTENT_BUNDLE_NAME;

import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.TimeZone;
import java.util.UUID;
import java.util.function.Function;
import java.util.stream.Collectors;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.bulkaccesscontrol.exception.BulkAccessControlException;
import org.dspace.app.bulkaccesscontrol.model.AccessCondition;
import org.dspace.app.bulkaccesscontrol.model.AccessConditionBitstream;
import org.dspace.app.bulkaccesscontrol.model.AccessConditionItem;
import org.dspace.app.bulkaccesscontrol.model.BulkAccessConditionConfiguration;
import org.dspace.app.bulkaccesscontrol.model.BulkAccessControlInput;
import org.dspace.app.bulkaccesscontrol.service.BulkAccessConditionConfigurationService;
import org.dspace.app.mediafilter.factory.MediaFilterServiceFactory;
import org.dspace.app.mediafilter.service.MediaFilterService;
import org.dspace.app.util.DSpaceObjectUtilsImpl;
import org.dspace.app.util.service.DSpaceObjectUtils;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.Bitstream;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.indexobject.IndexableItem;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.submit.model.AccessConditionOption;
import org.dspace.utils.DSpace;

/**
 * Implementation of {@link DSpaceRunnable} to perform a bulk access control via json file.
 *
 * @author Mohamed Eskander (mohamed.eskander at 4science.it)
 *
 */
public class BulkAccessControl extends DSpaceRunnable<BulkAccessControlScriptConfiguration<BulkAccessControl>> {

    private DSpaceObjectUtils dSpaceObjectUtils;

    private SearchService searchService;

    private ItemService itemService;

    private String filename;

    private List<String> uuids;

    private Context context;

    private BulkAccessConditionConfigurationService bulkAccessConditionConfigurationService;

    private ResourcePolicyService resourcePolicyService;

    protected EPersonService epersonService;

    private ConfigurationService configurationService;

    private MediaFilterService mediaFilterService;

    private Map<String, AccessConditionOption> itemAccessConditions;

    private Map<String, AccessConditionOption> uploadAccessConditions;

    private final String ADD_MODE = "add";

    private final String REPLACE_MODE = "replace";

    private boolean help = false;

    protected String eperson = null;

    @Override
    @SuppressWarnings("unchecked")
    public void setup() throws ParseException {

        this.searchService = SearchUtils.getSearchService();
        this.itemService = ContentServiceFactory.getInstance().getItemService();
        this.resourcePolicyService = AuthorizeServiceFactory.getInstance().getResourcePolicyService();
        this.epersonService = EPersonServiceFactory.getInstance().getEPersonService();
        this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
        mediaFilterService = MediaFilterServiceFactory.getInstance().getMediaFilterService();
        mediaFilterService.setLogHandler(handler);
        this.bulkAccessConditionConfigurationService = new DSpace().getServiceManager().getServiceByName(
            "bulkAccessConditionConfigurationService", BulkAccessConditionConfigurationService.class);
        this.dSpaceObjectUtils = new DSpace().getServiceManager().getServiceByName(
            DSpaceObjectUtilsImpl.class.getName(), DSpaceObjectUtilsImpl.class);

        BulkAccessConditionConfiguration bulkAccessConditionConfiguration =
            bulkAccessConditionConfigurationService.getBulkAccessConditionConfiguration("default");

        itemAccessConditions = bulkAccessConditionConfiguration
            .getItemAccessConditionOptions()
            .stream()
            .collect(Collectors.toMap(AccessConditionOption::getName, Function.identity()));

        uploadAccessConditions = bulkAccessConditionConfiguration
            .getBitstreamAccessConditionOptions()
            .stream()
            .collect(Collectors.toMap(AccessConditionOption::getName, Function.identity()));

        help = commandLine.hasOption('h');
        filename = commandLine.getOptionValue('f');
        uuids = commandLine.hasOption('u') ? Arrays.asList(commandLine.getOptionValues('u')) : null;
    }

    @Override
    public void internalRun() throws Exception {

        if (help) {
            printHelp();
            return;
        }

        ObjectMapper mapper = new ObjectMapper();
        mapper.setTimeZone(TimeZone.getTimeZone("UTC"));
        BulkAccessControlInput accessControl;
        context = new Context(Context.Mode.BATCH_EDIT);
        setEPerson(context);

        if (!isAuthorized(context)) {
            handler.logError("Current user is not eligible to execute script bulk-access-control");
            throw new AuthorizeException("Current user is not eligible to execute script bulk-access-control");
        }

        if (uuids == null || uuids.size() == 0) {
            handler.logError("A target uuid must be provided with at least one uuid (run with -h flag for details)");
            throw new IllegalArgumentException("At least one target uuid must be provided");
        }

        InputStream inputStream = handler.getFileStream(context, filename)
            .orElseThrow(() -> new IllegalArgumentException("Error reading file, the file couldn't be "
                + "found for filename: " + filename));

        try {
            accessControl = mapper.readValue(inputStream, BulkAccessControlInput.class);
        } catch (IOException e) {
            handler.logError("Error parsing json file " + e.getMessage());
            throw new IllegalArgumentException("Error parsing json file", e);
        }
        try {
            validate(accessControl);
            updateItemsAndBitstreamsPolices(accessControl);
            context.complete();
        } catch (Exception e) {
            handler.handleException(e);
            context.abort();
        }
    }

    /**
     * Validate the mapped json data: it must provide an item node,
     * a bitstream node or both of them, and the item node and the
     * bitstream node are each validated when provided.
     *
     * @param accessControl mapped json data
     * @throws SQLException if something goes wrong in the database
     * @throws BulkAccessControlException if accessControl is invalid
     */
    private void validate(BulkAccessControlInput accessControl) throws SQLException {

        AccessConditionItem item = accessControl.getItem();
        AccessConditionBitstream bitstream = accessControl.getBitstream();

        if (Objects.isNull(item) && Objects.isNull(bitstream)) {
            handler.logError("item or bitstream node must be provided");
            throw new BulkAccessControlException("item or bitstream node must be provided");
        }

        if (Objects.nonNull(item)) {
            validateItemNode(item);
        }

        if (Objects.nonNull(bitstream)) {
            validateBitstreamNode(bitstream);
        }
    }

    /**
     * Validate the item node: the item mode must be provided with
     * value 'add' or 'replace', and when the mode equals 'add' the
     * accessConditions information must be provided as well. The
     * accessConditions themselves are also validated.
     *
     * @param item the item node
     * @throws BulkAccessControlException if item node is invalid
     */
    private void validateItemNode(AccessConditionItem item) {
        String mode = item.getMode();
        List<AccessCondition> accessConditions = item.getAccessConditions();

        if (StringUtils.isEmpty(mode)) {
            handler.logError("item mode node must be provided");
            throw new BulkAccessControlException("item mode node must be provided");
        } else if (!(StringUtils.equalsAny(mode, ADD_MODE, REPLACE_MODE))) {
            handler.logError("wrong value for item mode<" + mode + ">");
            throw new BulkAccessControlException("wrong value for item mode<" + mode + ">");
        } else if (ADD_MODE.equals(mode) && isEmpty(accessConditions)) {
            handler.logError("accessConditions of item must be provided with mode<" + ADD_MODE + ">");
            throw new BulkAccessControlException(
                "accessConditions of item must be provided with mode<" + ADD_MODE + ">");
        }

        for (AccessCondition accessCondition : accessConditions) {
            validateAccessCondition(accessCondition);
        }
    }

    /**
     * Validate the bitstream node: the bitstream mode must be provided
     * with value 'add' or 'replace', and when the mode equals 'add' the
     * accessConditions information must be provided as well. The
     * constraint information and the accessConditions are also validated.
     *
     * @param bitstream the bitstream node
     * @throws SQLException if something goes wrong in the database
     * @throws BulkAccessControlException if bitstream node is invalid
     */
    private void validateBitstreamNode(AccessConditionBitstream bitstream) throws SQLException {
        String mode = bitstream.getMode();
        List<AccessCondition> accessConditions = bitstream.getAccessConditions();

        if (StringUtils.isEmpty(mode)) {
            handler.logError("bitstream mode node must be provided");
            throw new BulkAccessControlException("bitstream mode node must be provided");
        } else if (!(StringUtils.equalsAny(mode, ADD_MODE, REPLACE_MODE))) {
            handler.logError("wrong value for bitstream mode<" + mode + ">");
            throw new BulkAccessControlException("wrong value for bitstream mode<" + mode + ">");
        } else if (ADD_MODE.equals(mode) && isEmpty(accessConditions)) {
            handler.logError("accessConditions of bitstream must be provided with mode<" + ADD_MODE + ">");
            throw new BulkAccessControlException(
                "accessConditions of bitstream must be provided with mode<" + ADD_MODE + ">");
        }

        validateConstraint(bitstream);

        for (AccessCondition accessCondition : bitstream.getAccessConditions()) {
            validateAccessCondition(accessCondition);
        }
    }

    /**
     * Validate the constraint node if provided: constraint isn't
     * supported when multiple uuids are provided or when the uuid
     * isn't an Item.
     *
     * @param bitstream the bitstream node
     * @throws SQLException if something goes wrong in the database
     * @throws BulkAccessControlException if constraint node is invalid
     */
    private void validateConstraint(AccessConditionBitstream bitstream) throws SQLException {
        if (uuids.size() > 1 && containsConstraints(bitstream)) {
            handler.logError("constraint isn't supported when multiple uuids are provided");
            throw new BulkAccessControlException("constraint isn't supported when multiple uuids are provided");
        } else if (uuids.size() == 1 && containsConstraints(bitstream)) {
            DSpaceObject dso =
                dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(uuids.get(0)));

            if (Objects.nonNull(dso) && dso.getType() != Constants.ITEM) {
                handler.logError("constraint is not supported when uuid isn't an Item");
                throw new BulkAccessControlException("constraint is not supported when uuid isn't an Item");
            }
        }
    }

    /**
     * Validate the access condition: the access condition name must
     * equal one of the configured access conditions, then call
     * {@link AccessConditionOption#validateResourcePolicy(
     * Context, String, Date, Date)}; if an exception happens, it's invalid.
     *
     * @param accessCondition the accessCondition
     * @throws BulkAccessControlException if the accessCondition is invalid
     */
    private void validateAccessCondition(AccessCondition accessCondition) {

        if (!itemAccessConditions.containsKey(accessCondition.getName())) {
            handler.logError("wrong access condition <" + accessCondition.getName() + ">");
            throw new BulkAccessControlException("wrong access condition <" + accessCondition.getName() + ">");
        }

        try {
            itemAccessConditions.get(accessCondition.getName()).validateResourcePolicy(
                context, accessCondition.getName(), accessCondition.getStartDate(), accessCondition.getEndDate());
        } catch (Exception e) {
            handler.logError("invalid access condition, " + e.getMessage());
            handler.handleException(e);
        }
    }

    /**
     * find all items of provided {@link #uuids} from solr,
     * then update the resource policies of items
     * or bitstreams of items (only bitstreams of ORIGINAL bundles)
     * and derivative bitstreams, or both of them.
     *
     * @param accessControl the access control input
     * @throws SQLException if something goes wrong in the database
     * @throws SearchServiceException if a search error occurs
     * @throws AuthorizeException if an authorization error occurs
     */
    private void updateItemsAndBitstreamsPolices(BulkAccessControlInput accessControl)
        throws SQLException, SearchServiceException, AuthorizeException {

        int counter = 0;
        int start = 0;
        int limit = 20;

        String query = buildSolrQuery(uuids);

        Iterator<Item> itemIterator = findItems(query, start, limit);

        while (itemIterator.hasNext()) {

            Item item = context.reloadEntity(itemIterator.next());

            if (Objects.nonNull(accessControl.getItem())) {
                updateItemPolicies(item, accessControl);
            }

            if (Objects.nonNull(accessControl.getBitstream())) {
                updateBitstreamsPolicies(item, accessControl);
            }

            context.commit();
            context.uncacheEntity(item);
            counter++;

            if (counter == limit) {
                counter = 0;
                start += limit;
                itemIterator = findItems(query, start, limit);
            }
        }
    }

    private String buildSolrQuery(List<String> uuids) throws SQLException {
        String[] query = new String[uuids.size()];

        for (int i = 0; i < query.length; i++) {
            DSpaceObject dso = dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(uuids.get(i)));

            if (dso.getType() == Constants.COMMUNITY) {
                query[i] = "location.comm:" + dso.getID();
            } else if (dso.getType() == Constants.COLLECTION) {
                query[i] = "location.coll:" + dso.getID();
            } else if (dso.getType() == Constants.ITEM) {
                query[i] = "search.resourceid:" + dso.getID();
            }
        }
        return StringUtils.joinWith(" OR ", query);
    }

    private Iterator<Item> findItems(String query, int start, int limit)
        throws SearchServiceException {

        DiscoverQuery discoverQuery = buildDiscoveryQuery(query, start, limit);

        return searchService.search(context, discoverQuery)
                            .getIndexableObjects()
                            .stream()
                            .map(indexableObject ->
                                ((IndexableItem) indexableObject).getIndexedObject())
                            .collect(Collectors.toList())
                            .iterator();
    }

    private DiscoverQuery buildDiscoveryQuery(String query, int start, int limit) {
        DiscoverQuery discoverQuery = new DiscoverQuery();
        discoverQuery.setDSpaceObjectFilter(IndexableItem.TYPE);
        discoverQuery.setQuery(query);
        discoverQuery.setStart(start);
        discoverQuery.setMaxResults(limit);

        return discoverQuery;
    }

    /**
     * update the item resource policies,
     * when mode equals to 'replace' will remove
     * all current resource polices of types 'TYPE_CUSTOM'
     * and 'TYPE_INHERITED' then, set the new resource policies.
     *
     * @param item the item
     * @param accessControl the access control input
     * @throws SQLException if something goes wrong in the database
     * @throws AuthorizeException if an authorization error occurs
     */
    private void updateItemPolicies(Item item, BulkAccessControlInput accessControl)
        throws SQLException, AuthorizeException {

        AccessConditionItem acItem = accessControl.getItem();

        if (REPLACE_MODE.equals(acItem.getMode())) {
            removeReadPolicies(item, TYPE_CUSTOM);
            removeReadPolicies(item, TYPE_INHERITED);
        }

        setItemPolicies(item, accessControl);
        logInfo(acItem.getAccessConditions(), acItem.getMode(), item);
    }

    /**
     * create the new resource policies of item.
     * then, call {@link ItemService#adjustItemPolicies(
     * Context, Item, Collection)} to adjust item's default policies.
     *
     * @param item the item
     * @param accessControl the access control input
     * @throws SQLException if something goes wrong in the database
     * @throws AuthorizeException if an authorization error occurs
     */
    private void setItemPolicies(Item item, BulkAccessControlInput accessControl)
        throws SQLException, AuthorizeException {

        accessControl
            .getItem()
            .getAccessConditions()
            .forEach(accessCondition -> createResourcePolicy(item, accessCondition,
                itemAccessConditions.get(accessCondition.getName())));

        itemService.adjustItemPolicies(context, item, item.getOwningCollection());
    }

    /**
     * update the resource policies of all item's bitstreams
     * or bitstreams specified into constraint node,
     * and derivative bitstreams.
     *
     * <strong>NOTE:</strong> only bitstreams of ORIGINAL bundles
     *
     * @param item the item that contains the bitstreams
     * @param accessControl the access control input
     */
    private void updateBitstreamsPolicies(Item item, BulkAccessControlInput accessControl) {
        AccessConditionBitstream.Constraint constraints = accessControl.getBitstream().getConstraints();

        // look over all the bundles and force initialization of bitstreams collection
        // to avoid lazy initialization exception
        long count = item.getBundles()
                         .stream()
                         .flatMap(bundle ->
                             bundle.getBitstreams().stream())
                         .count();

        item.getBundles(CONTENT_BUNDLE_NAME).stream()
            .flatMap(bundle -> bundle.getBitstreams().stream())
            .filter(bitstream -> constraints == null ||
                constraints.getUuid() == null ||
                constraints.getUuid().size() == 0 ||
                constraints.getUuid().contains(bitstream.getID().toString()))
            .forEach(bitstream -> updateBitstreamPolicies(bitstream, item, accessControl));
    }

    /**
     * check that the bitstream node exists,
     * contains a constraint node,
     * and that the constraint contains uuids.
     *
     * @param bitstream the bitstream node
     * @return true when uuids of constraint of bitstream is not empty,
     * otherwise false
     */
    private boolean containsConstraints(AccessConditionBitstream bitstream) {
        return Objects.nonNull(bitstream) &&
            Objects.nonNull(bitstream.getConstraints()) &&
            isNotEmpty(bitstream.getConstraints().getUuid());
    }

    /**
     * update the bitstream resource policies,
     * when mode equals to replace will remove
     * all current resource polices of types 'TYPE_CUSTOM'
     * and 'TYPE_INHERITED' then, set the new resource policies.
     *
     * @param bitstream the bitstream
     * @param item the item of bitstream
     * @param accessControl the access control input
     * @throws RuntimeException if something goes wrong in the database
     * or an authorization error occurs
     */
    private void updateBitstreamPolicies(Bitstream bitstream, Item item, BulkAccessControlInput accessControl) {

        AccessConditionBitstream acBitstream = accessControl.getBitstream();

        if (REPLACE_MODE.equals(acBitstream.getMode())) {
            removeReadPolicies(bitstream, TYPE_CUSTOM);
            removeReadPolicies(bitstream, TYPE_INHERITED);
        }

        try {
            setBitstreamPolicies(bitstream, item, accessControl);
            logInfo(acBitstream.getAccessConditions(), acBitstream.getMode(), bitstream);
        } catch (SQLException | AuthorizeException e) {
            throw new RuntimeException(e);
        }

    }

    /**
     * remove dspace object's read policies.
     *
     * @param dso the dspace object
     * @param type resource policy type
     * @throws BulkAccessControlException if something goes wrong
     * in the database or an authorization error occurs
     */
    private void removeReadPolicies(DSpaceObject dso, String type) {
        try {
            resourcePolicyService.removePolicies(context, dso, type, Constants.READ);
        } catch (SQLException | AuthorizeException e) {
            throw new BulkAccessControlException(e);
        }
    }

    /**
     * create the new resource policies of bitstream.
     * then, call {@link ItemService#adjustItemPolicies(
     * Context, Item, Collection)} to adjust bitstream's default policies.
     * and also update the resource policies of its derivative bitstreams.
     *
     * @param bitstream the bitstream
     * @param item the item of bitstream
     * @param accessControl the access control input
     * @throws SQLException if something goes wrong in the database
     * @throws AuthorizeException if an authorization error occurs
     */
    private void setBitstreamPolicies(Bitstream bitstream, Item item, BulkAccessControlInput accessControl)
        throws SQLException, AuthorizeException {

        accessControl.getBitstream()
                     .getAccessConditions()
                     .forEach(accessCondition -> createResourcePolicy(bitstream, accessCondition,
                         uploadAccessConditions.get(accessCondition.getName())));

        itemService.adjustBitstreamPolicies(context, item, item.getOwningCollection(), bitstream);
        mediaFilterService.updatePoliciesOfDerivativeBitstreams(context, item, bitstream);
    }

    /**
     * create the resource policy from the information
     * that comes from the access condition.
     *
     * @param obj the dspace object
     * @param accessCondition the access condition
     * @param accessConditionOption the access condition option
     * @throws BulkAccessControlException if an exception occurs
     */
    private void createResourcePolicy(DSpaceObject obj, AccessCondition accessCondition,
                                      AccessConditionOption accessConditionOption) {

        String name = accessCondition.getName();
        String description = accessCondition.getDescription();
        Date startDate = accessCondition.getStartDate();
        Date endDate = accessCondition.getEndDate();

        try {
            accessConditionOption.createResourcePolicy(context, obj, name, description, startDate, endDate);
        } catch (Exception e) {
            throw new BulkAccessControlException(e);
        }
    }

    /**
     * Set the eperson in the context
     *
     * @param context the context
     * @throws SQLException if database error
     */
    protected void setEPerson(Context context) throws SQLException {
        EPerson myEPerson = epersonService.find(context, this.getEpersonIdentifier());

        if (myEPerson == null) {
            handler.logError("EPerson cannot be found: " + this.getEpersonIdentifier());
            throw new UnsupportedOperationException("EPerson cannot be found: " + this.getEpersonIdentifier());
        }

        context.setCurrentUser(myEPerson);
    }

    private void logInfo(List<AccessCondition> accessConditions, String mode, DSpaceObject dso) {
        String type = dso.getClass().getSimpleName();

        if (REPLACE_MODE.equals(mode) && isEmpty(accessConditions)) {
            handler.logInfo("Cleaning " + type + " {" + dso.getID() + "} policies");
            handler.logInfo("Inheriting policies from owning Collection in " + type + " {" + dso.getID() + "}");
            return;
        }

        StringBuilder message = new StringBuilder();
        message.append(mode.equals(ADD_MODE) ? "Adding " : "Replacing ")
               .append(type)
               .append(" {")
               .append(dso.getID())
               .append("} policy")
               .append(mode.equals(ADD_MODE) ? " with " : " to ")
               .append("access conditions:");

        AppendAccessConditionsInfo(message, accessConditions);

        handler.logInfo(message.toString());

        if (REPLACE_MODE.equals(mode) && isAppendModeEnabled()) {
            handler.logInfo("Inheriting policies from owning Collection in " + type + " {" + dso.getID() + "}");
        }
    }

    private void AppendAccessConditionsInfo(StringBuilder message, List<AccessCondition> accessConditions) {
        DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
        message.append("{");

        for (int i = 0; i < accessConditions.size(); i++) {
            message.append(accessConditions.get(i).getName());

            Optional.ofNullable(accessConditions.get(i).getStartDate())
                    .ifPresent(date -> message.append(", start_date=" + dateFormat.format(date)));

            Optional.ofNullable(accessConditions.get(i).getEndDate())
                    .ifPresent(date -> message.append(", end_date=" + dateFormat.format(date)));

            if (i != accessConditions.size() - 1) {
                message.append(", ");
            }
        }

        message.append("}");
    }

    private boolean isAppendModeEnabled() {
        return configurationService.getBooleanProperty("core.authorization.installitem.inheritance-read.append-mode");
    }

    protected boolean isAuthorized(Context context) {
        return true;
    }

    @Override
    @SuppressWarnings("unchecked")
    public BulkAccessControlScriptConfiguration<BulkAccessControl> getScriptConfiguration() {
        return new DSpace().getServiceManager()
                           .getServiceByName("bulk-access-control", BulkAccessControlScriptConfiguration.class);
    }

}
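To make the expected input of this script easier to review, here is a minimal sketch of a JSON document being mapped onto BulkAccessControlInput the same way internalRun() does it. It is not part of the commit: the property names ("item", "bitstream", "mode", "accessConditions", "constraints", "uuid", "name", "startDate") are assumptions inferred from the getters used by validate(), and condition names such as "openaccess" or "embargo" depend on the local access-condition configuration.

// Illustrative sketch only -- not part of this commit.
// Property names and access-condition names are assumptions; see the model
// classes (BulkAccessControlInput, AccessConditionItem, AccessConditionBitstream)
// for the authoritative mapping.
import com.fasterxml.jackson.databind.ObjectMapper;
import org.dspace.app.bulkaccesscontrol.model.BulkAccessControlInput;

public class BulkAccessControlInputSketch {

    public static void main(String[] args) throws Exception {
        String json =
            "{\n"
            + "  \"item\": {\n"
            + "    \"mode\": \"replace\",\n"
            + "    \"accessConditions\": [ { \"name\": \"embargo\", \"startDate\": \"2024-01-01\" } ]\n"
            + "  },\n"
            + "  \"bitstream\": {\n"
            + "    \"mode\": \"add\",\n"
            + "    \"constraints\": { \"uuid\": [] },\n"
            + "    \"accessConditions\": [ { \"name\": \"openaccess\" } ]\n"
            + "  }\n"
            + "}";

        // Same deserialization path as internalRun(): Jackson maps the file onto
        // BulkAccessControlInput before validate() and the policy updates run.
        BulkAccessControlInput accessControl = new ObjectMapper().readValue(json, BulkAccessControlInput.class);
        System.out.println(accessControl.getItem().getMode());
    }
}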
@@ -0,0 +1,66 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.bulkaccesscontrol;

import java.sql.SQLException;
import java.util.Arrays;
import java.util.UUID;
import java.util.stream.Collectors;

import org.apache.commons.lang3.StringUtils;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.scripts.DSpaceCommandLineParameter;

/**
 * Extension of {@link BulkAccessControl} for CLI.
 *
 * @author Mohamed Eskander (mohamed.eskander at 4science.it)
 *
 */
public class BulkAccessControlCli extends BulkAccessControl {

    @Override
    protected void setEPerson(Context context) throws SQLException {
        EPerson myEPerson;
        eperson = commandLine.getOptionValue('e');

        if (eperson == null) {
            handler.logError("An eperson to do the Bulk Access Control must be specified " +
                "(run with -h flag for details)");
            throw new UnsupportedOperationException("An eperson to do the Bulk Access Control must be specified");
        }

        if (StringUtils.contains(eperson, '@')) {
            myEPerson = epersonService.findByEmail(context, eperson);
        } else {
            myEPerson = epersonService.find(context, UUID.fromString(eperson));
        }

        if (myEPerson == null) {
            handler.logError("EPerson cannot be found: " + eperson + " (run with -h flag for details)");
            throw new UnsupportedOperationException("EPerson cannot be found: " + eperson);
        }

        context.setCurrentUser(myEPerson);
    }

    @Override
    protected boolean isAuthorized(Context context) {

        if (context.getCurrentUser() == null) {
            return false;
        }

        return getScriptConfiguration().isAllowedToExecute(context,
            Arrays.stream(commandLine.getOptions())
                  .map(option ->
                      new DSpaceCommandLineParameter("-" + option.getOpt(), option.getValue()))
                  .collect(Collectors.toList()));
    }
}
@@ -0,0 +1,42 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.bulkaccesscontrol;

import java.io.InputStream;

import org.apache.commons.cli.Options;

/**
 * Extension of {@link BulkAccessControlScriptConfiguration} for CLI.
 *
 * @author Mohamed Eskander (mohamed.eskander at 4science.it)
 *
 */
public class BulkAccessControlCliScriptConfiguration<T extends BulkAccessControlCli>
    extends BulkAccessControlScriptConfiguration<T> {

    @Override
    public Options getOptions() {
        Options options = new Options();

        options.addOption("u", "uuid", true, "target uuids of communities/collections/items");
        options.getOption("u").setType(String.class);
        options.getOption("u").setRequired(true);

        options.addOption("f", "file", true, "source json file");
        options.getOption("f").setType(InputStream.class);
        options.getOption("f").setRequired(true);

        options.addOption("e", "eperson", true, "email of EPerson used to perform actions");
        options.getOption("e").setRequired(true);

        options.addOption("h", "help", false, "help");

        return options;
    }
}
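For reference, with the options declared above a command-line run of this script would look something like "dspace bulk-access-control -u <target-uuid> -f access-control.json -e admin@example.org"; the "dspace" launcher name and the registration of the "bulk-access-control" script depend on the local scripts configuration, while the -u, -f, -e and -h options themselves are the ones defined in this class.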
@@ -0,0 +1,110 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.bulkaccesscontrol;

import java.io.InputStream;
import java.sql.SQLException;
import java.util.List;
import java.util.Objects;
import java.util.UUID;
import java.util.stream.Collectors;

import org.apache.commons.cli.Options;
import org.dspace.app.util.DSpaceObjectUtilsImpl;
import org.dspace.app.util.service.DSpaceObjectUtils;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import org.dspace.scripts.DSpaceCommandLineParameter;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.dspace.utils.DSpace;

/**
 * Script configuration for {@link BulkAccessControl}.
 *
 * @author Mohamed Eskander (mohamed.eskander at 4science.it)
 *
 * @param <T> the {@link BulkAccessControl} type
 */
public class BulkAccessControlScriptConfiguration<T extends BulkAccessControl> extends ScriptConfiguration<T> {

    private Class<T> dspaceRunnableClass;

    @Override
    public boolean isAllowedToExecute(Context context, List<DSpaceCommandLineParameter> commandLineParameters) {

        try {
            if (Objects.isNull(commandLineParameters)) {
                return authorizeService.isAdmin(context) || authorizeService.isComColAdmin(context)
                    || authorizeService.isItemAdmin(context);
            } else {
                List<String> dspaceObjectIDs =
                    commandLineParameters.stream()
                                         .filter(parameter -> "-u".equals(parameter.getName()))
                                         .map(DSpaceCommandLineParameter::getValue)
                                         .collect(Collectors.toList());

                DSpaceObjectUtils dSpaceObjectUtils = new DSpace().getServiceManager().getServiceByName(
                    DSpaceObjectUtilsImpl.class.getName(), DSpaceObjectUtilsImpl.class);

                for (String dspaceObjectID : dspaceObjectIDs) {

                    DSpaceObject dso = dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(dspaceObjectID));

                    if (Objects.isNull(dso)) {
                        throw new IllegalArgumentException();
                    }

                    if (!authorizeService.isAdmin(context, dso)) {
                        return false;
                    }
                }
            }
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }

        return true;
    }

    @Override
    public Options getOptions() {
        if (options == null) {
            Options options = new Options();

            options.addOption("u", "uuid", true, "target uuids of communities/collections/items");
            options.getOption("u").setType(String.class);
            options.getOption("u").setRequired(true);

            options.addOption("f", "file", true, "source json file");
            options.getOption("f").setType(InputStream.class);
            options.getOption("f").setRequired(true);

            options.addOption("h", "help", false, "help");

            super.options = options;
        }
        return options;
    }

    @Override
    public Class<T> getDspaceRunnableClass() {
        return dspaceRunnableClass;
    }

    /**
     * Generic setter for the dspaceRunnableClass
     *
     * @param dspaceRunnableClass The dspaceRunnableClass to be set on this
     *                            BulkAccessControlScriptConfiguration
     */
    @Override
    public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
        this.dspaceRunnableClass = dspaceRunnableClass;
    }

}
@@ -0,0 +1,48 @@
|
|||||||
|
/**
|
||||||
|
* The contents of this file are subject to the license and copyright
|
||||||
|
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||||
|
* tree and available online at
|
||||||
|
*
|
||||||
|
* http://www.dspace.org/license/
|
||||||
|
*/
|
||||||
|
package org.dspace.app.bulkaccesscontrol.exception;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Exception for errors that occurs during the bulk access control
|
||||||
|
*
|
||||||
|
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
public class BulkAccessControlException extends RuntimeException {
|
||||||
|
|
||||||
|
private static final long serialVersionUID = -74730626862418515L;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Constructor with error message and cause.
|
||||||
|
*
|
||||||
|
* @param message the error message
|
||||||
|
* @param cause the error cause
|
||||||
|
*/
|
||||||
|
public BulkAccessControlException(String message, Throwable cause) {
|
||||||
|
super(message, cause);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Constructor with error message.
|
||||||
|
*
|
||||||
|
* @param message the error message
|
||||||
|
*/
|
||||||
|
public BulkAccessControlException(String message) {
|
||||||
|
super(message);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Constructor with error cause.
|
||||||
|
*
|
||||||
|
* @param cause the error cause
|
||||||
|
*/
|
||||||
|
public BulkAccessControlException(Throwable cause) {
|
||||||
|
super(cause);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
@@ -0,0 +1,59 @@
|
|||||||
|
/**
|
||||||
|
* The contents of this file are subject to the license and copyright
|
||||||
|
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||||
|
* tree and available online at
|
||||||
|
*
|
||||||
|
* http://www.dspace.org/license/
|
||||||
|
*/
|
||||||
|
package org.dspace.app.bulkaccesscontrol.model;
|
||||||
|
|
||||||
|
import java.util.Date;
|
||||||
|
|
||||||
|
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
|
||||||
|
import org.dspace.app.bulkaccesscontrol.BulkAccessControl;
|
||||||
|
import org.dspace.util.MultiFormatDateDeserializer;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Class that model the values of an Access Condition as expressed in the {@link BulkAccessControl} input file
|
||||||
|
*
|
||||||
|
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
|
||||||
|
*/
|
||||||
|
public class AccessCondition {
|
||||||
|
|
||||||
|
private String name;
|
||||||
|
|
||||||
|
private String description;
|
||||||
|
|
||||||
|
@JsonDeserialize(using = MultiFormatDateDeserializer.class)
|
||||||
|
private Date startDate;
|
||||||
|
|
||||||
|
@JsonDeserialize(using = MultiFormatDateDeserializer.class)
|
||||||
|
private Date endDate;
|
||||||
|
|
||||||
|
public AccessCondition() {
|
||||||
|
}
|
||||||
|
|
||||||
|
public AccessCondition(String name, String description, Date startDate, Date endDate) {
|
||||||
|
this.name = name;
|
||||||
|
this.description = description;
|
||||||
|
this.startDate = startDate;
|
||||||
|
this.endDate = endDate;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getName() {
|
||||||
|
return name;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getDescription() {
|
||||||
|
return description;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Date getStartDate() {
|
||||||
|
return startDate;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Date getEndDate() {
|
||||||
|
return endDate;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
@@ -0,0 +1,69 @@
|
|||||||
|
/**
|
||||||
|
* The contents of this file are subject to the license and copyright
|
||||||
|
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||||
|
* tree and available online at
|
||||||
|
*
|
||||||
|
* http://www.dspace.org/license/
|
||||||
|
*/
|
||||||
|
package org.dspace.app.bulkaccesscontrol.model;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
import org.dspace.app.bulkaccesscontrol.BulkAccessControl;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Class that model the value of bitstream node
|
||||||
|
* from json file of the {@link BulkAccessControl}
|
||||||
|
*
|
||||||
|
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
|
||||||
|
*/
|
||||||
|
public class AccessConditionBitstream {
|
||||||
|
|
||||||
|
private String mode;
|
||||||
|
|
||||||
|
private Constraint constraints;
|
||||||
|
|
||||||
|
private List<AccessCondition> accessConditions;
|
||||||
|
|
||||||
|
public String getMode() {
|
||||||
|
return mode;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setMode(String mode) {
|
||||||
|
this.mode = mode;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Constraint getConstraints() {
|
||||||
|
return constraints;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setConstraints(Constraint constraints) {
|
||||||
|
this.constraints = constraints;
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<AccessCondition> getAccessConditions() {
|
||||||
|
if (accessConditions == null) {
|
||||||
|
return new ArrayList<>();
|
||||||
|
}
|
||||||
|
return accessConditions;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setAccessConditions(List<AccessCondition> accessConditions) {
|
||||||
|
this.accessConditions = accessConditions;
|
||||||
|
}
|
||||||
|
|
||||||
|
public class Constraint {
|
||||||
|
|
||||||
|
private List<String> uuid;
|
||||||
|
|
||||||
|
public List<String> getUuid() {
|
||||||
|
return uuid;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setUuid(List<String> uuid) {
|
||||||
|
this.uuid = uuid;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
@@ -0,0 +1,45 @@
|
|||||||
|
/**
|
||||||
|
* The contents of this file are subject to the license and copyright
|
||||||
|
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||||
|
* tree and available online at
|
||||||
|
*
|
||||||
|
* http://www.dspace.org/license/
|
||||||
|
*/
|
||||||
|
package org.dspace.app.bulkaccesscontrol.model;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
import org.dspace.app.bulkaccesscontrol.BulkAccessControl;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Class that model the value of item node
|
||||||
|
* from json file of the {@link BulkAccessControl}
|
||||||
|
*
|
||||||
|
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
|
||||||
|
*/
|
||||||
|
public class AccessConditionItem {
|
||||||
|
|
||||||
|
String mode;
|
||||||
|
|
||||||
|
List<AccessCondition> accessConditions;
|
||||||
|
|
||||||
|
public String getMode() {
|
||||||
|
return mode;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setMode(String mode) {
|
||||||
|
this.mode = mode;
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<AccessCondition> getAccessConditions() {
|
||||||
|
if (accessConditions == null) {
|
||||||
|
return new ArrayList<>();
|
||||||
|
}
|
||||||
|
return accessConditions;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setAccessConditions(List<AccessCondition> accessConditions) {
|
||||||
|
this.accessConditions = accessConditions;
|
||||||
|
}
|
||||||
|
}
|
@@ -0,0 +1,50 @@
|
|||||||
|
/**
|
||||||
|
* The contents of this file are subject to the license and copyright
|
||||||
|
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||||
|
* tree and available online at
|
||||||
|
*
|
||||||
|
* http://www.dspace.org/license/
|
||||||
|
*/
|
||||||
|
package org.dspace.app.bulkaccesscontrol.model;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
import org.dspace.submit.model.AccessConditionOption;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A collection of conditions to be met when bulk access condition.
|
||||||
|
*
|
||||||
|
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
|
||||||
|
*/
|
||||||
|
public class BulkAccessConditionConfiguration {
|
||||||
|
|
||||||
|
private String name;
|
||||||
|
private List<AccessConditionOption> itemAccessConditionOptions;
|
||||||
|
private List<AccessConditionOption> bitstreamAccessConditionOptions;
|
||||||
|
|
||||||
|
public String getName() {
|
||||||
|
return name;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setName(String name) {
|
||||||
|
this.name = name;
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<AccessConditionOption> getItemAccessConditionOptions() {
|
||||||
|
return itemAccessConditionOptions;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setItemAccessConditionOptions(
|
||||||
|
List<AccessConditionOption> itemAccessConditionOptions) {
|
||||||
|
this.itemAccessConditionOptions = itemAccessConditionOptions;
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<AccessConditionOption> getBitstreamAccessConditionOptions() {
|
||||||
|
return bitstreamAccessConditionOptions;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setBitstreamAccessConditionOptions(
|
||||||
|
List<AccessConditionOption> bitstreamAccessConditionOptions) {
|
||||||
|
this.bitstreamAccessConditionOptions = bitstreamAccessConditionOptions;
|
||||||
|
}
|
||||||
|
}
|
@@ -0,0 +1,72 @@
|
|||||||
|
/**
|
||||||
|
* The contents of this file are subject to the license and copyright
|
||||||
|
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||||
|
* tree and available online at
|
||||||
|
*
|
||||||
|
* http://www.dspace.org/license/
|
||||||
|
*/
|
||||||
|
package org.dspace.app.bulkaccesscontrol.model;
|
||||||
|
|
||||||
|
import org.dspace.app.bulkaccesscontrol.BulkAccessControl;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Class that model the content of the JSON file used as input for the {@link BulkAccessControl}
|
||||||
|
*
|
||||||
|
* <code> <br/>
|
||||||
|
* { <br/>
|
||||||
|
* item: { <br/>
|
||||||
|
* mode: "replace", <br/>
|
||||||
|
* accessConditions: [ <br/>
|
||||||
|
* { <br/>
|
||||||
|
* "name": "openaccess" <br/>
|
||||||
|
* } <br/>
|
||||||
|
* ] <br/>
|
||||||
|
* }, <br/>
|
||||||
|
* bitstream: { <br/>
|
||||||
|
* constraints: { <br/>
|
||||||
|
* uuid: [bit-uuid1, bit-uuid2, ..., bit-uuidN], <br/>
|
||||||
|
* }, <br/>
|
||||||
|
* mode: "add", <br/>
|
||||||
|
* accessConditions: [ <br/>
|
||||||
|
* { <br/>
|
||||||
|
* "name": "embargo", <br/>
|
||||||
|
* "startDate": "2024-06-24T23:59:59.999+0000" <br/>
|
||||||
|
* } <br/>
|
||||||
|
* ] <br/>
|
||||||
|
* } <br/>
|
||||||
|
* }
|
||||||
|
* </code>
|
||||||
|
*
|
||||||
|
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
|
||||||
|
*/
|
||||||
|
public class BulkAccessControlInput {
|
||||||
|
|
||||||
|
AccessConditionItem item;
|
||||||
|
|
||||||
|
AccessConditionBitstream bitstream;
|
||||||
|
|
||||||
|
public BulkAccessControlInput() {
|
||||||
|
}
|
||||||
|
|
||||||
|
public BulkAccessControlInput(AccessConditionItem item,
|
||||||
|
AccessConditionBitstream bitstream) {
|
||||||
|
this.item = item;
|
||||||
|
this.bitstream = bitstream;
|
||||||
|
}
|
||||||
|
|
||||||
|
public AccessConditionItem getItem() {
|
||||||
|
return item;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setItem(AccessConditionItem item) {
|
||||||
|
this.item = item;
|
||||||
|
}
|
||||||
|
|
||||||
|
public AccessConditionBitstream getBitstream() {
|
||||||
|
return bitstream;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setBitstream(AccessConditionBitstream bitstream) {
|
||||||
|
this.bitstream = bitstream;
|
||||||
|
}
|
||||||
|
}
|
@@ -0,0 +1,45 @@
|
|||||||
|
/**
|
||||||
|
* The contents of this file are subject to the license and copyright
|
||||||
|
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||||
|
* tree and available online at
|
||||||
|
*
|
||||||
|
* http://www.dspace.org/license/
|
||||||
|
*/
|
||||||
|
package org.dspace.app.bulkaccesscontrol.service;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
import org.apache.commons.collections4.CollectionUtils;
|
||||||
|
import org.dspace.app.bulkaccesscontrol.model.BulkAccessConditionConfiguration;
|
||||||
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Simple bean to manage different Bulk Access Condition configurations
|
||||||
|
*
|
||||||
|
* @author Mohamed Eskander (mohamed.eskander at 4science.it)
|
||||||
|
*/
|
||||||
|
public class BulkAccessConditionConfigurationService {
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private List<BulkAccessConditionConfiguration> bulkAccessConditionConfigurations;
|
||||||
|
|
||||||
|
public List<BulkAccessConditionConfiguration> getBulkAccessConditionConfigurations() {
|
||||||
|
if (CollectionUtils.isEmpty(bulkAccessConditionConfigurations)) {
|
||||||
|
return new ArrayList<>();
|
||||||
|
}
|
||||||
|
return bulkAccessConditionConfigurations;
|
||||||
|
}
|
||||||
|
|
||||||
|
public BulkAccessConditionConfiguration getBulkAccessConditionConfiguration(String name) {
|
||||||
|
return getBulkAccessConditionConfigurations().stream()
|
||||||
|
.filter(x -> name.equals(x.getName()))
|
||||||
|
.findFirst()
|
||||||
|
.orElse(null);
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setBulkAccessConditionConfigurations(
|
||||||
|
List<BulkAccessConditionConfiguration> bulkAccessConditionConfigurations) {
|
||||||
|
this.bulkAccessConditionConfigurations = bulkAccessConditionConfigurations;
|
||||||
|
}
|
||||||
|
}
|
@@ -7,33 +7,16 @@
|
|||||||
*/
|
*/
|
||||||
package org.dspace.app.bulkedit;
|
package org.dspace.app.bulkedit;
|
||||||
|
|
||||||
import java.sql.SQLException;
|
|
||||||
|
|
||||||
import org.apache.commons.cli.Options;
|
import org.apache.commons.cli.Options;
|
||||||
import org.dspace.authorize.service.AuthorizeService;
|
|
||||||
import org.dspace.core.Context;
|
|
||||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||||
import org.springframework.beans.factory.annotation.Autowired;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The {@link ScriptConfiguration} for the {@link MetadataDeletion} script.
|
* The {@link ScriptConfiguration} for the {@link MetadataDeletion} script.
|
||||||
*/
|
*/
|
||||||
public class MetadataDeletionScriptConfiguration<T extends MetadataDeletion> extends ScriptConfiguration<T> {
|
public class MetadataDeletionScriptConfiguration<T extends MetadataDeletion> extends ScriptConfiguration<T> {
|
||||||
|
|
||||||
@Autowired
|
|
||||||
private AuthorizeService authorizeService;
|
|
||||||
|
|
||||||
private Class<T> dspaceRunnableClass;
|
private Class<T> dspaceRunnableClass;
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean isAllowedToExecute(Context context) {
|
|
||||||
try {
|
|
||||||
return authorizeService.isAdmin(context);
|
|
||||||
} catch (SQLException e) {
|
|
||||||
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Options getOptions() {
|
public Options getOptions() {
|
||||||
if (options == null) {
|
if (options == null) {
|
||||||
|
@@ -7,22 +7,14 @@
|
|||||||
*/
|
*/
|
||||||
package org.dspace.app.bulkedit;
|
package org.dspace.app.bulkedit;
|
||||||
|
|
||||||
import java.sql.SQLException;
|
|
||||||
|
|
||||||
import org.apache.commons.cli.Options;
|
import org.apache.commons.cli.Options;
|
||||||
import org.dspace.authorize.service.AuthorizeService;
|
|
||||||
import org.dspace.core.Context;
|
|
||||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||||
import org.springframework.beans.factory.annotation.Autowired;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The {@link ScriptConfiguration} for the {@link MetadataExport} script
|
* The {@link ScriptConfiguration} for the {@link MetadataExport} script
|
||||||
*/
|
*/
|
||||||
public class MetadataExportScriptConfiguration<T extends MetadataExport> extends ScriptConfiguration<T> {
|
public class MetadataExportScriptConfiguration<T extends MetadataExport> extends ScriptConfiguration<T> {
|
||||||
|
|
||||||
@Autowired
|
|
||||||
private AuthorizeService authorizeService;
|
|
||||||
|
|
||||||
private Class<T> dspaceRunnableClass;
|
private Class<T> dspaceRunnableClass;
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@@ -39,15 +31,6 @@ public class MetadataExportScriptConfiguration<T extends MetadataExport> extends
|
|||||||
this.dspaceRunnableClass = dspaceRunnableClass;
|
this.dspaceRunnableClass = dspaceRunnableClass;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean isAllowedToExecute(Context context) {
|
|
||||||
try {
|
|
||||||
return authorizeService.isAdmin(context);
|
|
||||||
} catch (SQLException e) {
|
|
||||||
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Options getOptions() {
|
public Options getOptions() {
|
||||||
if (options == null) {
|
if (options == null) {
|
||||||
|
@@ -9,7 +9,6 @@
|
|||||||
package org.dspace.app.bulkedit;
|
package org.dspace.app.bulkedit;
|
||||||
|
|
||||||
import org.apache.commons.cli.Options;
|
import org.apache.commons.cli.Options;
|
||||||
import org.dspace.core.Context;
|
|
||||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -29,11 +28,6 @@ public class MetadataExportSearchScriptConfiguration<T extends MetadataExportSea
|
|||||||
this.dspaceRunnableclass = dspaceRunnableClass;
|
this.dspaceRunnableclass = dspaceRunnableClass;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean isAllowedToExecute(Context context) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Options getOptions() {
|
public Options getOptions() {
|
||||||
if (options == null) {
|
if (options == null) {
|
||||||
|
@@ -598,18 +598,19 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
|
|||||||
changes.add(whatHasChanged);
|
changes.add(whatHasChanged);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (change) {
|
if (change && (rowCount % configurationService.getIntProperty("bulkedit.change.commit.count", 100) == 0)) {
|
||||||
//only clear cache if changes have been made.
|
c.commit();
|
||||||
c.uncacheEntity(wsItem);
|
handler.logInfo(LogHelper.getHeader(c, "metadata_import_commit", "lineNumber=" + rowCount));
|
||||||
c.uncacheEntity(wfItem);
|
|
||||||
c.uncacheEntity(item);
|
|
||||||
}
|
}
|
||||||
populateRefAndRowMap(line, item == null ? null : item.getID());
|
populateRefAndRowMap(line, item == null ? null : item.getID());
|
||||||
// keep track of current rows processed
|
// keep track of current rows processed
|
||||||
rowCount++;
|
rowCount++;
|
||||||
}
|
}
|
||||||
|
if (change) {
|
||||||
|
c.commit();
|
||||||
|
}
|
||||||
|
|
||||||
c.setMode(originalMode);
|
c.setMode(Context.Mode.READ_ONLY);
|
||||||
|
|
||||||
|
|
||||||
// Return the changes
|
// Return the changes
|
||||||
|
@@ -8,22 +8,15 @@
|
|||||||
package org.dspace.app.bulkedit;
|
package org.dspace.app.bulkedit;
|
||||||
|
|
||||||
import java.io.InputStream;
|
import java.io.InputStream;
|
||||||
import java.sql.SQLException;
|
|
||||||
|
|
||||||
import org.apache.commons.cli.Options;
|
import org.apache.commons.cli.Options;
|
||||||
import org.dspace.authorize.service.AuthorizeService;
|
|
||||||
import org.dspace.core.Context;
|
|
||||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||||
import org.springframework.beans.factory.annotation.Autowired;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The {@link ScriptConfiguration} for the {@link MetadataImport} script
|
* The {@link ScriptConfiguration} for the {@link MetadataImport} script
|
||||||
*/
|
*/
|
||||||
public class MetadataImportScriptConfiguration<T extends MetadataImport> extends ScriptConfiguration<T> {
|
public class MetadataImportScriptConfiguration<T extends MetadataImport> extends ScriptConfiguration<T> {
|
||||||
|
|
||||||
@Autowired
|
|
||||||
private AuthorizeService authorizeService;
|
|
||||||
|
|
||||||
private Class<T> dspaceRunnableClass;
|
private Class<T> dspaceRunnableClass;
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@@ -40,15 +33,6 @@ public class MetadataImportScriptConfiguration<T extends MetadataImport> extends
|
|||||||
this.dspaceRunnableClass = dspaceRunnableClass;
|
this.dspaceRunnableClass = dspaceRunnableClass;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean isAllowedToExecute(Context context) {
|
|
||||||
try {
|
|
||||||
return authorizeService.isAdmin(context);
|
|
||||||
} catch (SQLException e) {
|
|
||||||
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Options getOptions() {
|
public Options getOptions() {
|
||||||
if (options == null) {
|
if (options == null) {
|
||||||
|
@@ -7,18 +7,11 @@
|
|||||||
*/
|
*/
|
||||||
package org.dspace.app.harvest;
|
package org.dspace.app.harvest;
|
||||||
|
|
||||||
import java.sql.SQLException;
|
|
||||||
|
|
||||||
import org.apache.commons.cli.Options;
|
import org.apache.commons.cli.Options;
|
||||||
import org.dspace.authorize.service.AuthorizeService;
|
|
||||||
import org.dspace.core.Context;
|
|
||||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||||
import org.springframework.beans.factory.annotation.Autowired;
|
|
||||||
|
|
||||||
|
|
||||||
public class HarvestScriptConfiguration<T extends Harvest> extends ScriptConfiguration<T> {
|
public class HarvestScriptConfiguration<T extends Harvest> extends ScriptConfiguration<T> {
|
||||||
@Autowired
|
|
||||||
private AuthorizeService authorizeService;
|
|
||||||
|
|
||||||
private Class<T> dspaceRunnableClass;
|
private Class<T> dspaceRunnableClass;
|
||||||
|
|
||||||
@@ -32,13 +25,6 @@ public class HarvestScriptConfiguration<T extends Harvest> extends ScriptConfigu
|
|||||||
this.dspaceRunnableClass = dspaceRunnableClass;
|
this.dspaceRunnableClass = dspaceRunnableClass;
|
||||||
}
|
}
|
||||||
|
|
||||||
public boolean isAllowedToExecute(final Context context) {
|
|
||||||
try {
|
|
||||||
return authorizeService.isAdmin(context);
|
|
||||||
} catch (SQLException e) {
|
|
||||||
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public Options getOptions() {
|
public Options getOptions() {
|
||||||
Options options = new Options();
|
Options options = new Options();
|
||||||
|
@@ -7,14 +7,9 @@
|
|||||||
*/
|
*/
|
||||||
package org.dspace.app.itemexport;
|
package org.dspace.app.itemexport;
|
||||||
|
|
||||||
import java.sql.SQLException;
|
|
||||||
|
|
||||||
import org.apache.commons.cli.Option;
|
import org.apache.commons.cli.Option;
|
||||||
import org.apache.commons.cli.Options;
|
import org.apache.commons.cli.Options;
|
||||||
import org.dspace.authorize.service.AuthorizeService;
|
|
||||||
import org.dspace.core.Context;
|
|
||||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||||
import org.springframework.beans.factory.annotation.Autowired;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The {@link ScriptConfiguration} for the {@link ItemExport} script
|
* The {@link ScriptConfiguration} for the {@link ItemExport} script
|
||||||
@@ -23,9 +18,6 @@ import org.springframework.beans.factory.annotation.Autowired;
|
|||||||
*/
|
*/
|
||||||
public class ItemExportScriptConfiguration<T extends ItemExport> extends ScriptConfiguration<T> {
|
public class ItemExportScriptConfiguration<T extends ItemExport> extends ScriptConfiguration<T> {
|
||||||
|
|
||||||
@Autowired
|
|
||||||
private AuthorizeService authorizeService;
|
|
||||||
|
|
||||||
private Class<T> dspaceRunnableClass;
|
private Class<T> dspaceRunnableClass;
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@@ -38,15 +30,6 @@ public class ItemExportScriptConfiguration<T extends ItemExport> extends ScriptC
|
|||||||
this.dspaceRunnableClass = dspaceRunnableClass;
|
this.dspaceRunnableClass = dspaceRunnableClass;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean isAllowedToExecute(final Context context) {
|
|
||||||
try {
|
|
||||||
return authorizeService.isAdmin(context);
|
|
||||||
} catch (SQLException e) {
|
|
||||||
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Options getOptions() {
|
public Options getOptions() {
|
||||||
Options options = new Options();
|
Options options = new Options();
|
||||||
|
@@ -11,6 +11,7 @@ import java.io.File;
|
|||||||
import java.io.FileInputStream;
|
import java.io.FileInputStream;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.io.InputStream;
|
import java.io.InputStream;
|
||||||
|
import java.net.URL;
|
||||||
import java.nio.file.Files;
|
import java.nio.file.Files;
|
||||||
import java.sql.SQLException;
|
import java.sql.SQLException;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
@@ -22,6 +23,7 @@ import java.util.UUID;
|
|||||||
import org.apache.commons.cli.ParseException;
|
import org.apache.commons.cli.ParseException;
|
||||||
import org.apache.commons.io.FileUtils;
|
import org.apache.commons.io.FileUtils;
|
||||||
import org.apache.commons.lang3.StringUtils;
|
import org.apache.commons.lang3.StringUtils;
|
||||||
|
import org.apache.tika.Tika;
|
||||||
import org.dspace.app.itemimport.factory.ItemImportServiceFactory;
|
import org.dspace.app.itemimport.factory.ItemImportServiceFactory;
|
||||||
import org.dspace.app.itemimport.service.ItemImportService;
|
import org.dspace.app.itemimport.service.ItemImportService;
|
||||||
import org.dspace.authorize.AuthorizeException;
|
import org.dspace.authorize.AuthorizeException;
|
||||||
@@ -67,16 +69,19 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
|
|||||||
protected String eperson = null;
|
protected String eperson = null;
|
||||||
protected String[] collections = null;
|
protected String[] collections = null;
|
||||||
protected boolean isTest = false;
|
protected boolean isTest = false;
|
||||||
|
protected boolean isExcludeContent = false;
|
||||||
protected boolean isResume = false;
|
protected boolean isResume = false;
|
||||||
protected boolean useWorkflow = false;
|
protected boolean useWorkflow = false;
|
||||||
protected boolean useWorkflowSendEmail = false;
|
protected boolean useWorkflowSendEmail = false;
|
||||||
protected boolean isQuiet = false;
|
protected boolean isQuiet = false;
|
||||||
protected boolean commandLineCollections = false;
|
protected boolean commandLineCollections = false;
|
||||||
protected boolean zip = false;
|
protected boolean zip = false;
|
||||||
|
protected boolean remoteUrl = false;
|
||||||
protected String zipfilename = null;
|
protected String zipfilename = null;
|
||||||
|
protected boolean zipvalid = false;
|
||||||
protected boolean help = false;
|
protected boolean help = false;
|
||||||
protected File workDir = null;
|
protected File workDir = null;
|
||||||
private File workFile = null;
|
protected File workFile = null;
|
||||||
|
|
||||||
protected static final CollectionService collectionService =
|
protected static final CollectionService collectionService =
|
||||||
ContentServiceFactory.getInstance().getCollectionService();
|
ContentServiceFactory.getInstance().getCollectionService();
|
||||||
@@ -119,6 +124,8 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
|
|||||||
handler.logInfo("**Test Run** - not actually importing items.");
|
handler.logInfo("**Test Run** - not actually importing items.");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
isExcludeContent = commandLine.hasOption('x');
|
||||||
|
|
||||||
if (commandLine.hasOption('p')) {
|
if (commandLine.hasOption('p')) {
|
||||||
template = true;
|
template = true;
|
||||||
}
|
}
|
||||||
@@ -204,6 +211,7 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
|
|||||||
.getItemImportService();
|
.getItemImportService();
|
||||||
try {
|
try {
|
||||||
itemImportService.setTest(isTest);
|
itemImportService.setTest(isTest);
|
||||||
|
itemImportService.setExcludeContent(isExcludeContent);
|
||||||
itemImportService.setResume(isResume);
|
itemImportService.setResume(isResume);
|
||||||
itemImportService.setUseWorkflow(useWorkflow);
|
itemImportService.setUseWorkflow(useWorkflow);
|
||||||
itemImportService.setUseWorkflowSendEmail(useWorkflowSendEmail);
|
itemImportService.setUseWorkflowSendEmail(useWorkflowSendEmail);
|
||||||
@@ -229,10 +237,21 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
|
|||||||
handler.logInfo("***End of Test Run***");
|
handler.logInfo("***End of Test Run***");
|
||||||
}
|
}
|
||||||
} finally {
|
} finally {
|
||||||
// clean work dir
|
|
||||||
if (zip) {
|
if (zip) {
|
||||||
FileUtils.deleteDirectory(new File(sourcedir));
|
// if zip file was valid then clean sourcedir
|
||||||
FileUtils.deleteDirectory(workDir);
|
if (zipvalid && sourcedir != null && new File(sourcedir).exists()) {
|
||||||
|
FileUtils.deleteDirectory(new File(sourcedir));
|
||||||
|
}
|
||||||
|
|
||||||
|
// clean workdir
|
||||||
|
if (workDir != null && workDir.exists()) {
|
||||||
|
FileUtils.deleteDirectory(workDir);
|
||||||
|
}
|
||||||
|
|
||||||
|
// conditionally clean workFile if import was done in the UI or via a URL and it still exists
|
||||||
|
if (workFile != null && workFile.exists()) {
|
||||||
|
workFile.delete();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Date endTime = new Date();
|
Date endTime = new Date();
|
||||||
@@ -249,6 +268,17 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
|
|||||||
* @param context
|
* @param context
|
||||||
*/
|
*/
|
||||||
protected void validate(Context context) {
|
protected void validate(Context context) {
|
||||||
|
// check zip type: uploaded file or remote url
|
||||||
|
if (commandLine.hasOption('z')) {
|
||||||
|
zipfilename = commandLine.getOptionValue('z');
|
||||||
|
} else if (commandLine.hasOption('u')) {
|
||||||
|
remoteUrl = true;
|
||||||
|
zipfilename = commandLine.getOptionValue('u');
|
||||||
|
}
|
||||||
|
if (StringUtils.isBlank(zipfilename)) {
|
||||||
|
throw new UnsupportedOperationException("Must run with either name of zip file or url of zip file");
|
||||||
|
}
|
||||||
|
|
||||||
if (command == null) {
|
if (command == null) {
|
||||||
handler.logError("Must run with either add, replace, or remove (run with -h flag for details)");
|
handler.logError("Must run with either add, replace, or remove (run with -h flag for details)");
|
||||||
throw new UnsupportedOperationException("Must run with either add, replace, or remove");
|
throw new UnsupportedOperationException("Must run with either add, replace, or remove");
|
||||||
@@ -291,7 +321,6 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
|
|||||||
handler.writeFilestream(context, MAPFILE_FILENAME, mapfileInputStream, MAPFILE_BITSTREAM_TYPE);
|
handler.writeFilestream(context, MAPFILE_FILENAME, mapfileInputStream, MAPFILE_BITSTREAM_TYPE);
|
||||||
} finally {
|
} finally {
|
||||||
mapFile.delete();
|
mapFile.delete();
|
||||||
workFile.delete();
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -302,17 +331,55 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
|
|||||||
* @throws Exception
|
* @throws Exception
|
||||||
*/
|
*/
|
||||||
protected void readZip(Context context, ItemImportService itemImportService) throws Exception {
|
protected void readZip(Context context, ItemImportService itemImportService) throws Exception {
|
||||||
Optional<InputStream> optionalFileStream = handler.getFileStream(context, zipfilename);
|
Optional<InputStream> optionalFileStream = Optional.empty();
|
||||||
if (optionalFileStream.isPresent()) {
|
Optional<InputStream> validationFileStream = Optional.empty();
|
||||||
|
if (!remoteUrl) {
|
||||||
|
// manage zip via upload
|
||||||
|
optionalFileStream = handler.getFileStream(context, zipfilename);
|
||||||
|
validationFileStream = handler.getFileStream(context, zipfilename);
|
||||||
|
} else {
|
||||||
|
// manage zip via remote url
|
||||||
|
optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
|
||||||
|
validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
|
||||||
|
}
|
||||||
|
|
||||||
|
if (validationFileStream.isPresent()) {
|
||||||
|
// validate zip file
|
||||||
|
if (validationFileStream.isPresent()) {
|
||||||
|
validateZip(validationFileStream.get());
|
||||||
|
}
|
||||||
|
|
||||||
workFile = new File(itemImportService.getTempWorkDir() + File.separator
|
workFile = new File(itemImportService.getTempWorkDir() + File.separator
|
||||||
+ zipfilename + "-" + context.getCurrentUser().getID());
|
+ zipfilename + "-" + context.getCurrentUser().getID());
|
||||||
FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
|
FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
|
||||||
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR);
|
|
||||||
sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
|
|
||||||
} else {
|
} else {
|
||||||
throw new IllegalArgumentException(
|
throw new IllegalArgumentException(
|
||||||
"Error reading file, the file couldn't be found for filename: " + zipfilename);
|
"Error reading file, the file couldn't be found for filename: " + zipfilename);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
|
||||||
|
+ File.separator + context.getCurrentUser().getID());
|
||||||
|
sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Confirm that the zip file has the correct MIME type
|
||||||
|
* @param inputStream
|
||||||
|
*/
|
||||||
|
protected void validateZip(InputStream inputStream) {
|
||||||
|
Tika tika = new Tika();
|
||||||
|
try {
|
||||||
|
String mimeType = tika.detect(inputStream);
|
||||||
|
if (mimeType.equals("application/zip")) {
|
||||||
|
zipvalid = true;
|
||||||
|
} else {
|
||||||
|
handler.logError("A valid zip file must be supplied. The provided file has mimetype: " + mimeType);
|
||||||
|
throw new UnsupportedOperationException("A valid zip file must be supplied");
|
||||||
|
}
|
||||||
|
} catch (IOException e) {
|
||||||
|
throw new IllegalArgumentException(
|
||||||
|
"There was an error while reading the zip file: " + zipfilename);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -352,7 +419,6 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
|
|||||||
*/
|
*/
|
||||||
protected void setZip() {
|
protected void setZip() {
|
||||||
zip = true;
|
zip = true;
|
||||||
zipfilename = commandLine.getOptionValue('z');
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@@ -8,10 +8,15 @@
|
|||||||
package org.dspace.app.itemimport;
|
package org.dspace.app.itemimport;
|
||||||
|
|
||||||
import java.io.File;
|
import java.io.File;
|
||||||
|
import java.io.FileInputStream;
|
||||||
|
import java.io.InputStream;
|
||||||
|
import java.net.URL;
|
||||||
import java.sql.SQLException;
|
import java.sql.SQLException;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
import java.util.Optional;
|
||||||
import java.util.UUID;
|
import java.util.UUID;
|
||||||
|
|
||||||
|
import org.apache.commons.io.FileUtils;
|
||||||
import org.apache.commons.lang3.StringUtils;
|
import org.apache.commons.lang3.StringUtils;
|
||||||
import org.dspace.app.itemimport.service.ItemImportService;
|
import org.dspace.app.itemimport.service.ItemImportService;
|
||||||
import org.dspace.content.Collection;
|
import org.dspace.content.Collection;
|
||||||
@@ -62,7 +67,7 @@ public class ItemImportCLI extends ItemImport {
|
|||||||
handler.logError("Must run with either add, replace, or remove (run with -h flag for details)");
|
handler.logError("Must run with either add, replace, or remove (run with -h flag for details)");
|
||||||
throw new UnsupportedOperationException("Must run with either add, replace, or remove");
|
throw new UnsupportedOperationException("Must run with either add, replace, or remove");
|
||||||
} else if ("add".equals(command) || "replace".equals(command)) {
|
} else if ("add".equals(command) || "replace".equals(command)) {
|
||||||
if (sourcedir == null) {
|
if (!remoteUrl && sourcedir == null) {
|
||||||
handler.logError("A source directory containing items must be set (run with -h flag for details)");
|
handler.logError("A source directory containing items must be set (run with -h flag for details)");
|
||||||
throw new UnsupportedOperationException("A source directory containing items must be set");
|
throw new UnsupportedOperationException("A source directory containing items must be set");
|
||||||
}
|
}
|
||||||
@@ -96,10 +101,43 @@ public class ItemImportCLI extends ItemImport {
|
|||||||
protected void readZip(Context context, ItemImportService itemImportService) throws Exception {
|
protected void readZip(Context context, ItemImportService itemImportService) throws Exception {
|
||||||
// If this is a zip archive, unzip it first
|
// If this is a zip archive, unzip it first
|
||||||
if (zip) {
|
if (zip) {
|
||||||
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
|
if (!remoteUrl) {
|
||||||
+ File.separator + context.getCurrentUser().getID());
|
// confirm zip file exists
|
||||||
sourcedir = itemImportService.unzip(
|
File myZipFile = new File(sourcedir + File.separator + zipfilename);
|
||||||
new File(sourcedir + File.separator + zipfilename), workDir.getAbsolutePath());
|
if ((!myZipFile.exists()) || (!myZipFile.isFile())) {
|
||||||
|
throw new IllegalArgumentException(
|
||||||
|
"Error reading file, the file couldn't be found for filename: " + zipfilename);
|
||||||
|
}
|
||||||
|
|
||||||
|
// validate zip file
|
||||||
|
InputStream validationFileStream = new FileInputStream(myZipFile);
|
||||||
|
validateZip(validationFileStream);
|
||||||
|
|
||||||
|
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
|
||||||
|
+ File.separator + context.getCurrentUser().getID());
|
||||||
|
sourcedir = itemImportService.unzip(
|
||||||
|
new File(sourcedir + File.separator + zipfilename), workDir.getAbsolutePath());
|
||||||
|
} else {
|
||||||
|
// manage zip via remote url
|
||||||
|
Optional<InputStream> optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
|
||||||
|
if (optionalFileStream.isPresent()) {
|
||||||
|
// validate zip file via url
|
||||||
|
Optional<InputStream> validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
|
||||||
|
if (validationFileStream.isPresent()) {
|
||||||
|
validateZip(validationFileStream.get());
|
||||||
|
}
|
||||||
|
|
||||||
|
workFile = new File(itemImportService.getTempWorkDir() + File.separator
|
||||||
|
+ zipfilename + "-" + context.getCurrentUser().getID());
|
||||||
|
FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
|
||||||
|
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
|
||||||
|
+ File.separator + context.getCurrentUser().getID());
|
||||||
|
sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
|
||||||
|
} else {
|
||||||
|
throw new IllegalArgumentException(
|
||||||
|
"Error reading file, the file couldn't be found for filename: " + zipfilename);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -120,6 +158,12 @@ public class ItemImportCLI extends ItemImport {
|
|||||||
zip = true;
|
zip = true;
|
||||||
zipfilename = commandLine.getOptionValue('z');
|
zipfilename = commandLine.getOptionValue('z');
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (commandLine.hasOption('u')) { // remote url
|
||||||
|
zip = true;
|
||||||
|
remoteUrl = true;
|
||||||
|
zipfilename = commandLine.getOptionValue('u');
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@@ -13,7 +13,7 @@ import org.dspace.scripts.configuration.ScriptConfiguration;
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* The {@link ScriptConfiguration} for the {@link ItemImportCLI} script
|
* The {@link ScriptConfiguration} for the {@link ItemImportCLI} script
|
||||||
*
|
*
|
||||||
* @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com)
|
* @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com)
|
||||||
*/
|
*/
|
||||||
public class ItemImportCLIScriptConfiguration extends ItemImportScriptConfiguration<ItemImportCLI> {
|
public class ItemImportCLIScriptConfiguration extends ItemImportScriptConfiguration<ItemImportCLI> {
|
||||||
@@ -37,6 +37,9 @@ public class ItemImportCLIScriptConfiguration extends ItemImportScriptConfigurat
|
|||||||
options.addOption(Option.builder("z").longOpt("zip")
|
options.addOption(Option.builder("z").longOpt("zip")
|
||||||
.desc("name of zip file")
|
.desc("name of zip file")
|
||||||
.hasArg().required(false).build());
|
.hasArg().required(false).build());
|
||||||
|
options.addOption(Option.builder("u").longOpt("url")
|
||||||
|
.desc("url of zip file")
|
||||||
|
.hasArg().build());
|
||||||
options.addOption(Option.builder("c").longOpt("collection")
|
options.addOption(Option.builder("c").longOpt("collection")
|
||||||
.desc("destination collection(s) Handle or database ID")
|
.desc("destination collection(s) Handle or database ID")
|
||||||
.hasArg().required(false).build());
|
.hasArg().required(false).build());
|
||||||
@@ -55,6 +58,9 @@ public class ItemImportCLIScriptConfiguration extends ItemImportScriptConfigurat
|
|||||||
options.addOption(Option.builder("v").longOpt("validate")
|
options.addOption(Option.builder("v").longOpt("validate")
|
||||||
.desc("test run - do not actually import items")
|
.desc("test run - do not actually import items")
|
||||||
.hasArg(false).required(false).build());
|
.hasArg(false).required(false).build());
|
||||||
|
options.addOption(Option.builder("x").longOpt("exclude-bitstreams")
|
||||||
|
.desc("do not load or expect content bitstreams")
|
||||||
|
.hasArg(false).required(false).build());
|
||||||
options.addOption(Option.builder("p").longOpt("template")
|
options.addOption(Option.builder("p").longOpt("template")
|
||||||
.desc("apply template")
|
.desc("apply template")
|
||||||
.hasArg(false).required(false).build());
|
.hasArg(false).required(false).build());
|
||||||
|
@@ -8,25 +8,18 @@
|
|||||||
package org.dspace.app.itemimport;
|
package org.dspace.app.itemimport;
|
||||||
|
|
||||||
import java.io.InputStream;
|
import java.io.InputStream;
|
||||||
import java.sql.SQLException;
|
|
||||||
|
|
||||||
import org.apache.commons.cli.Option;
|
import org.apache.commons.cli.Option;
|
||||||
import org.apache.commons.cli.Options;
|
import org.apache.commons.cli.Options;
|
||||||
import org.dspace.authorize.service.AuthorizeService;
|
|
||||||
import org.dspace.core.Context;
|
|
||||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||||
import org.springframework.beans.factory.annotation.Autowired;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The {@link ScriptConfiguration} for the {@link ItemImport} script
|
* The {@link ScriptConfiguration} for the {@link ItemImport} script
|
||||||
*
|
*
|
||||||
* @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com)
|
* @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com)
|
||||||
*/
|
*/
|
||||||
public class ItemImportScriptConfiguration<T extends ItemImport> extends ScriptConfiguration<T> {
|
public class ItemImportScriptConfiguration<T extends ItemImport> extends ScriptConfiguration<T> {
|
||||||
|
|
||||||
@Autowired
|
|
||||||
private AuthorizeService authorizeService;
|
|
||||||
|
|
||||||
private Class<T> dspaceRunnableClass;
|
private Class<T> dspaceRunnableClass;
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@@ -39,15 +32,6 @@ public class ItemImportScriptConfiguration<T extends ItemImport> extends ScriptC
|
|||||||
this.dspaceRunnableClass = dspaceRunnableClass;
|
this.dspaceRunnableClass = dspaceRunnableClass;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean isAllowedToExecute(final Context context) {
|
|
||||||
try {
|
|
||||||
return authorizeService.isAdmin(context);
|
|
||||||
} catch (SQLException e) {
|
|
||||||
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Options getOptions() {
|
public Options getOptions() {
|
||||||
Options options = new Options();
|
Options options = new Options();
|
||||||
@@ -64,7 +48,10 @@ public class ItemImportScriptConfiguration<T extends ItemImport> extends ScriptC
|
|||||||
options.addOption(Option.builder("z").longOpt("zip")
|
options.addOption(Option.builder("z").longOpt("zip")
|
||||||
.desc("name of zip file")
|
.desc("name of zip file")
|
||||||
.type(InputStream.class)
|
.type(InputStream.class)
|
||||||
.hasArg().required().build());
|
.hasArg().build());
|
||||||
|
options.addOption(Option.builder("u").longOpt("url")
|
||||||
|
.desc("url of zip file")
|
||||||
|
.hasArg().build());
|
||||||
options.addOption(Option.builder("c").longOpt("collection")
|
options.addOption(Option.builder("c").longOpt("collection")
|
||||||
.desc("destination collection(s) Handle or database ID")
|
.desc("destination collection(s) Handle or database ID")
|
||||||
.hasArg().required(false).build());
|
.hasArg().required(false).build());
|
||||||
@@ -81,6 +68,9 @@ public class ItemImportScriptConfiguration<T extends ItemImport> extends ScriptC
|
|||||||
options.addOption(Option.builder("v").longOpt("validate")
|
options.addOption(Option.builder("v").longOpt("validate")
|
||||||
.desc("test run - do not actually import items")
|
.desc("test run - do not actually import items")
|
||||||
.hasArg(false).required(false).build());
|
.hasArg(false).required(false).build());
|
||||||
|
options.addOption(Option.builder("x").longOpt("exclude-bitstreams")
|
||||||
|
.desc("do not load or expect content bitstreams")
|
||||||
|
.hasArg(false).required(false).build());
|
||||||
options.addOption(Option.builder("p").longOpt("template")
|
options.addOption(Option.builder("p").longOpt("template")
|
||||||
.desc("apply template")
|
.desc("apply template")
|
||||||
.hasArg(false).required(false).build());
|
.hasArg(false).required(false).build());
|
||||||
|
@@ -62,6 +62,7 @@ import org.apache.commons.io.FileUtils;
|
|||||||
import org.apache.commons.lang3.RandomStringUtils;
|
import org.apache.commons.lang3.RandomStringUtils;
|
||||||
import org.apache.commons.lang3.StringUtils;
|
import org.apache.commons.lang3.StringUtils;
|
||||||
import org.apache.commons.lang3.exception.ExceptionUtils;
|
import org.apache.commons.lang3.exception.ExceptionUtils;
|
||||||
|
import org.apache.logging.log4j.LogManager;
|
||||||
import org.apache.logging.log4j.Logger;
|
import org.apache.logging.log4j.Logger;
|
||||||
import org.dspace.app.itemimport.service.ItemImportService;
|
import org.dspace.app.itemimport.service.ItemImportService;
|
||||||
import org.dspace.app.util.LocalSchemaFilenameFilter;
|
import org.dspace.app.util.LocalSchemaFilenameFilter;
|
||||||
@@ -135,7 +136,7 @@ import org.xml.sax.SAXException;
|
|||||||
* allow the registration of files (bitstreams) into DSpace.
|
* allow the registration of files (bitstreams) into DSpace.
|
||||||
*/
|
*/
|
||||||
public class ItemImportServiceImpl implements ItemImportService, InitializingBean {
|
public class ItemImportServiceImpl implements ItemImportService, InitializingBean {
|
||||||
private final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemImportServiceImpl.class);
|
private final Logger log = LogManager.getLogger();
|
||||||
|
|
||||||
private DSpaceRunnableHandler handler;
|
private DSpaceRunnableHandler handler;
|
||||||
|
|
||||||
@@ -181,6 +182,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
|
|||||||
protected String tempWorkDir;
|
protected String tempWorkDir;
|
||||||
|
|
||||||
protected boolean isTest = false;
|
protected boolean isTest = false;
|
||||||
|
protected boolean isExcludeContent = false;
|
||||||
protected boolean isResume = false;
|
protected boolean isResume = false;
|
||||||
protected boolean useWorkflow = false;
|
protected boolean useWorkflow = false;
|
||||||
protected boolean useWorkflowSendEmail = false;
|
protected boolean useWorkflowSendEmail = false;
|
||||||
@@ -950,9 +952,10 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
|
|||||||
String qualifier = getAttributeValue(n, "qualifier"); //NodeValue();
|
String qualifier = getAttributeValue(n, "qualifier"); //NodeValue();
|
||||||
// //getElementData(n,
|
// //getElementData(n,
|
||||||
// "qualifier");
|
// "qualifier");
|
||||||
String language = getAttributeValue(n, "language");
|
|
||||||
if (language != null) {
|
String language = null;
|
||||||
language = language.trim();
|
if (StringUtils.isNotBlank(getAttributeValue(n, "language"))) {
|
||||||
|
language = getAttributeValue(n, "language").trim();
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!isQuiet) {
|
if (!isQuiet) {
|
||||||
@@ -1403,6 +1406,10 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
|
|||||||
protected void processContentFileEntry(Context c, Item i, String path,
|
protected void processContentFileEntry(Context c, Item i, String path,
|
||||||
String fileName, String bundleName, boolean primary) throws SQLException,
|
String fileName, String bundleName, boolean primary) throws SQLException,
|
||||||
IOException, AuthorizeException {
|
IOException, AuthorizeException {
|
||||||
|
if (isExcludeContent) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
String fullpath = path + File.separatorChar + fileName;
|
String fullpath = path + File.separatorChar + fileName;
|
||||||
|
|
||||||
// get an input stream
|
// get an input stream
|
||||||
@@ -2342,6 +2349,11 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
|
|||||||
this.isTest = isTest;
|
this.isTest = isTest;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void setExcludeContent(boolean isExcludeContent) {
|
||||||
|
this.isExcludeContent = isExcludeContent;
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void setResume(boolean isResume) {
|
public void setResume(boolean isResume) {
|
||||||
this.isResume = isResume;
|
this.isResume = isResume;
|
||||||
|
@@ -211,6 +211,13 @@ public interface ItemImportService {
|
|||||||
*/
|
*/
|
||||||
public void setTest(boolean isTest);
|
public void setTest(boolean isTest);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set exclude-content flag.
|
||||||
|
*
|
||||||
|
* @param isExcludeContent true or false
|
||||||
|
*/
|
||||||
|
public void setExcludeContent(boolean isExcludeContent);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Set resume flag
|
* Set resume flag
|
||||||
*
|
*
|
||||||
|
@@ -22,7 +22,9 @@ public class ImageMagickPdfThumbnailFilter extends ImageMagickThumbnailFilter {
|
|||||||
File f2 = null;
|
File f2 = null;
|
||||||
File f3 = null;
|
File f3 = null;
|
||||||
try {
|
try {
|
||||||
f2 = getImageFile(f, 0, verbose);
|
// Step 1: get an image from our PDF file, with PDF-specific processing options
|
||||||
|
f2 = getImageFile(f, verbose);
|
||||||
|
// Step 2: use the image above to create the final resized and rotated thumbnail
|
||||||
f3 = getThumbnailFile(f2, verbose);
|
f3 = getThumbnailFile(f2, verbose);
|
||||||
byte[] bytes = Files.readAllBytes(f3.toPath());
|
byte[] bytes = Files.readAllBytes(f3.toPath());
|
||||||
return new ByteArrayInputStream(bytes);
|
return new ByteArrayInputStream(bytes);
|
||||||
|
@@ -14,6 +14,9 @@ import java.io.InputStream;
|
|||||||
import java.util.regex.Pattern;
|
import java.util.regex.Pattern;
|
 import java.util.regex.PatternSyntaxException;
 
+import org.apache.pdfbox.pdmodel.PDDocument;
+import org.apache.pdfbox.pdmodel.PDPage;
+import org.apache.pdfbox.pdmodel.common.PDRectangle;
 import org.dspace.content.Bitstream;
 import org.dspace.content.Bundle;
 import org.dspace.content.Item;
@@ -113,9 +116,17 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
 return f2;
 }
 
-public File getImageFile(File f, int page, boolean verbose)
+/**
+ * Return an image from a bitstream with specific processing options for
+ * PDFs. This is only used by ImageMagickPdfThumbnailFilter in order to
+ * generate an intermediate image file for use with getThumbnailFile.
+ */
+public File getImageFile(File f, boolean verbose)
 throws IOException, InterruptedException, IM4JavaException {
-File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
+// Writing an intermediate file to disk is inefficient, but since we're
+// doing it anyway, we should use a lossless format. IM's internal MIFF
+// is lossless like PNG and TIFF, but much faster.
+File f2 = new File(f.getParentFile(), f.getName() + ".miff");
 f2.deleteOnExit();
 ConvertCmd cmd = new ConvertCmd();
 IMOperation op = new IMOperation();
@@ -132,7 +143,27 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
 op.density(Integer.valueOf(density));
 }
 
-String s = "[" + page + "]";
+// Check the PDF's MediaBox and CropBox to see if they are the same.
+// If not, then tell ImageMagick to use the CropBox when generating
+// the thumbnail because the CropBox is generally used to define the
+// area displayed when a user opens the PDF on a screen, whereas the
+// MediaBox is used for print. Not all PDFs set these correctly, so
+// we can use ImageMagick's default behavior unless we see an explit
+// CropBox. Note: we don't need to do anything special to detect if
+// the CropBox is missing or empty because pdfbox will set it to the
+// same size as the MediaBox if it doesn't exist. Also note that we
+// only need to check the first page, since that's what we use for
+// generating the thumbnail (PDDocument uses a zero-based index).
+PDPage pdfPage = PDDocument.load(f).getPage(0);
+PDRectangle pdfPageMediaBox = pdfPage.getMediaBox();
+PDRectangle pdfPageCropBox = pdfPage.getCropBox();
+
+// This option must come *before* we open the input file.
+if (pdfPageCropBox != pdfPageMediaBox) {
+op.define("pdf:use-cropbox=true");
+}
+
+String s = "[0]";
 op.addImage(f.getAbsolutePath() + s);
 if (configurationService.getBooleanProperty(PRE + ".flatten", true)) {
 op.flatten();
@@ -185,20 +216,20 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
 if (description != null) {
 if (replaceRegex.matcher(description).matches()) {
 if (verbose) {
-System.out.format("%s %s matches pattern and is replacable.%n",
-description, nsrc);
+System.out.format("%s %s matches pattern and is replaceable.%n",
+description, n);
 }
 continue;
 }
 if (description.equals(getDescription())) {
 if (verbose) {
 System.out.format("%s %s is replaceable.%n",
-getDescription(), nsrc);
+getDescription(), n);
 }
 continue;
 }
 }
-System.out.format("Custom Thumbnail exists for %s for item %s. Thumbnail will not be generated.%n",
+System.out.format("Custom thumbnail exists for %s for item %s. Thumbnail will not be generated.%n",
 nsrc, item.getHandle());
 return false;
 }
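Note on the CropBox hunk above: PDFBox's PDPage.getCropBox() is documented to fall back to the MediaBox when no CropBox is declared, which appears to be what the reference comparison `pdfPageCropBox != pdfPageMediaBox` relies on to mean "an explicit CropBox is present". The following is a minimal standalone sketch of the same check done by value rather than by reference, assuming PDFBox 2.x on the classpath; the class and method names are illustrative and not part of the patch.

    import java.io.File;
    import java.io.IOException;

    import org.apache.pdfbox.pdmodel.PDDocument;
    import org.apache.pdfbox.pdmodel.PDPage;
    import org.apache.pdfbox.pdmodel.common.PDRectangle;

    public class CropBoxCheck {
        /**
         * Return true when page 0 declares a CropBox whose geometry differs
         * from the MediaBox, i.e. when "-define pdf:use-cropbox=true" would
         * change the rendered area. Comparison is by value, not by reference.
         */
        public static boolean hasDistinctCropBox(File pdf) throws IOException {
            try (PDDocument document = PDDocument.load(pdf)) {
                PDPage page = document.getPage(0);
                PDRectangle mediaBox = page.getMediaBox();
                PDRectangle cropBox = page.getCropBox();
                return cropBox.getLowerLeftX() != mediaBox.getLowerLeftX()
                    || cropBox.getLowerLeftY() != mediaBox.getLowerLeftY()
                    || cropBox.getWidth() != mediaBox.getWidth()
                    || cropBox.getHeight() != mediaBox.getHeight();
            }
        }
    }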
@@ -0,0 +1,76 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.app.mediafilter;
+
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Files;
+
+import org.dspace.content.Item;
+import org.im4java.core.ConvertCmd;
+import org.im4java.core.IM4JavaException;
+import org.im4java.core.IMOperation;
+
+
+/**
+ * Filter video bitstreams, scaling the image to be within the bounds of
+ * thumbnail.maxwidth, thumbnail.maxheight, the size we want our thumbnail to be
+ * no bigger than. Creates only JPEGs.
+ */
+public class ImageMagickVideoThumbnailFilter extends ImageMagickThumbnailFilter {
+private static final int DEFAULT_WIDTH = 180;
+private static final int DEFAULT_HEIGHT = 120;
+private static final int FRAME_NUMBER = 100;
+
+/**
+ * @param currentItem item
+ * @param source source input stream
+ * @param verbose verbose mode
+ * @return InputStream the resulting input stream
+ * @throws Exception if error
+ */
+@Override
+public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
+throws Exception {
+File f = inputStreamToTempFile(source, "imthumb", ".tmp");
+File f2 = null;
+try {
+f2 = getThumbnailFile(f, verbose);
+byte[] bytes = Files.readAllBytes(f2.toPath());
+return new ByteArrayInputStream(bytes);
+} finally {
+//noinspection ResultOfMethodCallIgnored
+f.delete();
+if (f2 != null) {
+//noinspection ResultOfMethodCallIgnored
+f2.delete();
+}
+}
+}
+
+@Override
+public File getThumbnailFile(File f, boolean verbose)
+throws IOException, InterruptedException, IM4JavaException {
+File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
+f2.deleteOnExit();
+ConvertCmd cmd = new ConvertCmd();
+IMOperation op = new IMOperation();
+op.autoOrient();
+op.addImage("VIDEO:" + f.getAbsolutePath() + "[" + FRAME_NUMBER + "]");
+op.thumbnail(configurationService.getIntProperty("thumbnail.maxwidth", DEFAULT_WIDTH),
+configurationService.getIntProperty("thumbnail.maxheight", DEFAULT_HEIGHT));
+op.addImage(f2.getAbsolutePath());
+if (verbose) {
+System.out.println("IM Thumbnail Param: " + op);
+}
+cmd.run(op);
+return f2;
+}
+}
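For context, the new video filter hands ImageMagick a single decoded frame (index 100) through its VIDEO: pseudo-format, which requires a video delegate such as ffmpeg to be installed alongside ImageMagick. Below is a minimal standalone sketch of the same im4java operation; the class name and file paths are illustrative, while the fixed frame index and the 180x120 bounding box mirror the defaults in the class above.

    import java.io.File;

    import org.im4java.core.ConvertCmd;
    import org.im4java.core.IMOperation;

    public class VideoFrameThumb {
        // Roughly the operation the filter assembles: grab one frame from the
        // video (index 100 here), auto-orient it, scale it to fit a bounding
        // box, and write a JPEG. Requires ImageMagick with a video delegate
        // (typically ffmpeg) available on the PATH.
        public static void main(String[] args) throws Exception {
            File input = new File(args[0]);
            File output = new File(args[1]);
            IMOperation op = new IMOperation();
            op.autoOrient();
            op.addImage("VIDEO:" + input.getAbsolutePath() + "[100]");
            op.thumbnail(180, 120);
            op.addImage(output.getAbsolutePath());
            new ConvertCmd().run(op);
        }
    }

The generated command is roughly `convert -auto-orient VIDEO:input.mp4[100] -thumbnail 180x120 out.jpg`.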
@@ -7,25 +7,16 @@
  */
 package org.dspace.app.mediafilter;
 
-import java.sql.SQLException;
 
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
-import org.dspace.authorize.service.AuthorizeService;
-import org.dspace.core.Context;
 import org.dspace.scripts.configuration.ScriptConfiguration;
-import org.springframework.beans.factory.annotation.Autowired;
 
 public class MediaFilterScriptConfiguration<T extends MediaFilterScript> extends ScriptConfiguration<T> {
 
-@Autowired
-private AuthorizeService authorizeService;
 
 private Class<T> dspaceRunnableClass;
 
 private static final String MEDIA_FILTER_PLUGINS_KEY = "filter.plugins";
 
 
 @Override
 public Class<T> getDspaceRunnableClass() {
 return dspaceRunnableClass;
@@ -36,16 +27,6 @@ public class MediaFilterScriptConfiguration<T extends MediaFilterScript> extends
 this.dspaceRunnableClass = dspaceRunnableClass;
 }
 
 
-@Override
-public boolean isAllowedToExecute(final Context context) {
-try {
-return authorizeService.isAdmin(context);
-} catch (SQLException e) {
-throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
-}
-}
 
 @Override
 public Options getOptions() {
 Options options = new Options();
@@ -8,13 +8,17 @@
 package org.dspace.app.mediafilter;
 
 import java.io.InputStream;
+import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
 
+import org.apache.commons.lang3.StringUtils;
 import org.dspace.app.mediafilter.service.MediaFilterService;
+import org.dspace.authorize.AuthorizeException;
 import org.dspace.authorize.service.AuthorizeService;
 import org.dspace.content.Bitstream;
 import org.dspace.content.BitstreamFormat;
@@ -315,25 +319,25 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
 
 // check if destination bitstream exists
 Bundle existingBundle = null;
-Bitstream existingBitstream = null;
+List<Bitstream> existingBitstreams = new ArrayList<Bitstream>();
 List<Bundle> bundles = itemService.getBundles(item, formatFilter.getBundleName());
 
 if (bundles.size() > 0) {
-// only finds the last match (FIXME?)
+// only finds the last matching bundle and all matching bitstreams in the proper bundle(s)
 for (Bundle bundle : bundles) {
 List<Bitstream> bitstreams = bundle.getBitstreams();
 
 for (Bitstream bitstream : bitstreams) {
 if (bitstream.getName().trim().equals(newName.trim())) {
 existingBundle = bundle;
-existingBitstream = bitstream;
+existingBitstreams.add(bitstream);
 }
 }
 }
 }
 
 // if exists and overwrite = false, exit
-if (!overWrite && (existingBitstream != null)) {
+if (!overWrite && (existingBitstreams.size() > 0)) {
 if (!isQuiet) {
 logInfo("SKIPPED: bitstream " + source.getID()
 + " (item: " + item.getHandle() + ") because '" + newName + "' already exists");
@@ -388,18 +392,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
 bitstreamService.update(context, b);
 
 //Set permissions on the derivative bitstream
-//- First remove any existing policies
-authorizeService.removeAllPolicies(context, b);
-
-//- Determine if this is a public-derivative format
-if (publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) {
-//- Set derivative bitstream to be publicly accessible
-Group anonymous = groupService.findByName(context, Group.ANONYMOUS);
-authorizeService.addPolicy(context, b, Constants.READ, anonymous);
-} else {
-//- Inherit policies from the source bitstream
-authorizeService.inheritPolicies(context, source, b);
-}
+updatePoliciesOfDerivativeBitstream(context, b, formatFilter, source);
 
 //do post-processing of the generated bitstream
 formatFilter.postProcessBitstream(context, item, b);
@@ -408,9 +401,8 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
 logError("!!! OutOfMemoryError !!!");
 }
 
-// fixme - set date?
 // we are overwriting, so remove old bitstream
-if (existingBitstream != null) {
+for (Bitstream existingBitstream : existingBitstreams) {
 bundleService.removeBitstream(context, existingBundle, existingBitstream);
 }
 
@@ -422,6 +414,71 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
 return true;
 }
 
+@Override
+public void updatePoliciesOfDerivativeBitstreams(Context context, Item item, Bitstream source)
+throws SQLException, AuthorizeException {
+
+if (filterClasses == null) {
+return;
+}
+
+for (FormatFilter formatFilter : filterClasses) {
+for (Bitstream bitstream : findDerivativeBitstreams(item, source, formatFilter)) {
+updatePoliciesOfDerivativeBitstream(context, bitstream, formatFilter, source);
+}
+}
+}
+
+/**
+ * find derivative bitstreams related to source bitstream
+ *
+ * @param item item containing bitstreams
+ * @param source source bitstream
+ * @param formatFilter formatFilter
+ * @return list of derivative bitstreams from source bitstream
+ * @throws SQLException If something goes wrong in the database
+ */
+private List<Bitstream> findDerivativeBitstreams(Item item, Bitstream source, FormatFilter formatFilter)
+throws SQLException {
+
+String bitstreamName = formatFilter.getFilteredName(source.getName());
+List<Bundle> bundles = itemService.getBundles(item, formatFilter.getBundleName());
+
+return bundles.stream()
+.flatMap(bundle ->
+bundle.getBitstreams().stream())
+.filter(bitstream ->
+StringUtils.equals(bitstream.getName().trim(), bitstreamName.trim()))
+.collect(Collectors.toList());
+}
+
+/**
+ * update resource polices of derivative bitstreams.
+ * by remove all resource policies and
+ * set derivative bitstreams to be publicly accessible or
+ * replace derivative bitstreams policies using
+ * the same in the source bitstream.
+ *
+ * @param context the context
+ * @param bitstream derivative bitstream
+ * @param formatFilter formatFilter
+ * @param source the source bitstream
+ * @throws SQLException If something goes wrong in the database
+ * @throws AuthorizeException if authorization error
+ */
+private void updatePoliciesOfDerivativeBitstream(Context context, Bitstream bitstream, FormatFilter formatFilter,
+Bitstream source) throws SQLException, AuthorizeException {
+
+authorizeService.removeAllPolicies(context, bitstream);
+
+if (publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) {
+Group anonymous = groupService.findByName(context, Group.ANONYMOUS);
+authorizeService.addPolicy(context, bitstream, Constants.READ, anonymous);
+} else {
+authorizeService.replaceAllPolicies(context, source, bitstream);
+}
+}
+
 @Override
 public Item getCurrentItem() {
 return currentItem;
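The new updatePoliciesOfDerivativeBitstreams() walks every configured filter, finds derivatives whose name equals FormatFilter.getFilteredName(source) in that filter's bundle, and reapplies their policies: anonymous READ for public filters, otherwise a copy of the source bitstream's policies. A minimal sketch of how a caller might use it after the access conditions of an original bitstream change is shown below; the wrapper class is hypothetical, and only the service call itself comes from the interface added in this commit.

    import java.sql.SQLException;

    import org.dspace.app.mediafilter.service.MediaFilterService;
    import org.dspace.authorize.AuthorizeException;
    import org.dspace.content.Bitstream;
    import org.dspace.content.Item;
    import org.dspace.core.Context;

    public class DerivativePolicySync {
        /**
         * After the access conditions of an original bitstream change, ask the
         * media filter service to realign the policies of all derivatives
         * (thumbnails, extracted text, ...) that were generated from it.
         */
        public static void resync(Context context, MediaFilterService mediaFilterService,
                                  Item item, Bitstream original)
                throws SQLException, AuthorizeException {
            mediaFilterService.updatePoliciesOfDerivativeBitstreams(context, item, original);
        }
    }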
@@ -7,10 +7,12 @@
  */
 package org.dspace.app.mediafilter.service;
 
+import java.sql.SQLException;
 import java.util.List;
 import java.util.Map;
 
 import org.dspace.app.mediafilter.FormatFilter;
+import org.dspace.authorize.AuthorizeException;
 import org.dspace.content.Bitstream;
 import org.dspace.content.Collection;
 import org.dspace.content.Community;
@@ -91,6 +93,22 @@ public interface MediaFilterService {
 public boolean processBitstream(Context context, Item item, Bitstream source, FormatFilter formatFilter)
 throws Exception;
 
+/**
+ * update resource polices of derivative bitstreams
+ * related to source bitstream.
+ * set derivative bitstreams to be publicly accessible or
+ * replace derivative bitstreams policies using
+ * the same in the source bitstream.
+ *
+ * @param context context
+ * @param item item containing bitstreams
+ * @param source source bitstream
+ * @throws SQLException If something goes wrong in the database
+ * @throws AuthorizeException if authorization error
+ */
+public void updatePoliciesOfDerivativeBitstreams(Context context, Item item, Bitstream source)
+throws SQLException, AuthorizeException;
+
 /**
  * Return the item that is currently being processed/filtered
  * by the MediaFilterManager.
@@ -31,5 +31,5 @@ public interface RequestItemAuthorExtractor {
  */
 @NonNull
 public List<RequestItemAuthor> getRequestItemAuthor(Context context, Item item)
 throws SQLException;
 }
@@ -11,54 +11,59 @@ package org.dspace.app.requestitem;
 import java.io.IOException;
 import java.sql.SQLException;
 import java.util.List;
+import javax.annotation.ManagedBean;
+import javax.inject.Inject;
+import javax.inject.Singleton;
 import javax.mail.MessagingException;
 
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
-import org.dspace.app.requestitem.factory.RequestItemServiceFactory;
 import org.dspace.app.requestitem.service.RequestItemService;
 import org.dspace.authorize.AuthorizeException;
 import org.dspace.content.Bitstream;
 import org.dspace.content.Bundle;
 import org.dspace.content.Item;
-import org.dspace.content.factory.ContentServiceFactory;
 import org.dspace.content.service.BitstreamService;
 import org.dspace.core.Context;
 import org.dspace.core.Email;
 import org.dspace.core.I18nUtil;
 import org.dspace.core.LogHelper;
 import org.dspace.eperson.EPerson;
-import org.dspace.handle.factory.HandleServiceFactory;
 import org.dspace.handle.service.HandleService;
 import org.dspace.services.ConfigurationService;
-import org.dspace.services.factory.DSpaceServicesFactory;
 
 /**
  * Send item requests and responses by email.
  *
+ * <p>The "strategy" by which approvers are chosen is in an implementation of
+ * {@link RequestItemAuthorExtractor} which is injected by the name
+ * {@code requestItemAuthorExtractor}. See the DI configuration documents.
+ *
 * @author Mark H. Wood <mwood@iupui.edu>
 */
+@Singleton
+@ManagedBean
 public class RequestItemEmailNotifier {
 private static final Logger LOG = LogManager.getLogger();
 
-private static final BitstreamService bitstreamService
-= ContentServiceFactory.getInstance().getBitstreamService();
+@Inject
+protected BitstreamService bitstreamService;
 
-private static final ConfigurationService configurationService
-= DSpaceServicesFactory.getInstance().getConfigurationService();
+@Inject
+protected ConfigurationService configurationService;
 
-private static final HandleService handleService
-= HandleServiceFactory.getInstance().getHandleService();
+@Inject
+protected HandleService handleService;
 
-private static final RequestItemService requestItemService
-= RequestItemServiceFactory.getInstance().getRequestItemService();
+@Inject
+protected RequestItemService requestItemService;
 
-private static final RequestItemAuthorExtractor requestItemAuthorExtractor
-= DSpaceServicesFactory.getInstance()
-.getServiceManager()
-.getServiceByName(null, RequestItemAuthorExtractor.class);
+protected final RequestItemAuthorExtractor requestItemAuthorExtractor;
 
-private RequestItemEmailNotifier() {}
+@Inject
+public RequestItemEmailNotifier(RequestItemAuthorExtractor requestItemAuthorExtractor) {
+this.requestItemAuthorExtractor = requestItemAuthorExtractor;
+}
 
 /**
  * Send the request to the approver(s).
@@ -69,7 +74,7 @@ public class RequestItemEmailNotifier {
 * @throws IOException passed through.
 * @throws SQLException if the message was not sent.
 */
-static public void sendRequest(Context context, RequestItem ri, String responseLink)
+public void sendRequest(Context context, RequestItem ri, String responseLink)
 throws IOException, SQLException {
 // Who is making this request?
 List<RequestItemAuthor> authors = requestItemAuthorExtractor
@@ -146,12 +151,38 @@ public class RequestItemEmailNotifier {
 * @param message email body (may be empty).
 * @throws IOException if sending failed.
 */
-static public void sendResponse(Context context, RequestItem ri, String subject,
+public void sendResponse(Context context, RequestItem ri, String subject,
 String message)
 throws IOException {
+// Who granted this request?
+List<RequestItemAuthor> grantors;
+try {
+grantors = requestItemAuthorExtractor.getRequestItemAuthor(context, ri.getItem());
+} catch (SQLException e) {
+LOG.warn("Failed to get grantor's name and address: {}", e.getMessage());
+grantors = List.of();
+}
+
+String grantorName;
+String grantorAddress;
+if (grantors.isEmpty()) {
+grantorName = configurationService.getProperty("mail.admin.name");
+grantorAddress = configurationService.getProperty("mail.admin");
+} else {
+RequestItemAuthor grantor = grantors.get(0); // XXX Cannot know which one
+grantorName = grantor.getFullName();
+grantorAddress = grantor.getEmail();
+}
+
 // Build an email back to the requester.
-Email email = new Email();
-email.setContent("body", message);
+Email email = Email.getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(),
+ri.isAccept_request() ? "request_item.granted" : "request_item.rejected"));
+email.addArgument(ri.getReqName()); // {0} requestor's name
+email.addArgument(handleService.getCanonicalForm(ri.getItem().getHandle())); // {1} URL of the requested Item
+email.addArgument(ri.getItem().getName()); // {2} title of the requested Item
+email.addArgument(grantorName); // {3} name of the grantor
+email.addArgument(grantorAddress); // {4} email of the grantor
+email.addArgument(message); // {5} grantor's optional message
 email.setSubject(subject);
 email.addRecipient(ri.getReqEmail());
 // Attach bitstreams.
@@ -166,17 +197,25 @@ public class RequestItemEmailNotifier {
 if (!bitstream.getFormat(context).isInternal() &&
 requestItemService.isRestricted(context,
 bitstream)) {
-email.addAttachment(bitstreamService.retrieve(context,
-bitstream), bitstream.getName(),
+// #8636 Anyone receiving the email can respond to the
+// request without authenticating into DSpace
+context.turnOffAuthorisationSystem();
+email.addAttachment(
+bitstreamService.retrieve(context, bitstream),
+bitstream.getName(),
 bitstream.getFormat(context).getMIMEType());
+context.restoreAuthSystemState();
 }
 }
 }
 } else {
 Bitstream bitstream = ri.getBitstream();
+// #8636 Anyone receiving the email can respond to the request without authenticating into DSpace
+context.turnOffAuthorisationSystem();
 email.addAttachment(bitstreamService.retrieve(context, bitstream),
 bitstream.getName(),
 bitstream.getFormat(context).getMIMEType());
+context.restoreAuthSystemState();
 }
 email.send();
 } else {
@@ -206,7 +245,7 @@ public class RequestItemEmailNotifier {
 * @throws IOException if the message body cannot be loaded or the message
 * cannot be sent.
 */
-static public void requestOpenAccess(Context context, RequestItem ri)
+public void requestOpenAccess(Context context, RequestItem ri)
 throws IOException {
 Email message = Email.getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(),
 "request_item.admin"));
@@ -228,8 +267,13 @@ public class RequestItemEmailNotifier {
 message.addArgument(bitstreamName); // {0} bitstream name or "all"
 message.addArgument(item.getHandle()); // {1} Item handle
 message.addArgument(ri.getToken()); // {2} Request token
-message.addArgument(approver.getFullName()); // {3} Approver's name
-message.addArgument(approver.getEmail()); // {4} Approver's address
+if (approver != null) {
+message.addArgument(approver.getFullName()); // {3} Approver's name
+message.addArgument(approver.getEmail()); // {4} Approver's address
+} else {
+message.addArgument("anonymous approver"); // [3] Approver's name
+message.addArgument(configurationService.getProperty("mail.admin")); // [4] Approver's address
+}
 
 // Who gets this message?
 String recipient;
|
|||||||
import org.springframework.lang.NonNull;
|
import org.springframework.lang.NonNull;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* RequestItem strategy to allow DSpace support team's helpdesk to receive requestItem request.
|
* RequestItem strategy to allow DSpace support team's help desk to receive
|
||||||
* With this enabled, then the Item author/submitter doesn't receive the request, but the helpdesk instead does.
|
* requestItem requests. With this enabled, the Item author/submitter doesn't
|
||||||
|
* receive the request, but the help desk instead does.
|
||||||
*
|
*
|
||||||
* Failover to the RequestItemSubmitterStrategy, which means the submitter would get the request if there is no
|
* <p>Fails over to the {@link RequestItemSubmitterStrategy}, which means the
|
||||||
* specified helpdesk email.
|
* submitter would get the request if there is no specified help desk email.
|
||||||
*
|
*
|
||||||
* @author Sam Ottenhoff
|
* @author Sam Ottenhoff
|
||||||
* @author Peter Dietz
|
* @author Peter Dietz
|
||||||
*/
|
*/
|
||||||
public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
|
public class RequestItemHelpdeskStrategy
|
||||||
|
extends RequestItemSubmitterStrategy {
|
||||||
|
static final String P_HELPDESK_OVERRIDE
|
||||||
|
= "request.item.helpdesk.override";
|
||||||
|
static final String P_MAIL_HELPDESK = "mail.helpdesk";
|
||||||
|
|
||||||
@Autowired(required = true)
|
@Autowired(required = true)
|
||||||
protected EPersonService ePersonService;
|
protected EPersonService ePersonService;
|
||||||
|
|
||||||
@Autowired(required = true)
|
@Autowired(required = true)
|
||||||
private ConfigurationService configuration;
|
protected ConfigurationService configurationService;
|
||||||
|
|
||||||
public RequestItemHelpdeskStrategy() {
|
public RequestItemHelpdeskStrategy() {
|
||||||
}
|
}
|
||||||
@@ -45,9 +51,9 @@ public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
|
|||||||
@NonNull
|
@NonNull
|
||||||
public List<RequestItemAuthor> getRequestItemAuthor(Context context, Item item)
|
public List<RequestItemAuthor> getRequestItemAuthor(Context context, Item item)
|
||||||
throws SQLException {
|
throws SQLException {
|
||||||
boolean helpdeskOverridesSubmitter = configuration
|
boolean helpdeskOverridesSubmitter = configurationService
|
||||||
.getBooleanProperty("request.item.helpdesk.override", false);
|
.getBooleanProperty("request.item.helpdesk.override", false);
|
||||||
String helpDeskEmail = configuration.getProperty("mail.helpdesk");
|
String helpDeskEmail = configurationService.getProperty("mail.helpdesk");
|
||||||
|
|
||||||
if (helpdeskOverridesSubmitter && StringUtils.isNotBlank(helpDeskEmail)) {
|
if (helpdeskOverridesSubmitter && StringUtils.isNotBlank(helpDeskEmail)) {
|
||||||
List<RequestItemAuthor> authors = new ArrayList<>(1);
|
List<RequestItemAuthor> authors = new ArrayList<>(1);
|
||||||
@@ -60,16 +66,18 @@ public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Return a RequestItemAuthor object for the specified helpdesk email address.
|
* Return a RequestItemAuthor object for the specified help desk email address.
|
||||||
* It makes an attempt to find if there is a matching eperson for the helpdesk address, to use the name,
|
* It makes an attempt to find if there is a matching {@link EPerson} for
|
||||||
* Otherwise it falls back to a helpdeskname key in the Messages.props.
|
* the help desk address, to use its name. Otherwise it falls back to the
|
||||||
|
* {@code helpdeskname} key in {@code Messages.properties}.
|
||||||
*
|
*
|
||||||
* @param context context
|
* @param context context
|
||||||
* @param helpDeskEmail email
|
* @param helpDeskEmail email
|
||||||
* @return RequestItemAuthor
|
* @return RequestItemAuthor
|
||||||
* @throws SQLException if database error
|
* @throws SQLException if database error
|
||||||
*/
|
*/
|
||||||
public RequestItemAuthor getHelpDeskPerson(Context context, String helpDeskEmail) throws SQLException {
|
public RequestItemAuthor getHelpDeskPerson(Context context, String helpDeskEmail)
|
||||||
|
throws SQLException {
|
||||||
context.turnOffAuthorisationSystem();
|
context.turnOffAuthorisationSystem();
|
||||||
EPerson helpdeskEPerson = ePersonService.findByEmail(context, helpDeskEmail);
|
EPerson helpdeskEPerson = ePersonService.findByEmail(context, helpDeskEmail);
|
||||||
context.restoreAuthSystemState();
|
context.restoreAuthSystemState();
|
||||||
|
@@ -9,6 +9,7 @@ package org.dspace.app.requestitem;
|
|||||||
|
|
||||||
import java.sql.SQLException;
|
import java.sql.SQLException;
|
||||||
import java.util.Date;
|
import java.util.Date;
|
||||||
|
import java.util.Iterator;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
|
||||||
import org.apache.logging.log4j.LogManager;
|
import org.apache.logging.log4j.LogManager;
|
||||||
@@ -90,6 +91,11 @@ public class RequestItemServiceImpl implements RequestItemService {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Iterator<RequestItem> findByItem(Context context, Item item) throws SQLException {
|
||||||
|
return requestItemDAO.findByItem(context, item);
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void update(Context context, RequestItem requestItem) {
|
public void update(Context context, RequestItem requestItem) {
|
||||||
try {
|
try {
|
||||||
|
@@ -22,7 +22,6 @@ import org.springframework.lang.NonNull;
|
|||||||
* @author Andrea Bollini
|
* @author Andrea Bollini
|
||||||
*/
|
*/
|
||||||
public class RequestItemSubmitterStrategy implements RequestItemAuthorExtractor {
|
public class RequestItemSubmitterStrategy implements RequestItemAuthorExtractor {
|
||||||
|
|
||||||
public RequestItemSubmitterStrategy() {
|
public RequestItemSubmitterStrategy() {
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -8,8 +8,10 @@
|
|||||||
package org.dspace.app.requestitem.dao;
|
package org.dspace.app.requestitem.dao;
|
||||||
|
|
||||||
import java.sql.SQLException;
|
import java.sql.SQLException;
|
||||||
|
import java.util.Iterator;
|
||||||
|
|
||||||
import org.dspace.app.requestitem.RequestItem;
|
import org.dspace.app.requestitem.RequestItem;
|
||||||
|
import org.dspace.content.Item;
|
||||||
import org.dspace.core.Context;
|
import org.dspace.core.Context;
|
||||||
import org.dspace.core.GenericDAO;
|
import org.dspace.core.GenericDAO;
|
||||||
|
|
||||||
@@ -32,4 +34,6 @@ public interface RequestItemDAO extends GenericDAO<RequestItem> {
|
|||||||
* @throws SQLException passed through.
|
* @throws SQLException passed through.
|
||||||
*/
|
*/
|
||||||
public RequestItem findByToken(Context context, String token) throws SQLException;
|
public RequestItem findByToken(Context context, String token) throws SQLException;
|
||||||
|
|
||||||
|
public Iterator<RequestItem> findByItem(Context context, Item item) throws SQLException;
|
||||||
}
|
}
|
||||||
|
@@ -8,6 +8,8 @@
|
|||||||
package org.dspace.app.requestitem.dao.impl;
|
package org.dspace.app.requestitem.dao.impl;
|
||||||
|
|
||||||
import java.sql.SQLException;
|
import java.sql.SQLException;
|
||||||
|
import java.util.Iterator;
|
||||||
|
import javax.persistence.Query;
|
||||||
import javax.persistence.criteria.CriteriaBuilder;
|
import javax.persistence.criteria.CriteriaBuilder;
|
||||||
import javax.persistence.criteria.CriteriaQuery;
|
import javax.persistence.criteria.CriteriaQuery;
|
||||||
import javax.persistence.criteria.Root;
|
import javax.persistence.criteria.Root;
|
||||||
@@ -15,6 +17,7 @@ import javax.persistence.criteria.Root;
|
|||||||
import org.dspace.app.requestitem.RequestItem;
|
import org.dspace.app.requestitem.RequestItem;
|
||||||
import org.dspace.app.requestitem.RequestItem_;
|
import org.dspace.app.requestitem.RequestItem_;
|
||||||
import org.dspace.app.requestitem.dao.RequestItemDAO;
|
import org.dspace.app.requestitem.dao.RequestItemDAO;
|
||||||
|
import org.dspace.content.Item;
|
||||||
import org.dspace.core.AbstractHibernateDAO;
|
import org.dspace.core.AbstractHibernateDAO;
|
||||||
import org.dspace.core.Context;
|
import org.dspace.core.Context;
|
||||||
|
|
||||||
@@ -39,4 +42,10 @@ public class RequestItemDAOImpl extends AbstractHibernateDAO<RequestItem> implem
|
|||||||
criteriaQuery.where(criteriaBuilder.equal(requestItemRoot.get(RequestItem_.token), token));
|
criteriaQuery.where(criteriaBuilder.equal(requestItemRoot.get(RequestItem_.token), token));
|
||||||
return uniqueResult(context, criteriaQuery, false, RequestItem.class);
|
return uniqueResult(context, criteriaQuery, false, RequestItem.class);
|
||||||
}
|
}
|
||||||
|
@Override
|
||||||
|
public Iterator<RequestItem> findByItem(Context context, Item item) throws SQLException {
|
||||||
|
Query query = createQuery(context, "FROM RequestItem WHERE item_id= :uuid");
|
||||||
|
query.setParameter("uuid", item.getID());
|
||||||
|
return iterate(query);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
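findByItem() gives the request-a-copy service a way to enumerate all stored requests for a given Item: the HQL above filters on item_id and returns a lazy Iterator. Below is a small sketch of service-level usage, counting the requests recorded for one item; the helper class name is illustrative, only the findByItem() call comes from this commit.

    import java.sql.SQLException;
    import java.util.Iterator;

    import org.dspace.app.requestitem.RequestItem;
    import org.dspace.app.requestitem.service.RequestItemService;
    import org.dspace.content.Item;
    import org.dspace.core.Context;

    public class RequestCounter {
        /** Count the copy requests recorded for one item, using the new lookup. */
        public static int count(Context context, RequestItemService requestItemService, Item item)
                throws SQLException {
            int n = 0;
            Iterator<RequestItem> requests = requestItemService.findByItem(context, item);
            while (requests.hasNext()) {
                requests.next();
                n++;
            }
            return n;
        }
    }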
@@ -12,10 +12,15 @@
 * e-mailed to a responsible party for consideration and action. Find details
 * in the user documentation under the rubric "Request a Copy".
 *
- * <p>This package includes several "strategy" classes which discover responsible
- * parties in various ways. See {@link RequestItemSubmitterStrategy} and the
- * classes which extend it. A strategy class must be configured and identified
- * as {@link RequestItemAuthorExtractor} for injection into code which requires
- * Request a Copy services.
+ * <p>Mailing is handled by {@link RequestItemEmailNotifier}. Responsible
+ * parties are represented by {@link RequestItemAuthor}
+ *
+ * <p>This package includes several "strategy" classes which discover
+ * responsible parties in various ways. See
+ * {@link RequestItemSubmitterStrategy} and the classes which extend it, and
+ * others which implement {@link RequestItemAuthorExtractor}. A strategy class
+ * must be configured and identified as {@link requestItemAuthorExtractor}
+ * (<em>note capitalization</em>) for injection into code which requires Request
+ * a Copy services.
 */
 package org.dspace.app.requestitem;
@@ -8,6 +8,7 @@
 package org.dspace.app.requestitem.service;
 
 import java.sql.SQLException;
+import java.util.Iterator;
 import java.util.List;
 
 import org.dspace.app.requestitem.RequestItem;
@@ -62,6 +63,14 @@ public interface RequestItemService {
 */
 public RequestItem findByToken(Context context, String token);
 
+/**
+ * Retrieve a request based on the item.
+ * @param context current DSpace session.
+ * @param item the item to find requests for.
+ * @return the matching requests, or null if not found.
+ */
+public Iterator<RequestItem> findByItem(Context context, Item item) throws SQLException;
+
 /**
 * Save updates to the record. Only accept_request, and decision_date are set-able.
 *
@@ -8,7 +8,6 @@
 package org.dspace.app.solrdatabaseresync;
 
 import org.apache.commons.cli.Options;
-import org.dspace.core.Context;
 import org.dspace.scripts.configuration.ScriptConfiguration;
 
 /**
@@ -27,11 +26,6 @@ public class SolrDatabaseResyncCliScriptConfiguration extends ScriptConfiguratio
 this.dspaceRunnableClass = dspaceRunnableClass;
 }
 
-@Override
-public boolean isAllowedToExecute(Context context) {
-return true;
-}
-
 @Override
 public Options getOptions() {
 if (options == null) {
@@ -10,6 +10,7 @@ package org.dspace.app.util;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.Optional;
 import java.util.regex.Pattern;
 import java.util.regex.PatternSyntaxException;
 import javax.annotation.Nullable;
@@ -131,10 +132,15 @@ public class DCInput {
 private boolean closedVocabulary = false;
 
 /**
- * the regex to comply with, null if nothing
+ * the regex in ECMAScript standard format, usable also by rests.
 */
 private String regex = null;
 
+/**
+ * the computed pattern, null if nothing
+ */
+private Pattern pattern = null;
+
 /**
 * allowed document types
 */
@@ -178,7 +184,7 @@ public class DCInput {
 
 //check if the input have a language tag
 language = Boolean.valueOf(fieldMap.get("language"));
-valueLanguageList = new ArrayList();
+valueLanguageList = new ArrayList<>();
 if (language) {
 String languageNameTmp = fieldMap.get("value-pairs-name");
 if (StringUtils.isBlank(languageNameTmp)) {
@@ -191,7 +197,7 @@ public class DCInput {
 repeatable = "true".equalsIgnoreCase(repStr)
 || "yes".equalsIgnoreCase(repStr);
 String nameVariantsString = fieldMap.get("name-variants");
-nameVariants = (StringUtils.isNotBlank(nameVariantsString)) ?
+nameVariants = StringUtils.isNotBlank(nameVariantsString) ?
 nameVariantsString.equalsIgnoreCase("true") : false;
 label = fieldMap.get("label");
 inputType = fieldMap.get("input-type");
@@ -203,17 +209,17 @@ public class DCInput {
 }
 hint = fieldMap.get("hint");
 warning = fieldMap.get("required");
-required = (warning != null && warning.length() > 0);
+required = warning != null && warning.length() > 0;
 visibility = fieldMap.get("visibility");
 readOnly = fieldMap.get("readonly");
 vocabulary = fieldMap.get("vocabulary");
-regex = fieldMap.get("regex");
+this.initRegex(fieldMap.get("regex"));
 String closedVocabularyStr = fieldMap.get("closedVocabulary");
 closedVocabulary = "true".equalsIgnoreCase(closedVocabularyStr)
 || "yes".equalsIgnoreCase(closedVocabularyStr);
 
 // parsing of the <type-bind> element (using the colon as split separator)
-typeBind = new ArrayList<>();
+typeBind = new ArrayList<String>();
 String typeBindDef = fieldMap.get("type-bind");
 if (typeBindDef != null && typeBindDef.trim().length() > 0) {
 String[] types = typeBindDef.split(",");
@@ -238,6 +244,22 @@ public class DCInput {
 
 }
 
+protected void initRegex(String regex) {
+this.regex = null;
+this.pattern = null;
+if (regex != null) {
+try {
+Optional.ofNullable(RegexPatternUtils.computePattern(regex))
+.ifPresent(pattern -> {
+this.pattern = pattern;
+this.regex = regex;
+});
+} catch (PatternSyntaxException e) {
+log.warn("The regex field of input {} with value {} is invalid!", this.label, regex);
+}
+}
+}
+
 /**
 * Is this DCInput for display in the given scope? The scope should be
 * either "workflow" or "submit", as per the input forms definition. If the
@@ -248,7 +270,7 @@ public class DCInput {
 * @return whether the input should be displayed or not
 */
 public boolean isVisible(String scope) {
-return (visibility == null || visibility.equals(scope));
+return visibility == null || visibility.equals(scope);
 }
 
 /**
@@ -381,7 +403,7 @@ public class DCInput {
 
 /**
 * Get the style for this form field
 *
 * @return the style
 */
 public String getStyle() {
@@ -512,8 +534,12 @@ public class DCInput {
 return visibility;
 }
 
+public Pattern getPattern() {
+return this.pattern;
+}
+
 public String getRegex() {
-return regex;
+return this.regex;
 }
 
 public String getFieldName() {
@@ -546,8 +572,7 @@ public class DCInput {
 public boolean validate(String value) {
 if (StringUtils.isNotBlank(value)) {
 try {
-if (StringUtils.isNotBlank(regex)) {
-Pattern pattern = Pattern.compile(regex);
+if (this.pattern != null) {
 if (!pattern.matcher(value).matches()) {
 return false;
 }
@@ -557,7 +582,6 @@ public class DCInput {
 }
 
 }
 
 return true;
 }
 
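The DCInput changes above compile the configured regular expression once, in initRegex(), and validate() then reuses the cached Pattern instead of recompiling it for every validation; an expression that fails to compile simply disables the check. The sketch below mirrors that caching idiom with plain java.util.regex for clarity; the real code routes the string through RegexPatternUtils.computePattern() so that ECMAScript-style /pattern/flags definitions are also accepted, and the class here is illustrative.

    import java.util.regex.Pattern;
    import java.util.regex.PatternSyntaxException;

    public class CachedRegexField {
        private String regex;
        private Pattern pattern;

        // Mirrors DCInput.initRegex(): keep regex and pattern null unless the
        // expression compiles, so an invalid configuration disables validation
        // instead of breaking every submission.
        void initRegex(String candidate) {
            this.regex = null;
            this.pattern = null;
            if (candidate != null) {
                try {
                    this.pattern = Pattern.compile(candidate);
                    this.regex = candidate;
                } catch (PatternSyntaxException e) {
                    // log and fall through: no pattern means no regex check
                }
            }
        }

        // Mirrors DCInput.validate(): blank values pass, otherwise the cached
        // pattern decides.
        boolean validate(String value) {
            if (value == null || value.isBlank() || pattern == null) {
                return true;
            }
            return pattern.matcher(value).matches();
        }
    }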
@@ -10,6 +10,7 @@ package org.dspace.app.util;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 
 import org.apache.commons.lang3.StringUtils;
 import org.dspace.core.Utils;
@@ -118,9 +119,12 @@ public class DCInputSet {
 return true;
 }
 }
+} else if (field.isRelationshipField() &&
+("relation." + field.getRelationshipType()).equals(fieldName)) {
+return true;
 } else {
 String fullName = field.getFieldName();
-if (fullName.equals(fieldName)) {
+if (Objects.equals(fullName, fieldName)) {
 return true;
 }
 }
@@ -0,0 +1,73 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.app.util;
+
+import static java.util.regex.Pattern.CASE_INSENSITIVE;
+
+import java.util.Optional;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.regex.PatternSyntaxException;
+
+import org.apache.commons.lang3.StringUtils;
+
+/**
+ * Utility class useful for check regex and patterns.
+ *
+ * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com)
+ *
+ */
+public class RegexPatternUtils {
+
+// checks input having the format /{pattern}/{flags}
+// allowed flags are: g,i,m,s,u,y
+public static final String REGEX_INPUT_VALIDATOR = "(/?)(.+)\\1([gimsuy]*)";
+// flags usable inside regex definition using format (?i|m|s|u|y)
+public static final String REGEX_FLAGS = "(?%s)";
+public static final Pattern PATTERN_REGEX_INPUT_VALIDATOR =
+Pattern.compile(REGEX_INPUT_VALIDATOR, CASE_INSENSITIVE);
+
+/**
+ * Computes a pattern starting from a regex definition with flags that
+ * uses the standard format: <code>/{regex}/{flags}</code> (ECMAScript format).
+ * This method can transform an ECMAScript regex into a java {@code Pattern} object
+ * wich can be used to validate strings.
+ * <br/>
+ * If regex is null, empty or blank a null {@code Pattern} will be retrieved
+ * If it's a valid regex, then a non-null {@code Pattern} will be retrieved,
+ * an exception will be thrown otherwise.
+ *
+ * @param regex with format <code>/{regex}/{flags}</code>
+ * @return {@code Pattern} regex pattern instance
+ * @throws PatternSyntaxException
+ */
+public static final Pattern computePattern(String regex) throws PatternSyntaxException {
+if (StringUtils.isBlank(regex)) {
+return null;
+}
+Matcher inputMatcher = PATTERN_REGEX_INPUT_VALIDATOR.matcher(regex);
+String regexPattern = regex;
+String regexFlags = "";
+if (inputMatcher.matches()) {
+regexPattern =
+Optional.of(inputMatcher.group(2))
+.filter(StringUtils::isNotBlank)
+.orElse(regex);
+regexFlags =
+Optional.ofNullable(inputMatcher.group(3))
+.filter(StringUtils::isNotBlank)
+.map(flags -> String.format(REGEX_FLAGS, flags))
+.orElse("")
+.replaceAll("g", "");
+}
+return Pattern.compile(regexFlags + regexPattern);
+}
+
+private RegexPatternUtils() {}
+
+}
@@ -22,7 +22,10 @@ import org.apache.commons.lang3.StringUtils;
 import org.apache.logging.log4j.Logger;
 import org.dspace.content.Collection;
 import org.dspace.content.DSpaceObject;
+import org.dspace.content.factory.ContentServiceFactory;
+import org.dspace.content.service.CollectionService;
 import org.dspace.core.Context;
+import org.dspace.discovery.SearchServiceException;
 import org.dspace.handle.factory.HandleServiceFactory;
 import org.dspace.services.factory.DSpaceServicesFactory;
 import org.w3c.dom.Document;
@@ -105,6 +108,13 @@ public class SubmissionConfigReader {
 */
 private SubmissionConfig lastSubmissionConfig = null;
 
+/**
+ * Collection Service instance, needed to interact with collection's
+ * stored data
+ */
+protected static final CollectionService collectionService
+= ContentServiceFactory.getInstance().getCollectionService();
+
 /**
 * Load Submission Configuration from the
 * item-submission.xml configuration file
@@ -152,6 +162,9 @@ public class SubmissionConfigReader {
 } catch (FactoryConfigurationError fe) {
 throw new SubmissionConfigReaderException(
 "Cannot create Item Submission Configuration parser", fe);
+} catch (SearchServiceException se) {
+throw new SubmissionConfigReaderException(
+"Cannot perform a discovery search for Item Submission Configuration", se);
 } catch (Exception e) {
 throw new SubmissionConfigReaderException(
 "Error creating Item Submission Configuration: " + e);
@@ -287,7 +300,7 @@ public class SubmissionConfigReader {
 * should correspond to the collection-form maps, the form definitions, and
 * the display/storage word pairs.
 */
-private void doNodes(Node n) throws SAXException, SubmissionConfigReaderException {
+private void doNodes(Node n) throws SAXException, SearchServiceException, SubmissionConfigReaderException {
 if (n == null) {
 return;
 }
@@ -334,18 +347,23 @@ public class SubmissionConfigReader {
 * the collection handle and item submission name, put name in hashmap keyed
 * by the collection handle.
 */
-private void processMap(Node e) throws SAXException {
+private void processMap(Node e) throws SAXException, SearchServiceException {
+// create a context
+Context context = new Context();
+
 NodeList nl = e.getChildNodes();
 int len = nl.getLength();
 for (int i = 0; i < len; i++) {
 Node nd = nl.item(i);
 if (nd.getNodeName().equals("name-map")) {
 String id = getAttribute(nd, "collection-handle");
+String entityType = getAttribute(nd, "collection-entity-type");
 String value = getAttribute(nd, "submission-name");
 String content = getValue(nd);
-if (id == null) {
+if (id == null && entityType == null) {
 throw new SAXException(
-"name-map element is missing collection-handle attribute in 'item-submission.xml'");
+"name-map element is missing collection-handle or collection-entity-type attribute " +
+"in 'item-submission.xml'");
 }
 if (value == null) {
 throw new SAXException(
@@ -355,7 +373,17 @@ public class SubmissionConfigReader {
 throw new SAXException(
 "name-map element has content in 'item-submission.xml', it should be empty.");
 }
-collectionToSubmissionConfig.put(id, value);
+if (id != null) {
+collectionToSubmissionConfig.put(id, value);
+
+} else {
+// get all collections for this entity-type
+List<Collection> collections = collectionService.findAllCollectionsByEntityType( context,
+entityType);
+for (Collection collection : collections) {
+collectionToSubmissionConfig.putIfAbsent(collection.getHandle(), value);
+}
+}
 } // ignore any child node that isn't a "name-map"
 }
 }
@@ -635,4 +663,4 @@ public class SubmissionConfigReader {
 }
 return results;
 }
 }
@@ -11,6 +11,9 @@ import java.io.Serializable;
 import java.util.Map;
 
 import org.apache.commons.lang3.BooleanUtils;
+import org.dspace.content.InProgressSubmission;
+import org.dspace.content.WorkspaceItem;
+import org.hibernate.proxy.HibernateProxyHelper;
 
 /**
 * Class representing configuration for a single step within an Item Submission
@@ -173,6 +176,38 @@ public class SubmissionStepConfig implements Serializable {
 return visibilityOutside;
 }
 
+/**
+* Check if given submission section object is hidden for the current submission scope
+*
+* @param obj the InProgressSubmission to check
+* @return true if the submission section is hidden, false otherwise
+*/
+public boolean isHiddenForInProgressSubmission(InProgressSubmission obj) {
+
+String scopeToCheck = getScope(obj);
+
+if (scope == null || scopeToCheck == null) {
+return false;
+}
+
+String visibility = getVisibility();
+String visibilityOutside = getVisibilityOutside();
+
+if (scope.equalsIgnoreCase(scopeToCheck)) {
+return "hidden".equalsIgnoreCase(visibility);
+} else {
+return visibilityOutside == null || "hidden".equalsIgnoreCase(visibilityOutside);
+}
+
+}
+
+private String getScope(InProgressSubmission obj) {
+if (HibernateProxyHelper.getClassWithoutInitializingProxy(obj).equals(WorkspaceItem.class)) {
+return "submission";
+}
+return "workflow";
+}
+
 /**
 * Get the number of this step in the current Submission process config.
 * Step numbers start with #0 (although step #0 is ALWAYS the special
@@ -51,6 +51,7 @@ import org.dspace.content.service.CollectionService;
 import org.dspace.content.service.CommunityService;
 import org.dspace.content.service.ItemService;
 import org.dspace.core.Context;
+import org.dspace.core.I18nUtil;
 import org.dspace.discovery.IndexableObject;
 import org.dspace.discovery.indexobject.IndexableCollection;
 import org.dspace.discovery.indexobject.IndexableCommunity;
@@ -91,6 +92,7 @@ public class SyndicationFeed {
 
 // default DC fields for entry
 protected String defaultTitleField = "dc.title";
+protected String defaultDescriptionField = "dc.description";
 protected String defaultAuthorField = "dc.contributor.author";
 protected String defaultDateField = "dc.date.issued";
 private static final String[] defaultDescriptionFields =
@@ -196,15 +198,15 @@ public class SyndicationFeed {
 // dso is null for the whole site, or a search without scope
 if (dso == null) {
 defaultTitle = configurationService.getProperty("dspace.name");
-feed.setDescription(localize(labels, MSG_FEED_DESCRIPTION));
+defaultDescriptionField = localize(labels, MSG_FEED_DESCRIPTION);
 objectURL = resolveURL(request, null);
 } else {
 Bitstream logo = null;
 if (dso instanceof IndexableCollection) {
 Collection col = ((IndexableCollection) dso).getIndexedObject();
 defaultTitle = col.getName();
-feed.setDescription(collectionService.getMetadataFirstValue(col,
-CollectionService.MD_SHORT_DESCRIPTION, Item.ANY));
+defaultDescriptionField = collectionService.getMetadataFirstValue(col,
+CollectionService.MD_SHORT_DESCRIPTION, Item.ANY);
 logo = col.getLogo();
 String cols = configurationService.getProperty("webui.feed.podcast.collections");
 if (cols != null && cols.length() > 1 && cols.contains(col.getHandle())) {
@@ -214,8 +216,8 @@ public class SyndicationFeed {
 } else if (dso instanceof IndexableCommunity) {
 Community comm = ((IndexableCommunity) dso).getIndexedObject();
 defaultTitle = comm.getName();
-feed.setDescription(communityService.getMetadataFirstValue(comm,
-CommunityService.MD_SHORT_DESCRIPTION, Item.ANY));
+defaultDescriptionField = communityService.getMetadataFirstValue(comm,
+CommunityService.MD_SHORT_DESCRIPTION, Item.ANY);
 logo = comm.getLogo();
 String comms = configurationService.getProperty("webui.feed.podcast.communities");
 if (comms != null && comms.length() > 1 && comms.contains(comm.getHandle())) {
@@ -230,6 +232,12 @@ public class SyndicationFeed {
 }
 feed.setTitle(labels.containsKey(MSG_FEED_TITLE) ?
 localize(labels, MSG_FEED_TITLE) : defaultTitle);
+
+if (defaultDescriptionField == null || defaultDescriptionField == "") {
+defaultDescriptionField = I18nUtil.getMessage("org.dspace.app.util.SyndicationFeed.no-description");
+}
+
+feed.setDescription(defaultDescriptionField);
 feed.setLink(objectURL);
 feed.setPublishedDate(new Date());
 feed.setUri(objectURL);
@@ -52,11 +52,6 @@ public class IPAuthentication implements AuthenticationMethod {
 */
 private static Logger log = org.apache.logging.log4j.LogManager.getLogger(IPAuthentication.class);
 
-/**
-* Whether to look for x-forwarded headers for logging IP addresses
-*/
-protected static Boolean useProxies;
-
 /**
 * All the IP matchers
 */
@@ -250,7 +245,7 @@ public class IPAuthentication implements AuthenticationMethod {
 
 log.debug(LogHelper.getHeader(context, "authenticated",
 "special_groups=" + gsb.toString()
-+ " (by IP=" + addr + ", useProxies=" + useProxies.toString() + ")"
++ " (by IP=" + addr + ")"
 ));
 }
 
@@ -11,9 +11,11 @@ import static org.dspace.eperson.service.EPersonService.MD_PHONE;
 
 import java.io.IOException;
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Hashtable;
+import java.util.Iterator;
 import java.util.List;
 import javax.naming.NamingEnumeration;
 import javax.naming.NamingException;
@@ -64,6 +66,7 @@ import org.dspace.services.factory.DSpaceServicesFactory;
 * @author Reuben Pasquini
 * @author Samuel Ottenhoff
 * @author Ivan Masár
+* @author Michael Plate
 */
 public class LDAPAuthentication
 implements AuthenticationMethod {
@@ -391,7 +394,7 @@ public class LDAPAuthentication
 protected String ldapGivenName = null;
 protected String ldapSurname = null;
 protected String ldapPhone = null;
-protected String ldapGroup = null;
+protected ArrayList<String> ldapGroup = null;
 
 /**
 * LDAP settings
@@ -406,9 +409,9 @@ public class LDAPAuthentication
 final String ldap_surname_field;
 final String ldap_phone_field;
 final String ldap_group_field;
+
 final boolean useTLS;
-
 
 SpeakerToLDAP(Logger thelog) {
 ConfigurationService configurationService
 = DSpaceServicesFactory.getInstance().getConfigurationService();
@@ -547,7 +550,11 @@ public class LDAPAuthentication
 if (attlist[4] != null) {
 att = atts.get(attlist[4]);
 if (att != null) {
-ldapGroup = (String) att.get();
+// loop through all groups returned by LDAP
+ldapGroup = new ArrayList<String>();
+for (NamingEnumeration val = att.getAll(); val.hasMoreElements(); ) {
+ldapGroup.add((String) val.next());
+}
 }
 }
 
@@ -693,48 +700,69 @@ public class LDAPAuthentication
 /*
 * Add authenticated users to the group defined in dspace.cfg by
 * the authentication-ldap.login.groupmap.* key.
+*
+* @param dn
+* The string containing distinguished name of the user
+*
+* @param group
+* List of strings with LDAP dn of groups
+*
+* @param context
+* DSpace context
 */
-private void assignGroups(String dn, String group, Context context) {
+private void assignGroups(String dn, ArrayList<String> group, Context context) {
 if (StringUtils.isNotBlank(dn)) {
 System.out.println("dn:" + dn);
 int i = 1;
 String groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + i);
 
 boolean cmp;
 
-
+// groupmap contains the mapping of LDAP groups to DSpace groups
+// outer loop with the DSpace groups
 while (groupMap != null) {
 String t[] = groupMap.split(":");
 String ldapSearchString = t[0];
 String dspaceGroupName = t[1];
 
-if (group == null) {
-cmp = StringUtils.containsIgnoreCase(dn, ldapSearchString + ",");
-} else {
-cmp = StringUtils.equalsIgnoreCase(group, ldapSearchString);
-}
+// list of strings with dn from LDAP groups
+// inner loop
+Iterator<String> groupIterator = group.iterator();
+while (groupIterator.hasNext()) {
 
-if (cmp) {
-// assign user to this group
-try {
-Group ldapGroup = groupService.findByName(context, dspaceGroupName);
-if (ldapGroup != null) {
-groupService.addMember(context, ldapGroup, context.getCurrentUser());
-groupService.update(context, ldapGroup);
-} else {
-// The group does not exist
-log.warn(LogHelper.getHeader(context,
-"ldap_assignGroupsBasedOnLdapDn",
-"Group defined in authentication-ldap.login.groupmap." + i
-+ " does not exist :: " + dspaceGroupName));
+// save the current entry from iterator for further use
+String currentGroup = groupIterator.next();
+
+// very much the old code from DSpace <= 7.5
+if (currentGroup == null) {
+cmp = StringUtils.containsIgnoreCase(dn, ldapSearchString + ",");
+} else {
+cmp = StringUtils.equalsIgnoreCase(currentGroup, ldapSearchString);
+}
+
+if (cmp) {
+// assign user to this group
+try {
+Group ldapGroup = groupService.findByName(context, dspaceGroupName);
+if (ldapGroup != null) {
+groupService.addMember(context, ldapGroup, context.getCurrentUser());
+groupService.update(context, ldapGroup);
+} else {
+// The group does not exist
+log.warn(LogHelper.getHeader(context,
+"ldap_assignGroupsBasedOnLdapDn",
+"Group defined in authentication-ldap.login.groupmap." + i
++ " does not exist :: " + dspaceGroupName));
+}
+} catch (AuthorizeException ae) {
+log.debug(LogHelper.getHeader(context,
+"assignGroupsBasedOnLdapDn could not authorize addition to " +
+"group",
+dspaceGroupName));
+} catch (SQLException e) {
+log.debug(LogHelper.getHeader(context, "assignGroupsBasedOnLdapDn could not find group",
+dspaceGroupName));
+}
 }
-} catch (AuthorizeException ae) {
-log.debug(LogHelper.getHeader(context,
-"assignGroupsBasedOnLdapDn could not authorize addition to " +
-"group",
-dspaceGroupName));
-} catch (SQLException e) {
-log.debug(LogHelper.getHeader(context, "assignGroupsBasedOnLdapDn could not find group",
-dspaceGroupName));
 }
 }
 
@@ -31,16 +31,19 @@ import org.dspace.content.DSpaceObject;
 import org.dspace.content.Item;
 import org.dspace.content.factory.ContentServiceFactory;
 import org.dspace.content.service.BitstreamService;
+import org.dspace.content.service.CollectionService;
 import org.dspace.content.service.WorkspaceItemService;
 import org.dspace.core.Constants;
 import org.dspace.core.Context;
 import org.dspace.discovery.DiscoverQuery;
+import org.dspace.discovery.DiscoverQuery.SORT_ORDER;
 import org.dspace.discovery.DiscoverResult;
 import org.dspace.discovery.IndexableObject;
 import org.dspace.discovery.SearchService;
 import org.dspace.discovery.SearchServiceException;
 import org.dspace.discovery.indexobject.IndexableCollection;
 import org.dspace.discovery.indexobject.IndexableCommunity;
+import org.dspace.discovery.indexobject.IndexableItem;
 import org.dspace.eperson.EPerson;
 import org.dspace.eperson.Group;
 import org.dspace.eperson.service.GroupService;
@@ -521,6 +524,15 @@ public class AuthorizeServiceImpl implements AuthorizeService {
 addPolicies(c, nonAdminPolicies, dest);
 }
 
+@Override
+public void replaceAllPolicies(Context context, DSpaceObject source, DSpaceObject dest)
+throws SQLException, AuthorizeException {
+// find all policies for the source object
+List<ResourcePolicy> policies = getPolicies(context, source);
+removeAllPolicies(context, dest);
+addPolicies(context, policies, dest);
+}
+
 @Override
 public void switchPoliciesAction(Context context, DSpaceObject dso, int fromAction, int toAction)
 throws SQLException, AuthorizeException {
@@ -643,60 +655,6 @@ public class AuthorizeServiceImpl implements AuthorizeService {
 }
 }
 
-/**
-* Generate Policies policies READ for the date in input adding reason. New policies are assigned automatically
-* at the groups that
-* have right on the collection. E.g., if the anonymous can access the collection policies are assigned to
-* anonymous.
-*
-* @param context The relevant DSpace Context.
-* @param embargoDate embargo end date
-* @param reason embargo reason
-* @param dso DSpace object
-* @param owningCollection collection to get group policies from
-* @throws SQLException if database error
-* @throws AuthorizeException if authorization error
-*/
-@Override
-public void generateAutomaticPolicies(Context context, Date embargoDate,
-String reason, DSpaceObject dso, Collection owningCollection)
-throws SQLException, AuthorizeException {
-
-if (embargoDate != null || (embargoDate == null && dso instanceof Bitstream)) {
-
-List<Group> authorizedGroups = getAuthorizedGroups(context, owningCollection, Constants.DEFAULT_ITEM_READ);
-
-removeAllPoliciesByDSOAndType(context, dso, ResourcePolicy.TYPE_CUSTOM);
-
-// look for anonymous
-boolean isAnonymousInPlace = false;
-for (Group g : authorizedGroups) {
-if (StringUtils.equals(g.getName(), Group.ANONYMOUS)) {
-isAnonymousInPlace = true;
-}
-}
-if (!isAnonymousInPlace) {
-// add policies for all the groups
-for (Group g : authorizedGroups) {
-ResourcePolicy rp = createOrModifyPolicy(null, context, null, g, null, embargoDate, Constants.READ,
-reason, dso);
-if (rp != null) {
-resourcePolicyService.update(context, rp);
-}
-}
-
-} else {
-// add policy just for anonymous
-ResourcePolicy rp = createOrModifyPolicy(null, context, null,
-groupService.findByName(context, Group.ANONYMOUS), null,
-embargoDate, Constants.READ, reason, dso);
-if (rp != null) {
-resourcePolicyService.update(context, rp);
-}
-}
-}
-}
-
 @Override
 public ResourcePolicy createResourcePolicy(Context context, DSpaceObject dso, Group group, EPerson eperson,
 int type, String rpType) throws SQLException, AuthorizeException {
@@ -798,6 +756,19 @@ public class AuthorizeServiceImpl implements AuthorizeService {
 return performCheck(context, "search.resourcetype:" + IndexableCollection.TYPE);
 }
 
+/**
+* Checks that the context's current user is an item admin in the site by querying the solr database.
+*
+* @param context context with the current user
+* @return true if the current user is an item admin in the site
+* false when this is not the case, or an exception occurred
+* @throws java.sql.SQLException passed through.
+*/
+@Override
+public boolean isItemAdmin(Context context) throws SQLException {
+return performCheck(context, "search.resourcetype:" + IndexableItem.TYPE);
+}
+
 /**
 * Checks that the context's current user is a community or collection admin in the site.
 *
@@ -830,7 +801,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
 query = formatCustomQuery(query);
 DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" +
 IndexableCommunity.TYPE,
-offset, limit);
+offset, limit, null, null);
 for (IndexableObject solrCollections : discoverResult.getIndexableObjects()) {
 Community community = ((IndexableCommunity) solrCollections).getIndexedObject();
 communities.add(community);
@@ -852,7 +823,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
 query = formatCustomQuery(query);
 DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" +
 IndexableCommunity.TYPE,
-null, null);
+null, null, null, null);
 return discoverResult.getTotalSearchResults();
 }
 
@@ -877,7 +848,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
 query = formatCustomQuery(query);
 DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" +
 IndexableCollection.TYPE,
-offset, limit);
+offset, limit, CollectionService.SOLR_SORT_FIELD, SORT_ORDER.asc);
 for (IndexableObject solrCollections : discoverResult.getIndexableObjects()) {
 Collection collection = ((IndexableCollection) solrCollections).getIndexedObject();
 collections.add(collection);
@@ -899,7 +870,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
 query = formatCustomQuery(query);
 DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" +
 IndexableCollection.TYPE,
-null, null);
+null, null, null, null);
 return discoverResult.getTotalSearchResults();
 }
 
@@ -919,7 +890,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
 }
 
 try {
-DiscoverResult discoverResult = getDiscoverResult(context, query, null, null);
+DiscoverResult discoverResult = getDiscoverResult(context, query, null, null, null, null);
 if (discoverResult.getTotalSearchResults() > 0) {
 return true;
 }
@@ -931,7 +902,8 @@ public class AuthorizeServiceImpl implements AuthorizeService {
 return false;
 }
 
-private DiscoverResult getDiscoverResult(Context context, String query, Integer offset, Integer limit)
+private DiscoverResult getDiscoverResult(Context context, String query, Integer offset, Integer limit,
+String sortField, SORT_ORDER sortOrder)
 throws SearchServiceException, SQLException {
 String groupQuery = getGroupToQuery(groupService.allMemberGroups(context, context.getCurrentUser()));
 
@@ -947,7 +919,9 @@ public class AuthorizeServiceImpl implements AuthorizeService {
 if (limit != null) {
 discoverQuery.setMaxResults(limit);
 }
+if (sortField != null && sortOrder != null) {
+discoverQuery.setSortField(sortField, sortOrder);
+}
 
 return searchService.search(context, discoverQuery);
 }
@@ -41,9 +41,16 @@ import org.hibernate.proxy.HibernateProxyHelper;
 @Entity
 @Table(name = "resourcepolicy")
 public class ResourcePolicy implements ReloadableEntity<Integer> {
+/** This policy was set on submission, to give the submitter access. */
 public static String TYPE_SUBMISSION = "TYPE_SUBMISSION";
+
+/** This policy was set to allow access by a workflow group. */
 public static String TYPE_WORKFLOW = "TYPE_WORKFLOW";
+
+/** This policy was explicitly set on this object. */
 public static String TYPE_CUSTOM = "TYPE_CUSTOM";
+
+/** This policy was copied from the containing object's default policies. */
 public static String TYPE_INHERITED = "TYPE_INHERITED";
 
 @Id
@@ -93,7 +100,7 @@ public class ResourcePolicy implements ReloadableEntity<Integer> {
 private String rptype;
 
 @Lob
-@Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType")
+@Type(type = "org.hibernate.type.TextType")
 @Column(name = "rpdescription")
 private String rpdescription;
 
@@ -232,6 +232,15 @@ public class ResourcePolicyServiceImpl implements ResourcePolicyService {
 c.restoreAuthSystemState();
 }
 
+@Override
+public void removePolicies(Context c, DSpaceObject o, String type, int action)
+throws SQLException, AuthorizeException {
+resourcePolicyDAO.deleteByDsoAndTypeAndAction(c, o, type, action);
+c.turnOffAuthorisationSystem();
+contentServiceFactory.getDSpaceObjectService(o).updateLastModified(c, o);
+c.restoreAuthSystemState();
+}
+
 @Override
 public void removeDsoGroupPolicies(Context context, DSpaceObject dso, Group group)
 throws SQLException, AuthorizeException {
@@ -39,6 +39,9 @@ public interface ResourcePolicyDAO extends GenericDAO<ResourcePolicy> {
 
 public List<ResourcePolicy> findByDSoAndAction(Context context, DSpaceObject dso, int actionId) throws SQLException;
 
+public void deleteByDsoAndTypeAndAction(Context context, DSpaceObject dSpaceObject, String type, int action)
+throws SQLException;
+
 public List<ResourcePolicy> findByTypeGroupAction(Context context, DSpaceObject dso, Group group, int action)
 throws SQLException;
 
@@ -103,6 +103,19 @@ public class ResourcePolicyDAOImpl extends AbstractHibernateDAO<ResourcePolicy>
 return list(context, criteriaQuery, false, ResourcePolicy.class, -1, -1);
 }
 
+@Override
+public void deleteByDsoAndTypeAndAction(Context context, DSpaceObject dso, String type, int actionId)
+throws SQLException {
+String queryString = "delete from ResourcePolicy where dSpaceObject.id = :dsoId "
++ "AND rptype = :rptype AND actionId= :actionId";
+Query query = createQuery(context, queryString);
+query.setParameter("dsoId", dso.getID());
+query.setParameter("rptype", type);
+query.setParameter("actionId", actionId);
+query.executeUpdate();
+
+}
+
 @Override
 public List<ResourcePolicy> findByTypeGroupAction(Context context, DSpaceObject dso, Group group, int action)
 throws SQLException {
@@ -0,0 +1,67 @@
+/**
+* The contents of this file are subject to the license and copyright
+* detailed in the LICENSE and NOTICE files at the root of the source
+* tree and available online at
+*
+* http://www.dspace.org/license/
+*/
+
+/**
+* Represents permissions for access to DSpace content.
+*
+* <h2>Philosophy</h2>
+* DSpace's authorization system follows the classical "police state"
+* philosophy of security - the user can do nothing, unless it is
+* specifically allowed. Those permissions are spelled out with
+* {@link ResourcePolicy} objects, stored in the {@code resourcepolicy} table
+* in the database.
+*
+* <h2>Policies are attached to Content</h2>
+* Resource Policies get assigned to all of the content objects in
+* DSpace - collections, communities, items, bundles, and bitstreams.
+* (Currently they are not attached to non-content objects such as
+* {@code EPerson} or {@code Group}. But they could be, hence the name
+* {@code ResourcePolicy} instead of {@code ContentPolicy}.)
+*
+* <h2>Policies are tuples</h2>
+* Authorization is based on evaluating the tuple of (object, action, actor),
+* such as (ITEM, READ, EPerson John Smith) to check if the {@code EPerson}
+* "John Smith" can read an item. {@code ResourcePolicy} objects are pretty
+* simple, describing a single instance of (object, action, actor). If
+* multiple actors are desired, such as groups 10, 11, and 12 are allowed to
+* READ Item 13, you simply create a {@code ResourcePolicy} for each group.
+*
+* <h2>Built-in groups</h2>
+* The install process should create two built-in groups - {@code Anonymous}
+* for anonymous/public access, and {@code Administrators} for administrators.
+* Group {@code Anonymous} allows anyone access, even if not authenticated.
+* Group {@code Administrators}' members have super-user rights,
+* and are allowed to do any action to any object.
+*
+* <h2>Policy types
+* Policies have a "type" used to distinguish policies which are applied for
+* specific purposes.
+* <dl>
+* <dt>CUSTOM</dt>
+* <dd>These are created and assigned explicitly by users.</dd>
+* <dt>INHERITED</dt>
+* <dd>These are copied from a containing object's default policies.</dd>
+* <dt>SUBMISSION</dt>
+* <dd>These are applied during submission to give the submitter access while
+* composing a submission.</dd>
+* <dt>WORKFLOW</dt>
+* <dd>These are automatically applied during workflow, to give curators
+* access to submissions in their curation queues. They usually have an
+* automatically-created workflow group as the actor.</dd>
+*
+* <h2>Start and End dates</h2>
+* A policy may have a start date and/or an end date. The policy is
+* considered not valid before the start date or after the end date. No date
+* means do not apply the related test. For example, embargo until a given
+* date can be expressed by a READ policy with a given start date, and a
+* limited-time offer by a READ policy with a given end date.
+*
+* @author dstuve
+* @author mwood
+*/
+package org.dspace.authorize;
@@ -1,68 +0,0 @@
-<!--
-
-The contents of this file are subject to the license and copyright
-detailed in the LICENSE and NOTICE files at the root of the source
-tree and available online at
-
-http://www.dspace.org/license/
-
--->
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
-<html>
-<head>
-<!--
-Author: dstuve
-Version: $Id$
-Date: $Date$
--->
-</head>
-<body bgcolor="white">
-<p>Handles permissions for DSpace content.
-</p>
-
-<p><strong>Philosophy</strong><br>
-DSpace's authorization system follows the classical "police state"
-philosophy of security - the user can do nothing, unless it is
-specifically allowed. Those permissions are spelled out with
-ResourcePolicy objects, stored in the resourcepolicy table in the
-database.
-</p>
-
-<h2>Policies are attached to Content</h2>
-<p><strong>Policies are attached to Content</strong><br>
-Resource Policies get assigned to all of the content objects in
-DSpace - collections, communities, items, bundles, and bitstreams.
-(Currently they are not attached to non-content objects such as EPerson
-or Group. But they could be, hence the name ResourcePolicy instead of
-ContentPolicy.)
-</p>
-
-<h2>Policies are tuples</h2>
-Authorization is based on evaluating the tuple of (object, action, who),
-such as (ITEM, READ, EPerson John Smith) to check if the EPerson "John Smith"
-can read an item. ResourcePolicy objects are pretty simple, describing a single instance of
-(object, action, who). If multiple who's are desired, such as Groups 10, 11, and
-12 are allowed to READ Item 13, you simply create a ResourcePolicy for each
-group.
-</p>
-
-<h2>Special Groups</h2>
-The install process should create two special groups - group 0, for
-anonymous/public access, and group 1 for administrators.
-Group 0 (public/anonymous) allows anyone access, even if they are not
-authenticated. Group 1's (admin) members have super-user rights, and
-are allowed to do any action to any object.
-</p>
-
-<h2>Unused ResourcePolicy attributes </h2>
-ResourcePolicies have a few attributes that are currently unused,
-but are included with the intent that they will be used someday.
-One is start and end dates, for when policies will be active, so that
-permissions for content can change over time. The other is the EPerson -
-policies could apply to only a single EPerson, but for ease of
-administration currently a Group is the recommended unit to use to
-describe 'who'.
-</p>
-
-</body>
-</html>
@@ -470,24 +470,6 @@ public interface AuthorizeService {
 public ResourcePolicy findByTypeGroupAction(Context c, DSpaceObject dso, Group group, int action)
 throws SQLException;
 
-
-/**
-* Generate Policies policies READ for the date in input adding reason. New policies are assigned automatically
-* at the groups that
-* have right on the collection. E.g., if the anonymous can access the collection policies are assigned to
-* anonymous.
-*
-* @param context current context
-* @param embargoDate date
-* @param reason reason
-* @param dso DSpaceObject
-* @param owningCollection collection
-* @throws SQLException if database error
-* @throws AuthorizeException if authorization error
-*/
-public void generateAutomaticPolicies(Context context, Date embargoDate, String reason, DSpaceObject dso,
-Collection owningCollection) throws SQLException, AuthorizeException;
-
 public ResourcePolicy createResourcePolicy(Context context, DSpaceObject dso, Group group, EPerson eperson,
 int type, String rpType) throws SQLException, AuthorizeException;
 
@@ -532,6 +514,15 @@ public interface AuthorizeService {
 */
 boolean isCollectionAdmin(Context context) throws SQLException;
 
+/**
+* Checks that the context's current user is an item admin in the site by querying the solr database.
+*
+* @param context context with the current user
+* @return true if the current user is an item admin in the site
+* false when this is not the case, or an exception occurred
+*/
+boolean isItemAdmin(Context context) throws SQLException;
+
 /**
 * Checks that the context's current user is a community or collection admin in the site.
 *
@@ -600,4 +591,17 @@ public interface AuthorizeService {
 * @return true if the current user can manage accounts
 */
 boolean isAccountManager(Context context);
+
+/**
+* Replace all the policies in the target object with exactly the same policies that exist in the source object
+*
+* @param context DSpace Context
+* @param source source of policies
+* @param dest destination of inherited policies
+* @throws SQLException if there's a database problem
+* @throws AuthorizeException if the current user is not authorized to add these policies
+*/
+public void replaceAllPolicies(Context context, DSpaceObject source, DSpaceObject dest)
+throws SQLException, AuthorizeException;
+
 }
@@ -53,12 +53,19 @@ public interface ResourcePolicyService extends DSpaceCRUDService<ResourcePolicy>
 throws SQLException;
 
 /**
-* Look for ResourcePolicies by DSpaceObject, Group, and action, ignoring IDs with a specific PolicyID.
-* This method can be used to detect duplicate ResourcePolicies.
+* Look for ResourcePolicies by DSpaceObject, Group, and action, ignoring
+* IDs with a specific PolicyID. This method can be used to detect duplicate
+* ResourcePolicies.
 *
-* @param notPolicyID ResourcePolicies with this ID will be ignored while looking out for equal ResourcePolicies.
-* @return List of resource policies for the same DSpaceObject, group and action but other policyID.
-* @throws SQLException
+* @param context current DSpace session.
+* @param dso find policies for this object.
+* @param group find policies referring to this group.
+* @param action find policies for this action.
+* @param notPolicyID ResourcePolicies with this ID will be ignored while
+* looking out for equal ResourcePolicies.
+* @return List of resource policies for the same DSpaceObject, group and
+* action but other policyID.
+* @throws SQLException passed through.
 */
 public List<ResourcePolicy> findByTypeGroupActionExceptId(Context context, DSpaceObject dso, Group group,
 int action, int notPolicyID)
@@ -68,6 +75,16 @@ public interface ResourcePolicyService extends DSpaceCRUDService<ResourcePolicy>
 
 public boolean isDateValid(ResourcePolicy resourcePolicy);
 
+/**
+* Create and persist a copy of a given ResourcePolicy, with an empty
+* dSpaceObject field.
+*
+* @param context current DSpace session.
+* @param resourcePolicy the policy to be copied.
+* @return the copy.
+* @throws SQLException passed through.
+* @throws AuthorizeException passed through.
+*/
 public ResourcePolicy clone(Context context, ResourcePolicy resourcePolicy) throws SQLException, AuthorizeException;
 
 public void removeAllPolicies(Context c, DSpaceObject o) throws SQLException, AuthorizeException;
@@ -76,6 +93,9 @@ public interface ResourcePolicyService extends DSpaceCRUDService<ResourcePolicy>
 
 public void removePolicies(Context c, DSpaceObject o, String type) throws SQLException, AuthorizeException;
 
+public void removePolicies(Context c, DSpaceObject o, String type, int action)
+throws SQLException, AuthorizeException;
+
 public void removeDsoGroupPolicies(Context context, DSpaceObject dso, Group group)
 throws SQLException, AuthorizeException;
 
@@ -117,6 +137,7 @@ public interface ResourcePolicyService extends DSpaceCRUDService<ResourcePolicy>
 * @param ePerson ePerson whose policies want to find
 * @param offset the position of the first result to return
 * @param limit paging limit
+* @return some of the policies referring to {@code ePerson}.
 * @throws SQLException if database error
 */
 public List<ResourcePolicy> findByEPerson(Context context, EPerson ePerson, int offset, int limit)
@@ -8,8 +8,8 @@
 package org.dspace.browse;
 
 import java.util.List;
-import java.util.UUID;
 
+import org.dspace.content.DSpaceObject;
 import org.dspace.content.Item;
 
 /**
@@ -140,21 +140,21 @@ public interface BrowseDAO {
 public void setAscending(boolean ascending);
 
 /**
-* Get the database ID of the container object. The container object will be a
+* Get the container object. The container object will be a
 * Community or a Collection.
 *
-* @return the database id of the container, or -1 if none is set
+* @return the container, or null if none is set
 */
-public UUID getContainerID();
+public DSpaceObject getContainer();
 
 /**
-* Set the database id of the container object. This should be the id of a
-* Community or Collection. This will constrain the results of the browse
-* to only items or values within items that appear in the given container.
+* Set the container object. This should be a Community or Collection.
+* This will constrain the results of the browse to only items or values within items that appear in the given
+* container and add the related configuration default filters.
 *
-* @param containerID community/collection internal ID (UUID)
+* @param container community/collection
 */
-public void setContainerID(UUID containerID);
+public void setContainer(DSpaceObject container);
 
 /**
 * get the name of the field in which to look for the container id. This is
@@ -141,12 +141,12 @@ public class BrowseEngine {
 Collection col = (Collection) scope.getBrowseContainer();
 dao.setContainerTable("collection2item");
 dao.setContainerIDField("collection_id");
-dao.setContainerID(col.getID());
+dao.setContainer(col);
 } else if (scope.inCommunity()) {
 Community com = (Community) scope.getBrowseContainer();
 dao.setContainerTable("communities2item");
 dao.setContainerIDField("community_id");
-dao.setContainerID(com.getID());
+dao.setContainer(com);
 }
 }
 
@@ -247,12 +247,12 @@ public class BrowseEngine {
 Collection col = (Collection) scope.getBrowseContainer();
 dao.setContainerTable("collection2item");
 dao.setContainerIDField("collection_id");
-dao.setContainerID(col.getID());
+dao.setContainer(col);
 } else if (scope.inCommunity()) {
 Community com = (Community) scope.getBrowseContainer();
 dao.setContainerTable("communities2item");
 dao.setContainerIDField("community_id");
-dao.setContainerID(com.getID());
+dao.setContainer(com);
 }
 }
 
@@ -413,12 +413,12 @@ public class BrowseEngine {
 Collection col = (Collection) scope.getBrowseContainer();
 dao.setContainerTable("collection2item");
 dao.setContainerIDField("collection_id");
-dao.setContainerID(col.getID());
+dao.setContainer(col);
 } else if (scope.inCommunity()) {
 Community com = (Community) scope.getBrowseContainer();
 dao.setContainerTable("communities2item");
 dao.setContainerIDField("community_id");
-dao.setContainerID(com.getID());
+dao.setContainer(com);
 }
 }
 
@@ -22,11 +22,13 @@ import org.dspace.sort.SortOption;
 * This class holds all the information about a specifically configured
 * BrowseIndex. It is responsible for parsing the configuration, understanding
 * about what sort options are available, and what the names of the database
-* tables that hold all the information are actually called.
+* tables that hold all the information are actually called. Hierarchical browse
+* indexes also contain information about the vocabulary they're using, see:
+* {@link org.dspace.content.authority.DSpaceControlledVocabularyIndex}
 *
 * @author Richard Jones
 */
-public final class BrowseIndex {
+public class BrowseIndex {
 /** the configuration number, as specified in the config */
 /**
 * used for single metadata browse tables for generating the table name
@@ -102,7 +104,7 @@ public final class BrowseIndex {
 *
 * @param baseName The base of the table name
 */
-private BrowseIndex(String baseName) {
+protected BrowseIndex(String baseName) {
 try {
 number = -1;
 tableBaseName = baseName;
@@ -59,7 +59,16 @@ public class CrossLinks {
     * @return true/false
     */
    public boolean hasLink(String metadata) {
-        return links.containsKey(metadata);
+        return findLinkType(metadata) != null;
+    }
+
+    /**
+     * Is there a link for the given browse name (eg 'author')
+     * @param browseIndexName
+     * @return true/false
+     */
+    public boolean hasBrowseName(String browseIndexName) {
+        return links.containsValue(browseIndexName);
    }

    /**
@@ -69,6 +78,41 @@ public class CrossLinks {
     * @return type
     */
    public String getLinkType(String metadata) {
-        return links.get(metadata);
+        return findLinkType(metadata);
+    }
+
+    /**
+     * Get full map of field->indexname link configurations
+     * @return
+     */
+    public Map<String, String> getLinks() {
+        return links;
+    }
+
+    /**
+     * Find and return the browse name for a given metadata field.
+     * If the link key contains a wildcard eg dc.subject.*, it should
+     * match dc.subject.other, etc.
+     * @param metadata
+     * @return
+     */
+    public String findLinkType(String metadata) {
+        // Resolve wildcards properly, eg. dc.subject.other matches a configuration for dc.subject.*
+        for (String key : links.keySet()) {
+            if (null != key && key.endsWith(".*")) {
+                // A substring of length-1, also substracting the wildcard should work as a "startsWith"
+                // check for the field eg. dc.subject.* -> dc.subject is the start of dc.subject.other
+                if (null != metadata && metadata.startsWith(key.substring(0, key.length() - 1 - ".*".length()))) {
+                    return links.get(key);
+                }
+            } else {
+                // Exact match, if the key field has no .* wildcard
+                if (links.containsKey(metadata)) {
+                    return links.get(key);
+                }
+            }
+        }
+        // No match
+        return null;
    }
}
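Illustration (not part of this changeset): the new findLinkType(String) resolves cross-link keys ending in ".*" with a prefix comparison, so a configured key such as dc.subject.* also covers dc.subject.other, while other keys are matched exactly. The snippet below is a slightly simplified, self-contained paraphrase of that rule over a hand-built map (the map contents are invented; it does not call CrossLinks itself, whose constructor reads the browse configuration).

    import java.util.HashMap;
    import java.util.Map;

    public class CrossLinkWildcardDemo {

        // Simplified version of the wildcard rule used by CrossLinks.findLinkType above.
        static String findLinkType(Map<String, String> links, String metadata) {
            for (String key : links.keySet()) {
                if (key != null && key.endsWith(".*")) {
                    // "dc.subject.*" is compared as the prefix "dc.subject"
                    String prefix = key.substring(0, key.length() - ".*".length());
                    if (metadata != null && metadata.startsWith(prefix)) {
                        return links.get(key);
                    }
                } else if (links.containsKey(metadata)) {
                    return links.get(metadata);
                }
            }
            return null;
        }

        public static void main(String[] args) {
            Map<String, String> links = new HashMap<>();
            links.put("dc.contributor.author", "author"); // exact key
            links.put("dc.subject.*", "subject");         // wildcard key
            System.out.println(findLinkType(links, "dc.contributor.author")); // author
            System.out.println(findLinkType(links, "dc.subject.other"));      // subject
            System.out.println(findLinkType(links, "dc.title"));              // null
        }
    }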
@@ -18,6 +18,7 @@ import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
+import org.dspace.web.ContextUtil;

/**
 * This class provides a standard interface to all item counting
@@ -49,9 +50,20 @@ public class ItemCounter {
     */
    private Context context;

+    /**
+     * This field is used to hold singular instance of a class.
+     * Singleton pattern is used but this class should be
+     * refactored to modern DSpace approach (injectible service).
+     */
+
+    private static ItemCounter instance;
+
    protected ItemService itemService;
    protected ConfigurationService configurationService;

+    private boolean showStrengths;
+    private boolean useCache;
+
    /**
     * Construct a new item counter which will use the given DSpace Context
     *
@@ -63,21 +75,42 @@ public class ItemCounter {
        this.dao = ItemCountDAOFactory.getInstance(this.context);
        this.itemService = ContentServiceFactory.getInstance().getItemService();
        this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
+        this.showStrengths = configurationService.getBooleanProperty("webui.strengths.show", false);
+        this.useCache = configurationService.getBooleanProperty("webui.strengths.cache", true);
    }

    /**
-     * Get the count of the items in the given container. If the configuration
-     * value webui.strengths.cache is equal to 'true' this will return the
-     * cached value if it exists. If it is equal to 'false' it will count
-     * the number of items in the container in real time.
+     * Get the singular instance of a class.
+     * It creates a new instance at the first usage of this method.
+     *
+     * @return instance af a class
+     * @throws ItemCountException when error occurs
+     */
+    public static ItemCounter getInstance() throws ItemCountException {
+        if (instance == null) {
+            instance = new ItemCounter(ContextUtil.obtainCurrentRequestContext());
+        }
+        return instance;
+    }
+
+    /**
+     * Get the count of the items in the given container. If the configuration
+     * value webui.strengths.show is equal to 'true' this method will return all
+     * archived items. If the configuration value webui.strengths.show is equal to
+     * 'false' this method will return -1.
+     * If the configuration value webui.strengths.cache
+     * is equal to 'true' this will return the cached value if it exists.
+     * If it is equal to 'false' it will count the number of items
+     * in the container in real time.
     *
     * @param dso DSpaceObject
     * @return count
     * @throws ItemCountException when error occurs
     */
    public int getCount(DSpaceObject dso) throws ItemCountException {
-        boolean useCache = configurationService.getBooleanProperty(
-            "webui.strengths.cache", true);
+        if (!showStrengths) {
+            return -1;
+        }
+
        if (useCache) {
            return dao.getCount(dso);
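Illustration (not part of this changeset): with the hunks above, ItemCounter becomes a lazily created singleton bound to the current request context, and getCount(dso) returns -1 as soon as webui.strengths.show is false, so callers no longer have to read that property themselves (webui.strengths.cache still decides between the cached table and a live count). A hedged usage sketch, compilable only inside dspace-api, with the -1 handling left to the caller:

    import org.dspace.browse.ItemCountException;
    import org.dspace.browse.ItemCounter;
    import org.dspace.content.Collection;

    public class StrengthLabelExample {

        // Returns a label such as "[42]", or "" when strengths are disabled (count == -1).
        public static String strengthLabel(Collection collection) {
            try {
                int count = ItemCounter.getInstance().getCount(collection);
                return count < 0 ? "" : "[" + count + "]";
            } catch (ItemCountException e) {
                // Counting is a UI nicety; fall back to no label rather than failing the page.
                return "";
            }
        }
    }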
@@ -8,17 +8,17 @@
package org.dspace.browse;

import java.io.Serializable;
-import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
-import java.util.UUID;

import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
+import org.apache.solr.client.solrj.util.ClientUtils;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
+import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.discovery.DiscoverFacetField;
@@ -30,6 +30,8 @@ import org.dspace.discovery.DiscoverResult.SearchDocument;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchServiceException;
+import org.dspace.discovery.SearchUtils;
+import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationParameters;
import org.dspace.discovery.indexobject.IndexableItem;
import org.dspace.services.factory.DSpaceServicesFactory;
@@ -123,9 +125,9 @@ public class SolrBrowseDAO implements BrowseDAO {
    private String containerIDField = null;

    /**
-     * the database id of the container we are constraining to
+     * the container we are constraining to
     */
-    private UUID containerID = null;
+    private DSpaceObject container = null;

    /**
     * the column that we are sorting results by
@@ -175,7 +177,7 @@ public class SolrBrowseDAO implements BrowseDAO {
        if (sResponse == null) {
            DiscoverQuery query = new DiscoverQuery();
            addLocationScopeFilter(query);
-            addStatusFilter(query);
+            addDefaultFilterQueries(query);
            if (distinct) {
                DiscoverFacetField dff;
                if (StringUtils.isNotBlank(startsWith)) {
@@ -206,7 +208,8 @@ public class SolrBrowseDAO implements BrowseDAO {
                query.addFilterQueries("{!field f=" + facetField + "_partial}" + value);
            }
            if (StringUtils.isNotBlank(startsWith) && orderField != null) {
-                query.addFilterQueries("bi_" + orderField + "_sort:" + startsWith + "*");
+                query.addFilterQueries(
+                    "bi_" + orderField + "_sort:" + ClientUtils.escapeQueryChars(startsWith) + "*");
            }
            // filter on item to be sure to don't include any other object
            // indexed in the Discovery Search core
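Illustration (not part of this changeset): the "starts with" filter is now built with ClientUtils.escapeQueryChars, so a user-typed prefix containing Solr query metacharacters (+, (, ), :, spaces and so on) is treated as literal text instead of breaking the bi_*_sort filter query. A small demonstration of what the helper produces; bi_title_sort is just an example of the field-name pattern used above.

    import org.apache.solr.client.solrj.util.ClientUtils;

    public class EscapeStartsWithDemo {

        public static void main(String[] args) {
            String startsWith = "C++ (draft): part 1";
            // Without escaping, '+', '(', ')' and ':' would be parsed as query syntax.
            String filter = "bi_title_sort:" + ClientUtils.escapeQueryChars(startsWith) + "*";
            System.out.println(filter);
            // e.g. bi_title_sort:C\+\+\ \(draft\)\:\ part\ 1*
        }
    }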
@@ -225,26 +228,19 @@ public class SolrBrowseDAO implements BrowseDAO {
        return sResponse;
    }

-    private void addStatusFilter(DiscoverQuery query) {
-        try {
-            if (!authorizeService.isAdmin(context)
-                && (authorizeService.isCommunityAdmin(context)
-                || authorizeService.isCollectionAdmin(context))) {
-                query.addFilterQueries(searcher.createLocationQueryForAdministrableItems(context));
+    private void addLocationScopeFilter(DiscoverQuery query) {
+        if (container != null) {
+            if (containerIDField.startsWith("collection")) {
+                query.addFilterQueries("location.coll:" + container.getID());
+            } else if (containerIDField.startsWith("community")) {
+                query.addFilterQueries("location.comm:" + container.getID());
            }
-        } catch (SQLException ex) {
-            log.error("Error looking up authorization rights of current user", ex);
        }
    }

-    private void addLocationScopeFilter(DiscoverQuery query) {
-        if (containerID != null) {
-            if (containerIDField.startsWith("collection")) {
-                query.addFilterQueries("location.coll:" + containerID);
-            } else if (containerIDField.startsWith("community")) {
-                query.addFilterQueries("location.comm:" + containerID);
-            }
-        }
+    private void addDefaultFilterQueries(DiscoverQuery query) {
+        DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, container);
+        discoveryConfiguration.getDefaultFilterQueries().forEach(query::addFilterQueries);
    }

    @Override
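Illustration (not part of this changeset): the DAO now keeps the scoping DSpaceObject itself rather than a bare UUID, builds the location filter from it (location.coll:<uuid> for a collection scope, location.comm:<uuid> for a community scope), and takes the remaining default filter queries from the scope's DiscoveryConfiguration instead of re-deriving the caller's admin status. A small sketch of the resulting filter strings; the UUID is fabricated.

    import java.util.UUID;

    public class LocationScopeFilterDemo {

        // Mirrors the string building in addLocationScopeFilter above.
        static String locationFilter(String containerIDField, UUID id) {
            if (containerIDField.startsWith("collection")) {
                return "location.coll:" + id;
            } else if (containerIDField.startsWith("community")) {
                return "location.comm:" + id;
            }
            return null;
        }

        public static void main(String[] args) {
            UUID id = UUID.fromString("123e4567-e89b-12d3-a456-426614174000");
            System.out.println(locationFilter("collection", id)); // location.coll:123e4567-...
            System.out.println(locationFilter("community", id));  // location.comm:123e4567-...
        }
    }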
@@ -335,7 +331,7 @@ public class SolrBrowseDAO implements BrowseDAO {
        throws BrowseException {
        DiscoverQuery query = new DiscoverQuery();
        addLocationScopeFilter(query);
-        addStatusFilter(query);
+        addDefaultFilterQueries(query);
        query.setMaxResults(0);
        query.addFilterQueries("search.resourcetype:" + IndexableItem.TYPE);

@@ -396,8 +392,8 @@ public class SolrBrowseDAO implements BrowseDAO {
     * @see org.dspace.browse.BrowseDAO#getContainerID()
     */
    @Override
-    public UUID getContainerID() {
-        return containerID;
+    public DSpaceObject getContainer() {
+        return container;
    }

    /*
@@ -559,8 +555,8 @@ public class SolrBrowseDAO implements BrowseDAO {
     * @see org.dspace.browse.BrowseDAO#setContainerID(int)
     */
    @Override
-    public void setContainerID(UUID containerID) {
-        this.containerID = containerID;
+    public void setContainer(DSpaceObject container) {
+        this.container = container;

    }

@@ -245,7 +245,7 @@ public final class CheckerCommand {
        info.setProcessStartDate(new Date());

        try {
-            Map checksumMap = bitstreamStorageService.computeChecksum(context, info.getBitstream());
+            Map<String, Object> checksumMap = bitstreamStorageService.computeChecksum(context, info.getBitstream());
            if (MapUtils.isNotEmpty(checksumMap)) {
                info.setBitstreamFound(true);
                if (checksumMap.containsKey("checksum")) {
@@ -255,10 +255,16 @@ public final class CheckerCommand {
                if (checksumMap.containsKey("checksum_algorithm")) {
                    info.setChecksumAlgorithm(checksumMap.get("checksum_algorithm").toString());
                }

+                // compare new checksum to previous checksum
+                info.setChecksumResult(compareChecksums(info.getExpectedChecksum(), info.getCurrentChecksum()));
+
+            } else {
+                info.setCurrentChecksum("");
+                info.setChecksumResult(getChecksumResultByCode(ChecksumResultCode.BITSTREAM_NOT_FOUND));
+                info.setToBeProcessed(false);
            }

-            // compare new checksum to previous checksum
-            info.setChecksumResult(compareChecksums(info.getExpectedChecksum(), info.getCurrentChecksum()));
        } catch (IOException e) {
            // bitstream located, but file missing from asset store
            info.setChecksumResult(getChecksumResultByCode(ChecksumResultCode.BITSTREAM_NOT_FOUND));
@@ -74,7 +74,8 @@ public class ChecksumHistoryServiceImpl implements ChecksumHistoryService {
        if (mostRecentChecksum.getBitstream().isDeleted()) {
            checksumResult = checksumResultService.findByCode(context, ChecksumResultCode.BITSTREAM_MARKED_DELETED);
        } else {
-            checksumResult = checksumResultService.findByCode(context, ChecksumResultCode.CHECKSUM_MATCH);
+            checksumResult = checksumResultService.findByCode(context,
+                mostRecentChecksum.getChecksumResult().getResultCode());
        }

        checksumHistory.setResult(checksumResult);
@@ -152,6 +152,7 @@ public class SimpleReporterServiceImpl implements SimpleReporterService {

        osw.write("\n");
        osw.write(msg("bitstream-not-found-report"));
+        osw.write(" ");
        osw.write(applyDateFormatShort(startDate));
        osw.write(" ");
        osw.write(msg("date-range-to"));
@@ -230,6 +231,7 @@ public class SimpleReporterServiceImpl implements SimpleReporterService {

        osw.write("\n");
        osw.write(msg("unchecked-bitstream-report"));
+        osw.write(" ");
        osw.write(applyDateFormatShort(new Date()));
        osw.write("\n\n\n");

@@ -92,8 +92,8 @@ public class MostRecentChecksumDAOImpl extends AbstractHibernateDAO<MostRecentCh
        criteriaQuery.where(criteriaBuilder.and(
            criteriaBuilder.equal(mostRecentResult.get(ChecksumResult_.resultCode), resultCode),
            criteriaBuilder.lessThanOrEqualTo(
-                mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), startDate),
-            criteriaBuilder.greaterThan(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), endDate)
+                mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), endDate),
+            criteriaBuilder.greaterThan(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), startDate)
            )
        );
        List<Order> orderList = new LinkedList<>();
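Illustration (not part of this changeset): the old criteria asked for processStartDate <= startDate together with processStartDate > endDate, an empty window whenever startDate lies before endDate; swapping the two bounds restores the intended range, after startDate and up to endDate. A tiny check of the corrected predicate with plain java.util.Date values (the dates are invented).

    import java.util.Date;

    public class DateRangePredicateDemo {

        // The corrected condition from the hunk above: startDate < processStartDate <= endDate.
        static boolean inWindow(Date processStartDate, Date startDate, Date endDate) {
            return processStartDate.after(startDate)
                && !processStartDate.after(endDate); // lessThanOrEqualTo endDate
        }

        public static void main(String[] args) {
            Date start = new Date(1_000_000L);
            Date end = new Date(2_000_000L);
            System.out.println(inWindow(new Date(1_500_000L), start, end)); // true
            System.out.println(inWindow(new Date(2_500_000L), start, end)); // false
        }
    }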
@@ -332,8 +332,8 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
    }

    @Override
-    public List<Bitstream> findDeletedBitstreams(Context context) throws SQLException {
-        return bitstreamDAO.findDeletedBitstreams(context);
+    public List<Bitstream> findDeletedBitstreams(Context context, int limit, int offset) throws SQLException {
+        return bitstreamDAO.findDeletedBitstreams(context, limit, offset);
    }

    @Override
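Illustration (not part of this changeset): findDeletedBitstreams now takes limit and offset, so callers such as a cleanup task can walk deleted bitstreams in pages instead of loading them all at once. A hedged paging sketch against the new signature; the batch size and the service lookup are assumptions, only findDeletedBitstreams(context, limit, offset) comes from the hunk above.

    import java.sql.SQLException;
    import java.util.List;

    import org.dspace.content.Bitstream;
    import org.dspace.content.factory.ContentServiceFactory;
    import org.dspace.content.service.BitstreamService;
    import org.dspace.core.Context;

    public class DeletedBitstreamPager {

        // Walks deleted bitstreams in fixed-size pages using the new limit/offset parameters.
        public static void visitDeleted(Context context) throws SQLException {
            BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();
            final int pageSize = 100; // assumption: any reasonable batch size
            int offset = 0;
            List<Bitstream> page;
            do {
                page = bitstreamService.findDeletedBitstreams(context, pageSize, offset);
                for (Bitstream bitstream : page) {
                    // process one deleted bitstream at a time (e.g. expunge or report it)
                }
                offset += pageSize;
            } while (!page.isEmpty());
        }
    }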
@@ -8,6 +8,7 @@
package org.dspace.content;

import static org.dspace.core.Constants.ADD;
+import static org.dspace.core.Constants.READ;
import static org.dspace.core.Constants.REMOVE;
import static org.dspace.core.Constants.WRITE;

@@ -34,6 +35,7 @@ import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogHelper;
+import org.dspace.eperson.Group;
import org.dspace.event.Event;
import org.springframework.beans.factory.annotation.Autowired;

@@ -74,14 +76,14 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
        if (bundle == null) {
            if (log.isDebugEnabled()) {
                log.debug(LogHelper.getHeader(context, "find_bundle",
                    "not_found,bundle_id=" + id));
            }

            return null;
        } else {
            if (log.isDebugEnabled()) {
                log.debug(LogHelper.getHeader(context, "find_bundle",
                    "bundle_id=" + id));
            }

            return bundle;
@@ -106,7 +108,7 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement


        log.info(LogHelper.getHeader(context, "create_bundle", "bundle_id="
            + bundle.getID()));

        // if we ever use the identifier service for bundles, we should
        // create the bundle before we create the Event and should add all
@@ -132,12 +134,12 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement

    @Override
    public void addBitstream(Context context, Bundle bundle, Bitstream bitstream)
        throws SQLException, AuthorizeException {
        // Check authorisation
        authorizeService.authorizeAction(context, bundle, Constants.ADD);

        log.info(LogHelper.getHeader(context, "add_bitstream", "bundle_id="
            + bundle.getID() + ",bitstream_id=" + bitstream.getID()));

        // First check that the bitstream isn't already in the list
        List<Bitstream> bitstreams = bundle.getBitstreams();
@@ -167,28 +169,61 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement


        context.addEvent(new Event(Event.ADD, Constants.BUNDLE, bundle.getID(),
            Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()),
            getIdentifiers(context, bundle)));

        // copy authorization policies from bundle to bitstream
        // FIXME: multiple inclusion is affected by this...
        authorizeService.inheritPolicies(context, bundle, bitstream);
+        // The next logic is a bit overly cautious but ensures that if there are any future start dates
+        // on the item or bitstream read policies, that we'll skip inheriting anything from the owning collection
+        // just in case. In practice, the item install process would overwrite these anyway but it may satisfy
+        // some other bitstream creation methods and integration tests
+        boolean isEmbargoed = false;
+        for (ResourcePolicy resourcePolicy : authorizeService.getPoliciesActionFilter(context, owningItem, READ)) {
+            if (!resourcePolicyService.isDateValid(resourcePolicy)) {
+                isEmbargoed = true;
+                break;
+            }
+        }
+        if (owningItem != null && !isEmbargoed) {
+            // Resolve owning collection
+            Collection owningCollection = owningItem.getOwningCollection();
+            if (owningCollection != null) {
+                // Get DEFAULT_BITSTREAM_READ policy from the collection
+                List<Group> defaultBitstreamReadGroups =
+                    authorizeService.getAuthorizedGroups(context, owningCollection,
+                        Constants.DEFAULT_BITSTREAM_READ);
+                log.info(defaultBitstreamReadGroups.size());
+                // If this collection is configured with a DEFAULT_BITSTREAM_READ group, overwrite the READ policy
+                // inherited from the bundle with this policy.
+                if (!defaultBitstreamReadGroups.isEmpty()) {
+                    // Remove read policies from the bitstream
+                    authorizeService.removePoliciesActionFilter(context, bitstream, Constants.READ);
+                    for (Group defaultBitstreamReadGroup : defaultBitstreamReadGroups) {
+                        // Inherit this policy as READ, directly from the collection roles
+                        authorizeService.addPolicy(context, bitstream,
+                            Constants.READ, defaultBitstreamReadGroup, ResourcePolicy.TYPE_INHERITED);
+                    }
+                }
+            }
+        }
        bitstreamService.update(context, bitstream);
    }

    @Override
    public void removeBitstream(Context context, Bundle bundle, Bitstream bitstream)
        throws AuthorizeException, SQLException, IOException {
        // Check authorisation
        authorizeService.authorizeAction(context, bundle, Constants.REMOVE);

        log.info(LogHelper.getHeader(context, "remove_bitstream",
            "bundle_id=" + bundle.getID() + ",bitstream_id=" + bitstream.getID()));


        context.addEvent(new Event(Event.REMOVE, Constants.BUNDLE, bundle.getID(),
            Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()),
            getIdentifiers(context, bundle)));

        //Ensure that the last modified from the item is triggered !
        Item owningItem = (Item) getParentObject(context, bundle);
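Illustration (not part of this changeset): the block added to addBitstream changes which READ policy a new bitstream ends up with. Policies are still inherited from the bundle, but when the owning item is not embargoed (no READ policy with a future start date) and the owning collection defines DEFAULT_BITSTREAM_READ groups, those groups replace the inherited READ policies. The sketch below condenses that decision into one standalone function; the parameter names stand in for data DSpace would fetch through authorizeService and resourcePolicyService, so nothing here is DSpace API.

    import java.util.Date;
    import java.util.List;

    public class DefaultBitstreamReadDecision {

        // readPolicyStartDates: start dates of the owning item's READ policies (null = already active).
        // defaultReadGroups: names of the collection's DEFAULT_BITSTREAM_READ groups.
        static boolean overrideInheritedRead(List<Date> readPolicyStartDates, List<String> defaultReadGroups) {
            Date now = new Date();
            boolean embargoed = readPolicyStartDates.stream()
                .anyMatch(start -> start != null && start.after(now)); // a future start date means "not yet valid"
            // Override only when the item is not embargoed and the collection configures default groups.
            return !embargoed && !defaultReadGroups.isEmpty();
        }
    }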
@@ -221,9 +256,9 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement

    @Override
    public void inheritCollectionDefaultPolicies(Context context, Bundle bundle, Collection collection)
        throws SQLException, AuthorizeException {
        List<ResourcePolicy> policies = authorizeService.getPoliciesActionFilter(context, collection,
            Constants.DEFAULT_BITSTREAM_READ);

        // change the action to just READ
        // just don't call update on the resourcepolicies!!!
@@ -231,7 +266,7 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement

        if (!i.hasNext()) {
            throw new java.sql.SQLException("Collection " + collection.getID()
                + " has no default bitstream READ policies");
        }

        List<ResourcePolicy> newPolicies = new ArrayList<ResourcePolicy>();
@@ -246,7 +281,7 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement

    @Override
    public void replaceAllBitstreamPolicies(Context context, Bundle bundle, List<ResourcePolicy> newpolicies)
        throws SQLException, AuthorizeException {
        List<Bitstream> bitstreams = bundle.getBitstreams();
        if (CollectionUtils.isNotEmpty(bitstreams)) {
            for (Bitstream bs : bitstreams) {
@@ -368,16 +403,16 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
            if (bitstream == null) {
                //This should never occur but just in case
                log.warn(LogHelper.getHeader(context, "Invalid bitstream id while changing bitstream order",
                    "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId));
                continue;
            }

            // If we have a Bitstream not in the current list, log a warning & exit immediately
            if (!currentBitstreams.contains(bitstream)) {
                log.warn(LogHelper.getHeader(context,
                    "Encountered a bitstream not in this bundle while changing bitstream " +
                        "order. Bitstream order will not be changed.",
                    "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId));
                return;
            }
            updatedBitstreams.add(bitstream);
@@ -386,9 +421,9 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
        // If our lists are different sizes, exit immediately
        if (updatedBitstreams.size() != currentBitstreams.size()) {
            log.warn(LogHelper.getHeader(context,
                "Size of old list and new list do not match. Bitstream order will not be " +
                    "changed.",
                "Bundle: " + bundle.getID()));
            return;
        }

@@ -434,7 +469,7 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
            } else if (AuthorizeConfiguration.canCollectionAdminPerformBitstreamDeletion()) {
                adminObject = collection;
            } else if (AuthorizeConfiguration
                .canCommunityAdminPerformBitstreamDeletion()) {
                adminObject = community;
            }
            break;
@@ -442,10 +477,10 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
            if (AuthorizeConfiguration.canItemAdminPerformBitstreamCreation()) {
                adminObject = item;
            } else if (AuthorizeConfiguration
                .canCollectionAdminPerformBitstreamCreation()) {
                adminObject = collection;
            } else if (AuthorizeConfiguration
                .canCommunityAdminPerformBitstreamCreation()) {
                adminObject = community;
            }
            break;
@@ -477,7 +512,7 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
        // Check authorisation
        //AuthorizeManager.authorizeAction(ourContext, this, Constants.WRITE);
        log.info(LogHelper.getHeader(context, "update_bundle", "bundle_id="
            + bundle.getID()));

        super.update(context, bundle);
        bundleDAO.save(context, bundle);
@@ -485,10 +520,10 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
        if (bundle.isModified() || bundle.isMetadataModified()) {
            if (bundle.isMetadataModified()) {
                context.addEvent(new Event(Event.MODIFY_METADATA, bundle.getType(), bundle.getID(), bundle.getDetails(),
                    getIdentifiers(context, bundle)));
            }
            context.addEvent(new Event(Event.MODIFY, Constants.BUNDLE, bundle.getID(),
                null, getIdentifiers(context, bundle)));
            bundle.clearModified();
            bundle.clearDetails();
        }
@@ -497,12 +532,12 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
    @Override
    public void delete(Context context, Bundle bundle) throws SQLException, AuthorizeException, IOException {
        log.info(LogHelper.getHeader(context, "delete_bundle", "bundle_id="
            + bundle.getID()));

        authorizeService.authorizeAction(context, bundle, Constants.DELETE);

        context.addEvent(new Event(Event.DELETE, Constants.BUNDLE, bundle.getID(),
            bundle.getName(), getIdentifiers(context, bundle)));

        // Remove bitstreams
        List<Bitstream> bitstreams = bundle.getBitstreams();
@@ -29,6 +29,7 @@ import javax.persistence.Table;
import javax.persistence.Transient;

import org.dspace.authorize.AuthorizeException;
+import org.dspace.browse.ItemCountException;
import org.dspace.content.comparator.NameAscendingComparator;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
@@ -336,4 +337,17 @@ public class Collection extends DSpaceObject implements DSpaceObjectLegacySuppor
        return collectionService;
    }

+    /**
+     * return count of the collection items
+     *
+     * @return int
+     */
+    public int countArchivedItems() {
+        try {
+            return collectionService.countArchivedItems(this);
+        } catch (ItemCountException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
}
@@ -31,6 +31,8 @@ import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.authorize.service.ResourcePolicyService;
+import org.dspace.browse.ItemCountException;
+import org.dspace.browse.ItemCounter;
import org.dspace.content.dao.CollectionDAO;
import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.CollectionService;
@@ -43,6 +45,7 @@ import org.dspace.core.I18nUtil;
import org.dspace.core.LogHelper;
import org.dspace.core.service.LicenseService;
import org.dspace.discovery.DiscoverQuery;
+import org.dspace.discovery.DiscoverQuery.SORT_ORDER;
import org.dspace.discovery.DiscoverResult;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchService;
@@ -735,7 +738,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
            collection.getID(), collection.getHandle(), getIdentifiers(context, collection)));

        // remove subscriptions - hmm, should this be in Subscription.java?
-        subscribeService.deleteByCollection(context, collection);
+        subscribeService.deleteByDspaceObject(context, collection);

        // Remove Template Item
        removeTemplateItem(context, collection);
@@ -946,6 +949,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
        discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE);
        discoverQuery.setStart(offset);
        discoverQuery.setMaxResults(limit);
+        discoverQuery.setSortField(SOLR_SORT_FIELD, SORT_ORDER.asc);
        DiscoverResult resp = retrieveCollectionsWithSubmit(context, discoverQuery, null, community, q);
        for (IndexableObject solrCollections : resp.getIndexableObjects()) {
            Collection c = ((IndexableCollection) solrCollections).getIndexedObject();
@@ -1080,6 +1084,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
        discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE);
        discoverQuery.setStart(offset);
        discoverQuery.setMaxResults(limit);
+        discoverQuery.setSortField(SOLR_SORT_FIELD, SORT_ORDER.asc);
        DiscoverResult resp = retrieveCollectionsWithSubmit(context, discoverQuery,
            entityType, community, q);
        for (IndexableObject solrCollections : resp.getIndexableObjects()) {
@@ -1099,4 +1104,35 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
        return (int) resp.getTotalSearchResults();
    }

+    @Override
+    @SuppressWarnings("rawtypes")
+    public List<Collection> findAllCollectionsByEntityType(Context context, String entityType)
+        throws SearchServiceException {
+        List<Collection> collectionList = new ArrayList<>();
+
+        DiscoverQuery discoverQuery = new DiscoverQuery();
+        discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE);
+        discoverQuery.addFilterQueries("dspace.entity.type:" + entityType);
+
+        DiscoverResult discoverResult = searchService.search(context, discoverQuery);
+        List<IndexableObject> solrIndexableObjects = discoverResult.getIndexableObjects();
+
+        for (IndexableObject solrCollection : solrIndexableObjects) {
+            Collection c = ((IndexableCollection) solrCollection).getIndexedObject();
+            collectionList.add(c);
+        }
+        return collectionList;
+    }
+
+    /**
+     * Returns total collection archived items
+     *
+     * @param collection Collection
+     * @return total collection archived items
+     * @throws ItemCountException
+     */
+    @Override
+    public int countArchivedItems(Collection collection) throws ItemCountException {
+        return ItemCounter.getInstance().getCount(collection);
+    }
}
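Illustration (not part of this changeset): findAllCollectionsByEntityType filters the Discovery index on dspace.entity.type, so a caller can fetch every collection configured for a given entity type in one query. A hedged usage sketch; "Publication" is only an example value and error handling is left to the caller.

    import java.util.List;

    import org.dspace.content.Collection;
    import org.dspace.content.factory.ContentServiceFactory;
    import org.dspace.content.service.CollectionService;
    import org.dspace.core.Context;
    import org.dspace.discovery.SearchServiceException;

    public class PublicationCollectionsExample {

        public static void listPublicationCollections(Context context) throws SearchServiceException {
            CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
            // "Publication" is an example entity type; any configured dspace.entity.type value works.
            List<Collection> collections = collectionService.findAllCollectionsByEntityType(context, "Publication");
            for (Collection collection : collections) {
                System.out.println(collection.getID() + " " + collection.getName());
            }
        }
    }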
@@ -25,6 +25,7 @@ import javax.persistence.Table;
import javax.persistence.Transient;

import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.dspace.browse.ItemCountException;
import org.dspace.content.comparator.NameAscendingComparator;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CommunityService;
@@ -264,4 +265,16 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
        return communityService;
    }

+    /**
+     * return count of the community items
+     *
+     * @return int
+     */
+    public int countArchivedItems() {
+        try {
+            return communityService.countArchivedItems(this);
+        } catch (ItemCountException e) {
+            throw new RuntimeException(e);
+        }
+    }
}
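Illustration (not part of this changeset): Collection and Community now expose countArchivedItems(), delegating to their services (and ultimately to ItemCounter) and wrapping ItemCountException in a RuntimeException. A short usage sketch; the counts come back as -1 when webui.strengths.show is disabled, and that value is passed through unchanged here.

    import org.dspace.content.Collection;
    import org.dspace.content.Community;

    public class ArchivedItemCountExample {

        // Builds a one-line summary for a community and one of its collections.
        public static String summarize(Community community, Collection collection) {
            return community.getName() + ": " + community.countArchivedItems()
                + " archived items, including " + collection.countArchivedItems()
                + " in " + collection.getName();
        }
    }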
Some files were not shown because too many files have changed in this diff.