Mirror of https://github.com/DSpace/DSpace.git (synced 2025-10-07 10:04:21 +00:00)

Commit: Replace occurrences of DSpace with the dspace.name variable
@@ -4,13 +4,6 @@
# Can be validated via instructions at:
# https://docs.codecov.io/docs/codecov-yaml#validate-your-repository-yaml

-# Tell Codecov not to send a coverage notification until (at least) 2 builds are completed
-# Since we run Unit & Integration tests in parallel, this lets Codecov know that coverage
-# needs to be merged across those builds
-codecov:
-  notify:
-    after_n_builds: 2
-
# Settings related to code coverage analysis
coverage:
  status:
@@ -6,6 +6,5 @@ dspace/modules/*/target/
Dockerfile.*
dspace/src/main/docker/dspace-postgres-pgcrypto
dspace/src/main/docker/dspace-postgres-pgcrypto-curl
-dspace/src/main/docker/solr
dspace/src/main/docker/README.md
dspace/src/main/docker-compose/
.github/pull_request_template.md | 10 changed lines (vendored)
@@ -1,7 +1,7 @@
## References
_Add references/links to any related issues or PRs. These may include:_
-* Fixes #[issue-number]
-* Related to [REST Contract](https://github.com/DSpace/Rest7Contract)
+* Fixes #`issue-number` (if this fixes an issue ticket)
+* Related to DSpace/RestContract#`pr-number` (if a corresponding REST Contract PR exists)

## Description
Short summary of changes (1-2 sentences).

@@ -22,5 +22,7 @@ _This checklist provides a reminder of what we are going to look for when review
- [ ] My PR passes Checkstyle validation based on the [Code Style Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Style+Guide).
- [ ] My PR includes Javadoc for _all new (or modified) public methods and classes_. It also includes Javadoc for large or complex private methods.
- [ ] My PR passes all tests and includes new/updated Unit or Integration Tests based on the [Code Testing Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Testing+Guide).
-- [ ] If my PR includes new, third-party dependencies (in any `pom.xml`), I've made sure their licenses align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation.
-- [ ] If my PR modifies the REST API, I've linked to the REST Contract page (or open PR) related to this change.
+- [ ] If my PR includes new libraries/dependencies (in any `pom.xml`), I've made sure their licenses align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation.
+- [ ] If my PR modifies REST API endpoints, I've opened a separate [REST Contract](https://github.com/DSpace/RestContract/blob/main/README.md) PR related to this change.
+- [ ] If my PR includes new configurations, I've provided basic technical documentation in the PR itself.
+- [ ] If my PR fixes an issue ticket, I've [linked them together](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue).
.github/workflows/build.yml | 48 changed lines (vendored)
@@ -6,6 +6,9 @@ name: Build
# Run this Build for all pushes / PRs to current branch
on: [push, pull_request]

+permissions:
+  contents: read  # to fetch code (actions/checkout)
+
jobs:
  tests:
    runs-on: ubuntu-latest

@@ -42,18 +45,18 @@ jobs:
    steps:
      # https://github.com/actions/checkout
      - name: Checkout codebase
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3

      # https://github.com/actions/setup-java
      - name: Install JDK ${{ matrix.java }}
-        uses: actions/setup-java@v2
+        uses: actions/setup-java@v3
        with:
          java-version: ${{ matrix.java }}
          distribution: 'temurin'

      # https://github.com/actions/cache
      - name: Cache Maven dependencies
-        uses: actions/cache@v2
+        uses: actions/cache@v3
        with:
          # Cache entire ~/.m2/repository
          path: ~/.m2/repository

@@ -71,11 +74,44 @@ jobs:
      # (This artifact is downloadable at the bottom of any job's summary page)
      - name: Upload Results of ${{ matrix.type }} to Artifact
        if: ${{ failure() }}
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
        with:
          name: ${{ matrix.type }} results
          path: ${{ matrix.resultsdir }}

      # https://github.com/codecov/codecov-action
      # Upload code coverage report to artifact, so that it can be shared with the 'codecov' job (see below)
      - name: Upload code coverage report to Artifact
        uses: actions/upload-artifact@v3
        with:
          name: ${{ matrix.type }} coverage report
          path: 'dspace/target/site/jacoco-aggregate/jacoco.xml'
          retention-days: 14

  # Codecov upload is a separate job in order to allow us to restart this separate from the entire build/test
  # job above. This is necessary because Codecov uploads seem to randomly fail at times.
  # See https://community.codecov.com/t/upload-issues-unable-to-locate-build-via-github-actions-api/3954
  codecov:
    # Must run after 'tests' job above
    needs: tests
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v3

      # Download artifacts from previous 'tests' job
      - name: Download coverage artifacts
        uses: actions/download-artifact@v3

      # Now attempt upload to Codecov using its action.
      # NOTE: We use a retry action to retry the Codecov upload if it fails the first time.
      #
      # Retry action: https://github.com/marketplace/actions/retry-action
      # Codecov action: https://github.com/codecov/codecov-action
      - name: Upload coverage to Codecov.io
-        uses: codecov/codecov-action@v2
+        uses: Wandalen/wretry.action@v1.0.36
        with:
          action: codecov/codecov-action@v3
          # Try upload 5 times max
          attempt_limit: 5
          # Run again in 30 seconds
          attempt_delay: 30000
.github/workflows/codescan.yml | 59 added lines (vendored, new file)
@@ -0,0 +1,59 @@
# DSpace CodeQL code scanning configuration for GitHub
# https://docs.github.com/en/code-security/code-scanning
#
# NOTE: Code scanning must be run separate from our default build.yml
# because CodeQL requires a fresh build with all tests *disabled*.
name: "Code Scanning"

# Run this code scan for all pushes / PRs to main branch. Also run once a week.
on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]
    # Don't run if PR is only updating static documentation
    paths-ignore:
      - '**/*.md'
      - '**/*.txt'
  schedule:
    - cron: "37 0 * * 1"

jobs:
  analyze:
    name: Analyze Code
    runs-on: ubuntu-latest
    # Limit permissions of this GitHub action. Can only write to security-events
    permissions:
      actions: read
      contents: read
      security-events: write

    steps:
      # https://github.com/actions/checkout
      - name: Checkout repository
        uses: actions/checkout@v3

      # https://github.com/actions/setup-java
      - name: Install JDK
        uses: actions/setup-java@v3
        with:
          java-version: 11
          distribution: 'temurin'

      # Initializes the CodeQL tools for scanning.
      # https://github.com/github/codeql-action
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v2
        with:
          # Codescan Javascript as well since a few JS files exist in REST API's interface
          languages: java, javascript

      # Autobuild attempts to build any compiled languages
      # NOTE: Based on testing, this autobuild process works well for DSpace. A custom
      # DSpace build w/caching (like in build.yml) was about the same speed as autobuild.
      - name: Autobuild
        uses: github/codeql-action/autobuild@v2

      # Perform GitHub Code Scanning.
      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v2
.github/workflows/docker.yml | 108 changed lines (vendored)
@@ -12,6 +12,9 @@ on:
      - 'dspace-**'
  pull_request:

+permissions:
+  contents: read  # to fetch code (actions/checkout)
+
jobs:
  docker:
    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'

@@ -40,11 +43,11 @@ jobs:
    steps:
      # https://github.com/actions/checkout
      - name: Checkout codebase
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3

      # https://github.com/docker/setup-buildx-action
      - name: Setup Docker Buildx
-        uses: docker/setup-buildx-action@v1
+        uses: docker/setup-buildx-action@v2

      # https://github.com/docker/setup-qemu-action
      - name: Set up QEMU emulation to build for multiple architectures

@@ -54,7 +57,7 @@ jobs:
      - name: Login to DockerHub
        # Only login if not a PR, as PRs only trigger a Docker build and not a push
        if: github.event_name != 'pull_request'
-        uses: docker/login-action@v1
+        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_ACCESS_TOKEN }}

@@ -66,7 +69,7 @@ jobs:
      # Get Metadata for docker_build_deps step below
      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-dependencies' image
        id: meta_build_deps
-        uses: docker/metadata-action@v3
+        uses: docker/metadata-action@v4
        with:
          images: dspace/dspace-dependencies
          tags: ${{ env.IMAGE_TAGS }}

@@ -75,7 +78,7 @@ jobs:
      # https://github.com/docker/build-push-action
      - name: Build and push 'dspace-dependencies' image
        id: docker_build_deps
-        uses: docker/build-push-action@v2
+        uses: docker/build-push-action@v3
        with:
          context: .
          file: ./Dockerfile.dependencies

@@ -93,7 +96,7 @@ jobs:
      # Get Metadata for docker_build step below
      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace' image
        id: meta_build
-        uses: docker/metadata-action@v3
+        uses: docker/metadata-action@v4
        with:
          images: dspace/dspace
          tags: ${{ env.IMAGE_TAGS }}

@@ -101,7 +104,7 @@ jobs:

      - name: Build and push 'dspace' image
        id: docker_build
-        uses: docker/build-push-action@v2
+        uses: docker/build-push-action@v3
        with:
          context: .
          file: ./Dockerfile

@@ -119,7 +122,7 @@ jobs:
      # Get Metadata for docker_build_test step below
      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-test' image
        id: meta_build_test
-        uses: docker/metadata-action@v3
+        uses: docker/metadata-action@v4
        with:
          images: dspace/dspace
          tags: ${{ env.IMAGE_TAGS }}

@@ -130,7 +133,7 @@ jobs:

      - name: Build and push 'dspace-test' image
        id: docker_build_test
-        uses: docker/build-push-action@v2
+        uses: docker/build-push-action@v3
        with:
          context: .
          file: ./Dockerfile.test

@@ -148,7 +151,7 @@ jobs:
      # Get Metadata for docker_build_test step below
      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-cli' image
        id: meta_build_cli
-        uses: docker/metadata-action@v3
+        uses: docker/metadata-action@v4
        with:
          images: dspace/dspace-cli
          tags: ${{ env.IMAGE_TAGS }}

@@ -156,7 +159,7 @@ jobs:

      - name: Build and push 'dspace-cli' image
        id: docker_build_cli
-        uses: docker/build-push-action@v2
+        uses: docker/build-push-action@v3
        with:
          context: .
          file: ./Dockerfile.cli

@@ -167,3 +170,86 @@ jobs:
          # Use tags / labels provided by 'docker/metadata-action' above
          tags: ${{ steps.meta_build_cli.outputs.tags }}
          labels: ${{ steps.meta_build_cli.outputs.labels }}

      ###########################################
      # Build/Push the 'dspace/dspace-solr' image
      ###########################################
      # Get Metadata for docker_build_solr step below
      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-solr' image
        id: meta_build_solr
        uses: docker/metadata-action@v4
        with:
          images: dspace/dspace-solr
          tags: ${{ env.IMAGE_TAGS }}
          flavor: ${{ env.TAGS_FLAVOR }}

      - name: Build and push 'dspace-solr' image
        id: docker_build_solr
        uses: docker/build-push-action@v3
        with:
          context: .
          file: ./dspace/src/main/docker/dspace-solr/Dockerfile
          platforms: ${{ env.PLATFORMS }}
          # For pull requests, we run the Docker build (to ensure no PR changes break the build),
          # but we ONLY do an image push to DockerHub if it's NOT a PR
          push: ${{ github.event_name != 'pull_request' }}
          # Use tags / labels provided by 'docker/metadata-action' above
          tags: ${{ steps.meta_build_solr.outputs.tags }}
          labels: ${{ steps.meta_build_solr.outputs.labels }}

      ###########################################################
      # Build/Push the 'dspace/dspace-postgres-pgcrypto' image
      ###########################################################
      # Get Metadata for docker_build_postgres step below
      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-postgres-pgcrypto' image
        id: meta_build_postgres
        uses: docker/metadata-action@v4
        with:
          images: dspace/dspace-postgres-pgcrypto
          tags: ${{ env.IMAGE_TAGS }}
          flavor: ${{ env.TAGS_FLAVOR }}

      - name: Build and push 'dspace-postgres-pgcrypto' image
        id: docker_build_postgres
        uses: docker/build-push-action@v3
        with:
          # Must build out of subdirectory to have access to install script for pgcrypto
          context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/
          dockerfile: Dockerfile
          platforms: ${{ env.PLATFORMS }}
          # For pull requests, we run the Docker build (to ensure no PR changes break the build),
          # but we ONLY do an image push to DockerHub if it's NOT a PR
          push: ${{ github.event_name != 'pull_request' }}
          # Use tags / labels provided by 'docker/metadata-action' above
          tags: ${{ steps.meta_build_postgres.outputs.tags }}
          labels: ${{ steps.meta_build_postgres.outputs.labels }}

      ###########################################################
      # Build/Push the 'dspace/dspace-postgres-pgcrypto' image ('-loadsql' tag)
      ###########################################################
      # Get Metadata for docker_build_postgres_loadsql step below
      - name: Sync metadata (tags, labels) from GitHub to Docker for 'dspace-postgres-pgcrypto-loadsql' image
        id: meta_build_postgres_loadsql
        uses: docker/metadata-action@v4
        with:
          images: dspace/dspace-postgres-pgcrypto
          tags: ${{ env.IMAGE_TAGS }}
          # Suffix all tags with "-loadsql". Otherwise, it uses the same
          # tagging logic as the primary 'dspace/dspace-postgres-pgcrypto' image above.
          flavor: ${{ env.TAGS_FLAVOR }}
            suffix=-loadsql

      - name: Build and push 'dspace-postgres-pgcrypto-loadsql' image
        id: docker_build_postgres_loadsql
        uses: docker/build-push-action@v3
        with:
          # Must build out of subdirectory to have access to install script for pgcrypto
          context: ./dspace/src/main/docker/dspace-postgres-pgcrypto-curl/
          dockerfile: Dockerfile
          platforms: ${{ env.PLATFORMS }}
          # For pull requests, we run the Docker build (to ensure no PR changes break the build),
          # but we ONLY do an image push to DockerHub if it's NOT a PR
          push: ${{ github.event_name != 'pull_request' }}
          # Use tags / labels provided by 'docker/metadata-action' above
          tags: ${{ steps.meta_build_postgres_loadsql.outputs.tags }}
          labels: ${{ steps.meta_build_postgres_loadsql.outputs.labels }}
.github/workflows/issue_opened.yml | 17 changed lines (vendored)
@@ -5,25 +5,22 @@ on:
  issues:
    types: [opened]

+permissions: {}
jobs:
  automation:
    runs-on: ubuntu-latest
    steps:
-      # Add the new issue to a project board, if it needs triage
-      # See https://github.com/marketplace/actions/create-project-card-action
-      - name: Add issue to project board
+      # See https://github.com/actions/add-to-project
+      - name: Add issue to triage board
        # Only add to project board if issue is flagged as "needs triage" or has no labels
        # NOTE: By default we flag new issues as "needs triage" in our issue template
        if: (contains(github.event.issue.labels.*.name, 'needs triage') || join(github.event.issue.labels.*.name) == '')
-        uses: technote-space/create-project-card-action@v1
+        uses: actions/add-to-project@v0.5.0
        # Note, the authentication token below is an ORG level Secret.
-        # It must be created/recreated manually via a personal access token with "public_repo" and "admin:org" permissions
+        # It must be created/recreated manually via a personal access token with admin:org, project, public_repo permissions
        # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token#permissions-for-the-github_token
        # This is necessary because the "DSpace Backlog" project is an org level project (i.e. not repo specific)
        with:
-          GITHUB_TOKEN: ${{ secrets.ORG_PROJECT_TOKEN }}
-          PROJECT: DSpace Backlog
-          COLUMN: Triage
-          CHECK_ORG_PROJECT: true
-        # Ignore errors.
-        continue-on-error: true
+          github-token: ${{ secrets.TRIAGE_PROJECT_TOKEN }}
+          project-url: https://github.com/orgs/DSpace/projects/24
.github/workflows/label_merge_conflicts.yml | 27 changed lines (vendored)
@@ -5,21 +5,32 @@ name: Check for merge conflicts
# NOTE: This means merge conflicts are only checked for when a PR is merged to main.
on:
  push:
-    branches:
-      - main
+    branches: [ main ]
+  # So that the `conflict_label_name` is removed if conflicts are resolved,
+  # we allow this to run for `pull_request_target` so that github secrets are available.
+  pull_request_target:
+    types: [ synchronize ]

permissions: {}

jobs:
  triage:
+    # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace'
+    if: github.repository == 'dspace/dspace'
    runs-on: ubuntu-latest
+    permissions:
+      pull-requests: write
    steps:
-      # See: https://github.com/mschilde/auto-label-merge-conflicts/
+      # See: https://github.com/prince-chrismc/label-merge-conflicts-action
      - name: Auto-label PRs with merge conflicts
-        uses: mschilde/auto-label-merge-conflicts@v2.0
+        uses: prince-chrismc/label-merge-conflicts-action@v3
+        # Add "merge conflict" label if a merge conflict is detected. Remove it when resolved.
        # Note, the authentication token is created automatically
        # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token
        with:
-          CONFLICT_LABEL_NAME: 'merge conflict'
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        # Ignore errors
-        continue-on-error: true
+          conflict_label_name: 'merge conflict'
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          conflict_comment: |
+            Hi @${author},
+            Conflicts have been detected against the base branch.
+            Please [resolve these conflicts](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/addressing-merge-conflicts/about-merge-conflicts) as soon as you can. Thanks!
CONTRIBUTING.md | 45 added lines (new file)
@@ -0,0 +1,45 @@
# How to Contribute

DSpace is a community built and supported project. We do not have a centralized development or support team, but have a dedicated group of volunteers who help us improve the software, documentation, resources, etc.

* [Contribute new code via a Pull Request](#contribute-new-code-via-a-pull-request)
* [Contribute documentation](#contribute-documentation)
* [Help others on mailing lists or Slack](#help-others-on-mailing-lists-or-slack)
* [Join a working or interest group](#join-a-working-or-interest-group)

## Contribute new code via a Pull Request

We accept [GitHub Pull Requests (PRs)](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork) at any time from anyone.
Contributors to each release are recognized in our [Release Notes](https://wiki.lyrasis.org/display/DSDOC7x/Release+Notes).

Code Contribution Checklist
- [ ] PRs _should_ be smaller in size (ideally less than 1,000 lines of code, not including comments & tests)
- [ ] PRs **must** pass Checkstyle validation based on our [Code Style Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Style+Guide).
- [ ] PRs **must** include Javadoc for _all new/modified public methods and classes_. Larger private methods should also have Javadoc
- [ ] PRs **must** pass all automated tests and include new/updated Unit or Integration tests based on our [Code Testing Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Testing+Guide).
- [ ] If a PR includes new libraries/dependencies (in any `pom.xml`), then their software licenses **must** align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation.
- [ ] Basic technical documentation _should_ be provided for any new features or changes to the REST API. REST API changes should be documented in our [Rest Contract](https://github.com/DSpace/RestContract).
- [ ] If a PR fixes an issue ticket, please [link them together](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue).

Additional details on the code contribution process can be found in our [Code Contribution Guidelines](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines)

## Contribute documentation

DSpace Documentation is a collaborative effort in a shared Wiki. The latest documentation is at https://wiki.lyrasis.org/display/DSDOC7x

If you find areas of the DSpace Documentation which you wish to improve, please request a Wiki account by emailing wikihelp@lyrasis.org.
Once you have an account setup, contact @tdonohue (via [Slack](https://wiki.lyrasis.org/display/DSPACE/Slack) or email) for access to edit our Documentation.

## Help others on mailing lists or Slack

DSpace has our own [Slack](https://wiki.lyrasis.org/display/DSPACE/Slack) community and [Mailing Lists](https://wiki.lyrasis.org/display/DSPACE/Mailing+Lists) where discussions take place and questions are answered.
Anyone is welcome to join and help others. We just ask you to follow our [Code of Conduct](https://www.lyrasis.org/about/Pages/Code-of-Conduct.aspx) (adopted via LYRASIS).

## Join a working or interest group

Most of the work in building/improving DSpace comes via [Working Groups](https://wiki.lyrasis.org/display/DSPACE/DSpace+Working+Groups) or [Interest Groups](https://wiki.lyrasis.org/display/DSPACE/DSpace+Interest+Groups).

All working/interest groups are open to anyone to join and participate. A few key groups to be aware of include:

* [DSpace 7 Working Group](https://wiki.lyrasis.org/display/DSPACE/DSpace+7+Working+Group) - This is the main (mostly volunteer) development team. We meet weekly to review our current development [project board](https://github.com/orgs/DSpace/projects), assigning tickets and/or PRs.
* [DSpace Community Advisory Team (DCAT)](https://wiki.lyrasis.org/display/cmtygp/DSpace+Community+Advisory+Team) - This is an interest group for repository managers/administrators. We meet monthly to discuss DSpace, share tips & provide feedback back to developers.
@@ -31,7 +31,7 @@ ARG TARGET_DIR=dspace-installer
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
-ENV ANT_VERSION 1.10.12
+ENV ANT_VERSION 1.10.13
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
# Need wget to install ant
@@ -30,12 +30,12 @@ ARG TARGET_DIR=dspace-installer
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
-ENV ANT_VERSION 1.10.12
+ENV ANT_VERSION 1.10.13
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
-# Need wget to install ant
+# Need wget to install ant, and unzip for managing AIPs
RUN apt-get update \
-    && apt-get install -y --no-install-recommends wget \
+    && apt-get install -y --no-install-recommends wget unzip \
    && apt-get purge -y --auto-remove \
    && rm -rf /var/lib/apt/lists/*
# Download and install 'ant'

@@ -58,9 +58,11 @@ COPY --from=ant_build /dspace $DSPACE_INSTALL
# NOTE: secretRequired="false" should only be used when AJP is NOT accessible from an external network. But, secretRequired="true" isn't supported by mod_proxy_ajp until Apache 2.5
RUN sed -i '/Service name="Catalina".*/a \\n    <Connector protocol="AJP/1.3" port="8009" address="0.0.0.0" redirectPort="8443" URIEncoding="UTF-8" secretRequired="false" />' $TOMCAT_INSTALL/conf/server.xml
# Expose Tomcat port and AJP port
-EXPOSE 8080 8009
+EXPOSE 8080 8009 8000
# Give java extra memory (2GB)
ENV JAVA_OPTS=-Xmx2000m
+# Set up debugging
+ENV CATALINA_OPTS=-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=*:8000

# Link the DSpace 'server' webapp into Tomcat's webapps directory.
# This ensures that when we start Tomcat, it runs from /server path (e.g. http://localhost:8080/server/)
@@ -30,9 +30,9 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* parso (com.epam:parso:2.0.14 - https://github.com/epam/parso)
* Esri Geometry API for Java (com.esri.geometry:esri-geometry-api:2.2.0 - https://github.com/Esri/geometry-api-java)
* ClassMate (com.fasterxml:classmate:1.3.0 - http://github.com/cowtowncoder/java-classmate)
-* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.12.6 - http://github.com/FasterXML/jackson)
-* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.12.6 - https://github.com/FasterXML/jackson-core)
-* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.12.6.1 - http://github.com/FasterXML/jackson)
+* Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.13.4 - http://github.com/FasterXML/jackson)
+* Jackson-core (com.fasterxml.jackson.core:jackson-core:2.13.4 - https://github.com/FasterXML/jackson-core)
+* jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.13.4.2 - http://github.com/FasterXML/jackson)
* Jackson dataformat: CBOR (com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.12.6 - http://github.com/FasterXML/jackson-dataformats-binary)
* Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.12.3 - http://github.com/FasterXML/jackson-dataformats-binary)
* Jackson-dataformat-YAML (com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1 - https://github.com/FasterXML/jackson-dataformats-text)

@@ -151,7 +151,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* I18N Libraries (org.apache.abdera:abdera-i18n:1.1.3 - http://abdera.apache.org)
* Apache Ant Core (org.apache.ant:ant:1.10.11 - https://ant.apache.org/)
* Apache Ant Launcher (org.apache.ant:ant-launcher:1.10.11 - https://ant.apache.org/)
-* Apache Commons BCEL (org.apache.bcel:bcel:6.4.0 - https://commons.apache.org/proper/commons-bcel)
+* Apache Commons BCEL (org.apache.bcel:bcel:6.6.0 - https://commons.apache.org/proper/commons-bcel)
* Calcite Core (org.apache.calcite:calcite-core:1.27.0 - https://calcite.apache.org)
* Calcite Linq4j (org.apache.calcite:calcite-linq4j:1.27.0 - https://calcite.apache.org)
* Apache Calcite Avatica (org.apache.calcite.avatica:avatica-core:1.18.0 - https://calcite.apache.org/avatica)

@@ -159,12 +159,12 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Apache Commons Compress (org.apache.commons:commons-compress:1.21 - https://commons.apache.org/proper/commons-compress/)
* Apache Commons Configuration (org.apache.commons:commons-configuration2:2.8.0 - https://commons.apache.org/proper/commons-configuration/)
* Apache Commons CSV (org.apache.commons:commons-csv:1.9.0 - https://commons.apache.org/proper/commons-csv/)
-* Apache Commons DBCP (org.apache.commons:commons-dbcp2:2.8.0 - https://commons.apache.org/dbcp/)
+* Apache Commons DBCP (org.apache.commons:commons-dbcp2:2.9.0 - https://commons.apache.org/dbcp/)
* Apache Commons Exec (org.apache.commons:commons-exec:1.3 - http://commons.apache.org/proper/commons-exec/)
* Apache Commons Lang (org.apache.commons:commons-lang3:3.12.0 - https://commons.apache.org/proper/commons-lang/)
* Apache Commons Math (org.apache.commons:commons-math3:3.6.1 - http://commons.apache.org/proper/commons-math/)
-* Apache Commons Pool (org.apache.commons:commons-pool2:2.9.0 - https://commons.apache.org/proper/commons-pool/)
-* Apache Commons Text (org.apache.commons:commons-text:1.9 - https://commons.apache.org/proper/commons-text)
+* Apache Commons Pool (org.apache.commons:commons-pool2:2.11.1 - https://commons.apache.org/proper/commons-pool/)
+* Apache Commons Text (org.apache.commons:commons-text:1.10.0 - https://commons.apache.org/proper/commons-text)
* Curator Client (org.apache.curator:curator-client:2.13.0 - http://curator.apache.org/curator-client)
* Curator Framework (org.apache.curator:curator-framework:2.13.0 - http://curator.apache.org/curator-framework)
* Curator Recipes (org.apache.curator:curator-recipes:2.13.0 - http://curator.apache.org/curator-recipes)

@@ -218,10 +218,10 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* Lucene Spatial Extras (org.apache.lucene:lucene-spatial-extras:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-spatial-extras)
* Lucene Spatial 3D (org.apache.lucene:lucene-spatial3d:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-spatial3d)
* Lucene Suggest (org.apache.lucene:lucene-suggest:8.11.1 - https://lucene.apache.org/lucene-parent/lucene-suggest)
-* Apache FontBox (org.apache.pdfbox:fontbox:2.0.24 - http://pdfbox.apache.org/)
+* Apache FontBox (org.apache.pdfbox:fontbox:2.0.27 - http://pdfbox.apache.org/)
* PDFBox JBIG2 ImageIO plugin (org.apache.pdfbox:jbig2-imageio:3.0.3 - https://www.apache.org/jbig2-imageio/)
* Apache JempBox (org.apache.pdfbox:jempbox:1.8.16 - http://www.apache.org/pdfbox-parent/jempbox/)
-* Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.24 - https://www.apache.org/pdfbox-parent/pdfbox/)
+* Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.27 - https://www.apache.org/pdfbox-parent/pdfbox/)
* Apache PDFBox Debugger (org.apache.pdfbox:pdfbox-debugger:2.0.25 - https://www.apache.org/pdfbox-parent/pdfbox-debugger/)
* Apache PDFBox tools (org.apache.pdfbox:pdfbox-tools:2.0.25 - https://www.apache.org/pdfbox-parent/pdfbox-tools/)
* Apache XmpBox (org.apache.pdfbox:xmpbox:2.0.25 - https://www.apache.org/pdfbox-parent/xmpbox/)

@@ -426,7 +426,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* asm-commons (org.ow2.asm:asm-commons:8.0.1 - http://asm.ow2.io/)
* asm-tree (org.ow2.asm:asm-tree:7.1 - http://asm.ow2.org/)
* asm-util (org.ow2.asm:asm-util:7.1 - http://asm.ow2.org/)
-* PostgreSQL JDBC Driver (org.postgresql:postgresql:42.4.1 - https://jdbc.postgresql.org)
+* PostgreSQL JDBC Driver (org.postgresql:postgresql:42.4.3 - https://jdbc.postgresql.org)
* Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections)
* JMatIO (org.tallison:jmatio:1.5 - https://github.com/tballison/jmatio)
* XMLUnit for Java (xmlunit:xmlunit:1.3 - http://xmlunit.sourceforge.net/)

@@ -589,7 +589,7 @@ https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines
* jquery (org.webjars.bowergithub.jquery:jquery-dist:3.6.0 - https://www.webjars.org)
* urijs (org.webjars.bowergithub.medialize:uri.js:1.19.10 - https://www.webjars.org)
* bootstrap (org.webjars.bowergithub.twbs:bootstrap:4.6.1 - https://www.webjars.org)
-* core-js (org.webjars.npm:core-js:3.25.2 - https://www.webjars.org)
+* core-js (org.webjars.npm:core-js:3.28.0 - https://www.webjars.org)
* @json-editor/json-editor (org.webjars.npm:json-editor__json-editor:2.6.1 - https://www.webjars.org)

Mozilla Public License:
README.md | 13 changed lines
@@ -48,18 +48,7 @@ See [Running DSpace 7 with Docker Compose](dspace/src/main/docker-compose/README

## Contributing

-DSpace is a community built and supported project. We do not have a centralized development or support team,
-but have a dedicated group of volunteers who help us improve the software, documentation, resources, etc.
-
-We welcome contributions of any type. Here's a few basic guides that provide suggestions for contributing to DSpace:
-* [How to Contribute to DSpace](https://wiki.lyrasis.org/display/DSPACE/How+to+Contribute+to+DSpace): How to contribute in general (via code, documentation, bug reports, expertise, etc)
-* [Code Contribution Guidelines](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines): How to give back code or contribute features, bug fixes, etc.
-* [DSpace Community Advisory Team (DCAT)](https://wiki.lyrasis.org/display/cmtygp/DSpace+Community+Advisory+Team): If you are not a developer, we also have an interest group specifically for repository managers. The DCAT group meets virtually, once a month, and sends open invitations to join their meetings via the [DCAT mailing list](https://groups.google.com/d/forum/DSpaceCommunityAdvisoryTeam).
-
-We also encourage GitHub Pull Requests (PRs) at any time. Please see our [Development with Git](https://wiki.lyrasis.org/display/DSPACE/Development+with+Git) guide for more info.
-
-In addition, a listing of all known contributors to DSpace software can be
-found online at: https://wiki.lyrasis.org/display/DSPACE/DSpaceContributors
+See [Contributing documentation](CONTRIBUTING.md)

## Getting Help
@@ -92,9 +92,7 @@ For more information on CheckStyle configurations below, see: http://checkstyle.
        <!-- Requirements for Javadocs for methods -->
        <module name="JavadocMethod">
            <!-- All public methods MUST HAVE Javadocs -->
-            <!-- <property name="scope" value="public"/> -->
-            <!-- TODO: Above rule has been disabled because of large amount of missing public method Javadocs -->
-            <property name="scope" value="nothing"/>
+            <property name="scope" value="public"/>
            <!-- Allow params, throws and return tags to be optional -->
            <property name="allowMissingParamTags" value="true"/>
            <property name="allowMissingReturnTag" value="true"/>
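The change above re-enables Checkstyle's JavadocMethod rule at public scope, so every new or modified public method must carry a Javadoc comment, while @param/@return tags remain optional. A minimal sketch of a method that would now pass the check; the method name and the configurationService field are illustrative assumptions, not part of this commit:

    /**
     * Look up the configured repository name, falling back to "DSpace" when the
     * dspace.name property is unset. (The Javadoc comment itself is what the
     * re-enabled rule requires; param/return tags may still be omitted.)
     */
    public String getRepositoryName() {
        // Hypothetical injected org.dspace.services.ConfigurationService instance
        return configurationService.getProperty("dspace.name", "DSpace");
    }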
@@ -41,6 +41,8 @@ services:
        target: 8080
      - published: 8009
        target: 8009
+      - published: 8000
+        target: 8000
    stdin_open: true
    tty: true
    volumes:

@@ -60,13 +62,17 @@ services:
        while (!</dev/tcp/dspacedb/5432) > /dev/null 2>&1; do sleep 1; done;
        /dspace/bin/dspace database migrate
        catalina.sh run
-  # DSpace database container
+  # DSpace PostgreSQL database container
  dspacedb:
    container_name: dspacedb
+    # Uses a custom Postgres image with pgcrypto installed
+    image: "${DOCKER_OWNER:-dspace}/dspace-postgres-pgcrypto:${DSPACE_VER:-dspace-7_x}"
+    build:
+      # Must build out of subdirectory to have access to install script for pgcrypto
+      context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/
    environment:
      PGDATA: /pgdata
-    # Uses a custom Postgres image with pgcrypto installed
-    image: dspace/dspace-postgres-pgcrypto
+      POSTGRES_PASSWORD: dspace
    networks:
      dspacenet:
    ports:

@@ -75,12 +81,17 @@ services:
    stdin_open: true
    tty: true
    volumes:
+      # Keep Postgres data directory between reboots
      - pgdata:/pgdata
  # DSpace Solr container
  dspacesolr:
    container_name: dspacesolr
-    # Uses official Solr image at https://hub.docker.com/_/solr/
-    image: solr:8.11-slim
+    image: "${DOCKER_OWNER:-dspace}/dspace-solr:${DSPACE_VER:-dspace-7_x}"
+    build:
+      context: .
+      dockerfile: ./dspace/src/main/docker/dspace-solr/Dockerfile
+      args:
+        SOLR_VERSION: "${SOLR_VER:-8.11}"
    networks:
      dspacenet:
    ports:

@@ -90,30 +101,25 @@ services:
    tty: true
    working_dir: /var/solr/data
    volumes:
-      # Mount our local Solr core configs so that they are available as Solr configsets on container
-      - ./dspace/solr/authority:/opt/solr/server/solr/configsets/authority
-      - ./dspace/solr/oai:/opt/solr/server/solr/configsets/oai
-      - ./dspace/solr/search:/opt/solr/server/solr/configsets/search
-      - ./dspace/solr/statistics:/opt/solr/server/solr/configsets/statistics
      # Keep Solr data directory between reboots
      - solr_data:/var/solr/data
-    # Initialize all DSpace Solr cores using the mounted local configsets (see above), then start Solr
+    # Initialize all DSpace Solr cores then start Solr:
    #   * First, run precreate-core to create the core (if it doesn't yet exist). If exists already, this is a no-op
-    #   * Second, copy updated configs from mounted configsets to this core. If it already existed, this updates core
-    #     to the latest configs. If it's a newly created core, this is a no-op.
+    #   * Second, copy configsets to this core:
+    #     Updates to Solr configs require the container to be rebuilt/restarted: `docker compose -p d7 up -d --build dspacesolr`
    entrypoint:
      - /bin/bash
      - '-c'
      - |
        init-var-solr
        precreate-core authority /opt/solr/server/solr/configsets/authority
-        cp -r -u /opt/solr/server/solr/configsets/authority/* authority
+        cp -r /opt/solr/server/solr/configsets/authority/* authority
        precreate-core oai /opt/solr/server/solr/configsets/oai
-        cp -r -u /opt/solr/server/solr/configsets/oai/* oai
+        cp -r /opt/solr/server/solr/configsets/oai/* oai
        precreate-core search /opt/solr/server/solr/configsets/search
-        cp -r -u /opt/solr/server/solr/configsets/search/* search
+        cp -r /opt/solr/server/solr/configsets/search/* search
        precreate-core statistics /opt/solr/server/solr/configsets/statistics
-        cp -r -u /opt/solr/server/solr/configsets/statistics/* statistics
+        cp -r /opt/solr/server/solr/configsets/statistics/* statistics
        exec solr -f
volumes:
  assetstore:
@@ -12,7 +12,7 @@
    <parent>
        <groupId>org.dspace</groupId>
        <artifactId>dspace-parent</artifactId>
-        <version>7.4</version>
+        <version>7.6-SNAPSHOT</version>
        <relativePath>..</relativePath>
    </parent>

@@ -589,13 +589,6 @@
            <artifactId>solr-core</artifactId>
            <scope>test</scope>
            <version>${solr.client.version}</version>
-            <exclusions>
-                <!-- Newer version brought in by opencsv -->
-                <exclusion>
-                    <groupId>org.apache.commons</groupId>
-                    <artifactId>commons-text</artifactId>
-                </exclusion>
-            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.apache.lucene</groupId>

@@ -783,7 +776,7 @@
        <dependency>
            <groupId>org.json</groupId>
            <artifactId>json</artifactId>
-            <version>20180130</version>
+            <version>20230227</version>
        </dependency>

        <!-- Useful for testing command-line tools -->

@@ -813,10 +806,11 @@
            <scope>test</scope>
        </dependency>

-        <dependency>
+        <dependency>
            <groupId>org.apache.bcel</groupId>
            <artifactId>bcel</artifactId>
-            <version>6.4.0</version>
+            <version>6.6.0</version>
            <scope>test</scope>
        </dependency>

        <!-- required for openaire api integration -->

@@ -846,11 +840,6 @@
        <!-- for mockserver -->
        <!-- Solve dependency convergence issues related to
             'mockserver-junit-rule' by selecting the versions we want to use. -->
-        <dependency>
-            <groupId>org.apache.commons</groupId>
-            <artifactId>commons-text</artifactId>
-            <version>1.9</version>
-        </dependency>
        <dependency>
            <groupId>io.netty</groupId>
            <artifactId>netty-buffer</artifactId>
@@ -0,0 +1,54 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.alerts;

/**
 * Enum representing the options for allowing sessions:
 *  ALLOW_ALL_SESSIONS - Will allow all users to log in and continue their sessions
 *  ALLOW_CURRENT_SESSIONS_ONLY - Will prevent non admin users from logging in, however logged-in users
 *  will remain logged in
 *  ALLOW_ADMIN_SESSIONS_ONLY - Only admin users can log in, non admin sessions will be interrupted
 *
 * NOTE: This functionality can be stored in the database, but no support is present right now to interrupt and prevent
 * sessions.
 */
public enum AllowSessionsEnum {
    ALLOW_ALL_SESSIONS("all"),
    ALLOW_CURRENT_SESSIONS_ONLY("current"),
    ALLOW_ADMIN_SESSIONS_ONLY("admin");

    private String allowSessionsType;

    AllowSessionsEnum(String allowSessionsType) {
        this.allowSessionsType = allowSessionsType;
    }

    public String getValue() {
        return allowSessionsType;
    }

    public static AllowSessionsEnum fromString(String alertAllowSessionType) {
        if (alertAllowSessionType == null) {
            return AllowSessionsEnum.ALLOW_ALL_SESSIONS;
        }

        switch (alertAllowSessionType) {
            case "all":
                return AllowSessionsEnum.ALLOW_ALL_SESSIONS;
            case "current":
                return AllowSessionsEnum.ALLOW_CURRENT_SESSIONS_ONLY;
            case "admin" :
                return AllowSessionsEnum.ALLOW_ADMIN_SESSIONS_ONLY;
            default:
                throw new IllegalArgumentException("No corresponding enum value for provided string: "
                    + alertAllowSessionType);
        }
    }

}
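For orientation only (not part of the diff): a minimal usage sketch showing how the enum's stored string value and the fromString factory round-trip, e.g. when the value is read back from the allow_sessions column used by the SystemWideAlert entity below.

    // Illustrative usage sketch (not in this commit)
    String stored = AllowSessionsEnum.ALLOW_ADMIN_SESSIONS_ONLY.getValue();   // "admin"
    AllowSessionsEnum restored = AllowSessionsEnum.fromString(stored);        // ALLOW_ADMIN_SESSIONS_ONLY
    AllowSessionsEnum fallback = AllowSessionsEnum.fromString(null);          // defaults to ALLOW_ALL_SESSIONS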
dspace-api/src/main/java/org/dspace/alerts/SystemWideAlert.java | 179 added lines (new file)
@@ -0,0 +1,179 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.alerts;

import java.util.Date;
import javax.persistence.Cacheable;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;

import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.dspace.core.ReloadableEntity;
import org.hibernate.annotations.CacheConcurrencyStrategy;

/**
 * Database object representing system-wide alerts
 */
@Entity
@Cacheable
@org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, include = "non-lazy")
@Table(name = "systemwidealert")
public class SystemWideAlert implements ReloadableEntity<Integer> {

    @Id
    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "alert_id_seq")
    @SequenceGenerator(name = "alert_id_seq", sequenceName = "alert_id_seq", allocationSize = 1)
    @Column(name = "alert_id", unique = true, nullable = false)
    private Integer alertId;

    @Column(name = "message", nullable = false)
    private String message;

    @Column(name = "allow_sessions")
    private String allowSessions;

    @Column(name = "countdown_to")
    @Temporal(TemporalType.TIMESTAMP)
    private Date countdownTo;

    @Column(name = "active")
    private boolean active;

    protected SystemWideAlert() {
    }

    /**
     * This method returns the ID that the system-wide alert holds within the database
     *
     * @return The ID that the system-wide alert holds within the database
     */
    @Override
    public Integer getID() {
        return alertId;
    }

    /**
     * Set the ID for the system-wide alert
     *
     * @param alertID The ID to set
     */
    public void setID(final Integer alertID) {
        this.alertId = alertID;
    }

    /**
     * Retrieve the message of the system-wide alert
     *
     * @return the message of the system-wide alert
     */
    public String getMessage() {
        return message;
    }

    /**
     * Set the message of the system-wide alert
     *
     * @param message The message to set
     */
    public void setMessage(final String message) {
        this.message = message;
    }

    /**
     * Retrieve what kind of sessions are allowed while the system-wide alert is active
     *
     * @return what kind of sessions are allowed while the system-wide alert is active
     */
    public AllowSessionsEnum getAllowSessions() {
        return AllowSessionsEnum.fromString(allowSessions);
    }

    /**
     * Set what kind of sessions are allowed while the system-wide alert is active
     *
     * @param allowSessions Integer representing what kind of sessions are allowed
     */
    public void setAllowSessions(AllowSessionsEnum allowSessions) {
        this.allowSessions = allowSessions.getValue();
    }

    /**
     * Retrieve the date to which will be count down when the system-wide alert is active
     *
     * @return the date to which will be count down when the system-wide alert is active
     */
    public Date getCountdownTo() {
        return countdownTo;
    }

    /**
     * Set the date to which will be count down when the system-wide alert is active
     *
     * @param countdownTo The date to which will be count down
     */
    public void setCountdownTo(final Date countdownTo) {
        this.countdownTo = countdownTo;
    }

    /**
     * Retrieve whether the system-wide alert is active
     *
     * @return whether the system-wide alert is active
     */
    public boolean isActive() {
        return active;
    }

    /**
     * Set whether the system-wide alert is active
     *
     * @param active Whether the system-wide alert is active
     */
    public void setActive(final boolean active) {
        this.active = active;
    }

    /**
     * Return <code>true</code> if <code>other</code> is the same SystemWideAlert
     * as this object, <code>false</code> otherwise
     *
     * @param other object to compare to
     * @return <code>true</code> if object passed in represents the same
     * system-wide alert as this object
     */
    @Override
    public boolean equals(Object other) {
        return (other instanceof SystemWideAlert &&
            new EqualsBuilder().append(this.getID(), ((SystemWideAlert) other).getID())
                               .append(this.getMessage(), ((SystemWideAlert) other).getMessage())
                               .append(this.getAllowSessions(), ((SystemWideAlert) other).getAllowSessions())
                               .append(this.getCountdownTo(), ((SystemWideAlert) other).getCountdownTo())
                               .append(this.isActive(), ((SystemWideAlert) other).isActive())
                               .isEquals());
    }

    @Override
    public int hashCode() {
        return new HashCodeBuilder(17, 37)
            .append(this.getID())
            .append(this.getMessage())
            .append(this.getAllowSessions())
            .append(this.getCountdownTo())
            .append(this.isActive())
            .toHashCode();
    }

}
@@ -0,0 +1,129 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.alerts;

import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.List;

import org.apache.logging.log4j.Logger;
import org.dspace.alerts.dao.SystemWideAlertDAO;
import org.dspace.alerts.service.SystemWideAlertService;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.core.LogHelper;
import org.dspace.eperson.EPerson;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * The implementation for the {@link SystemWideAlertService} class
 */
public class SystemWideAlertServiceImpl implements SystemWideAlertService {

    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SystemWideAlertService.class);

    @Autowired
    private SystemWideAlertDAO systemWideAlertDAO;

    @Autowired
    private AuthorizeService authorizeService;

    @Override
    public SystemWideAlert create(final Context context, final String message,
                                  final AllowSessionsEnum allowSessionsType,
                                  final Date countdownTo, final boolean active) throws SQLException,
        AuthorizeException {
        if (!authorizeService.isAdmin(context)) {
            throw new AuthorizeException(
                "Only administrators can create a system-wide alert");
        }
        SystemWideAlert systemWideAlert = new SystemWideAlert();
        systemWideAlert.setMessage(message);
        systemWideAlert.setAllowSessions(allowSessionsType);
        systemWideAlert.setCountdownTo(countdownTo);
        systemWideAlert.setActive(active);

        SystemWideAlert createdAlert = systemWideAlertDAO.create(context, systemWideAlert);
        log.info(LogHelper.getHeader(context, "system_wide_alert_create",
                                     "System Wide Alert has been created with message: '" + message + "' and ID "
                                         + createdAlert.getID() + " and allowSessionsType " + allowSessionsType +
                                         " and active set to " + active));

        return createdAlert;
    }

    @Override
    public SystemWideAlert find(final Context context, final int alertId) throws SQLException {
        return systemWideAlertDAO.findByID(context, SystemWideAlert.class, alertId);
    }

    @Override
    public List<SystemWideAlert> findAll(final Context context) throws SQLException {
        return systemWideAlertDAO.findAll(context, SystemWideAlert.class);
    }

    @Override
    public List<SystemWideAlert> findAll(final Context context, final int limit, final int offset) throws SQLException {
        return systemWideAlertDAO.findAll(context, limit, offset);
    }

    @Override
    public List<SystemWideAlert> findAllActive(final Context context, final int limit, final int offset)
        throws SQLException {
        return systemWideAlertDAO.findAllActive(context, limit, offset);
    }

    @Override
    public void delete(final Context context, final SystemWideAlert systemWideAlert)
        throws SQLException, IOException, AuthorizeException {
        if (!authorizeService.isAdmin(context)) {
            throw new AuthorizeException(
                "Only administrators can create a system-wide alert");
        }
        systemWideAlertDAO.delete(context, systemWideAlert);
        log.info(LogHelper.getHeader(context, "system_wide_alert_create",
                                     "System Wide Alert with ID " + systemWideAlert.getID() + " has been deleted"));

    }

    @Override
    public void update(final Context context, final SystemWideAlert systemWideAlert)
        throws SQLException, AuthorizeException {
        if (!authorizeService.isAdmin(context)) {
            throw new AuthorizeException(
                "Only administrators can create a system-wide alert");
        }
        systemWideAlertDAO.save(context, systemWideAlert);

    }

    @Override
    public boolean canNonAdminUserLogin(Context context) throws SQLException {
        List<SystemWideAlert> active = findAllActive(context, 1, 0);
        if (active == null || active.isEmpty()) {
            return true;
        }
        return active.get(0).getAllowSessions() == AllowSessionsEnum.ALLOW_ALL_SESSIONS;
    }

    @Override
    public boolean canUserMaintainSession(Context context, EPerson ePerson) throws SQLException {
        if (authorizeService.isAdmin(context, ePerson)) {
            return true;
        }
        List<SystemWideAlert> active = findAllActive(context, 1, 0);
        if (active == null || active.isEmpty()) {
            return true;
        }
        return active.get(0).getAllowSessions() != AllowSessionsEnum.ALLOW_ADMIN_SESSIONS_ONLY;
    }
}
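To make the service's intent concrete, a hypothetical caller sketch (not part of this commit): an administrator creates an alert through the service, and a later authentication step consults canNonAdminUserLogin. The alertService and context variables are assumed to be available in the surrounding class.

    // Hypothetical caller, illustrative only
    SystemWideAlert alert = alertService.create(context,
            "Maintenance starts at 18:00 UTC; new logins are restricted until further notice.",
            AllowSessionsEnum.ALLOW_CURRENT_SESSIONS_ONLY, new Date(), true);

    // Later, while authenticating a non-admin user:
    if (!alertService.canNonAdminUserLogin(context)) {
        throw new AuthorizeException("Logins are temporarily restricted by a system-wide alert");
    }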
@@ -0,0 +1,45 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.alerts.dao;

import java.sql.SQLException;
import java.util.List;

import org.dspace.alerts.SystemWideAlert;
import org.dspace.core.Context;
import org.dspace.core.GenericDAO;

/**
 * This is the Data Access Object for the {@link SystemWideAlert} object
 */
public interface SystemWideAlertDAO extends GenericDAO<SystemWideAlert> {

    /**
     * Returns a list of all SystemWideAlert objects in the database
     *
     * @param context The relevant DSpace context
     * @param limit   The limit for the amount of SystemWideAlerts returned
     * @param offset  The offset for the Processes to be returned
     * @return The list of all SystemWideAlert objects in the Database
     * @throws SQLException If something goes wrong
     */
    List<SystemWideAlert> findAll(Context context, int limit, int offset) throws SQLException;

    /**
     * Returns a list of all active SystemWideAlert objects in the database
     *
     * @param context The relevant DSpace context
     * @param limit   The limit for the amount of SystemWideAlerts returned
     * @param offset  The offset for the Processes to be returned
     * @return The list of all SystemWideAlert objects in the Database
     * @throws SQLException If something goes wrong
     */
    List<SystemWideAlert> findAllActive(Context context, int limit, int offset) throws SQLException;

}
@@ -0,0 +1,48 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.alerts.dao.impl;

import java.sql.SQLException;
import java.util.List;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;

import org.dspace.alerts.SystemWideAlert;
import org.dspace.alerts.SystemWideAlert_;
import org.dspace.alerts.dao.SystemWideAlertDAO;
import org.dspace.core.AbstractHibernateDAO;
import org.dspace.core.Context;

/**
* Implementation class for the {@link SystemWideAlertDAO}
*/
public class SystemWideAlertDAOImpl extends AbstractHibernateDAO<SystemWideAlert> implements SystemWideAlertDAO {

public List<SystemWideAlert> findAll(final Context context, final int limit, final int offset) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, SystemWideAlert.class);
Root<SystemWideAlert> alertRoot = criteriaQuery.from(SystemWideAlert.class);
criteriaQuery.select(alertRoot);

return list(context, criteriaQuery, false, SystemWideAlert.class, limit, offset);
}

public List<SystemWideAlert> findAllActive(final Context context, final int limit, final int offset)
throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, SystemWideAlert.class);
Root<SystemWideAlert> alertRoot = criteriaQuery.from(SystemWideAlert.class);
criteriaQuery.select(alertRoot);
criteriaQuery.where(criteriaBuilder.equal(alertRoot.get(SystemWideAlert_.active), true));

return list(context, criteriaQuery, false, SystemWideAlert.class, limit, offset);
}

}
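For readers more at home with JPQL than the Criteria API, findAllActive above is roughly equivalent to the following sketch (illustration only; it assumes a plain JPA EntityManager is at hand, which is not how AbstractHibernateDAO exposes its session):

// Hedged sketch: the same "active alerts, paged" query expressed in JPQL.
List<SystemWideAlert> activeAlerts = entityManager
        .createQuery("FROM SystemWideAlert alert WHERE alert.active = true", SystemWideAlert.class)
        .setFirstResult(offset)   // paging offset
        .setMaxResults(limit)     // paging limit
        .getResultList();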
@@ -0,0 +1,118 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.alerts.service;

import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.List;

import org.dspace.alerts.AllowSessionsEnum;
import org.dspace.alerts.SystemWideAlert;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;

/**
* An interface for the SystemWideAlertService with methods regarding the SystemWideAlert workload
*/
public interface SystemWideAlertService {

/**
* This method will create a SystemWideAlert object in the database
*
* @param context The relevant DSpace context
* @param message The message of the system-wide alert
* @param allowSessionsType Which sessions need to be allowed for the system-wide alert
* @param countdownTo The date to count down to while the system-wide alert is active
* @param active Whether the system-wide alert is active
* @return The created SystemWideAlert object
* @throws SQLException If something goes wrong
*/
SystemWideAlert create(Context context, String message, AllowSessionsEnum allowSessionsType,
Date countdownTo, boolean active
) throws SQLException, AuthorizeException;

/**
* This method will retrieve a SystemWideAlert object from the database with the given ID
*
* @param context The relevant DSpace context
* @param alertId The alert id to search for in the database
* @return The system-wide alert that holds the given alert id
* @throws SQLException If something goes wrong
*/
SystemWideAlert find(Context context, int alertId) throws SQLException;

/**
* Returns a list of all SystemWideAlert objects in the database
*
* @param context The relevant DSpace context
* @return The list of all SystemWideAlert objects in the database
* @throws SQLException If something goes wrong
*/
List<SystemWideAlert> findAll(Context context) throws SQLException;

/**
* Returns a list of all SystemWideAlert objects in the database
*
* @param context The relevant DSpace context
* @param limit The limit for the amount of system-wide alerts returned
* @param offset The offset for the system-wide alerts to be returned
* @return The list of all SystemWideAlert objects in the database
* @throws SQLException If something goes wrong
*/
List<SystemWideAlert> findAll(Context context, int limit, int offset) throws SQLException;

/**
* Returns a list of all active SystemWideAlert objects in the database
*
* @param context The relevant DSpace context
* @param limit The limit for the amount of system-wide alerts returned
* @param offset The offset for the system-wide alerts to be returned
* @return The list of all active SystemWideAlert objects in the database
* @throws SQLException If something goes wrong
*/
List<SystemWideAlert> findAllActive(Context context, int limit, int offset) throws SQLException;

/**
* This method will delete the given SystemWideAlert object from the database
*
* @param context The relevant DSpace context
* @param systemWideAlert The SystemWideAlert object to be deleted
* @throws SQLException If something goes wrong
*/
void delete(Context context, SystemWideAlert systemWideAlert)
throws SQLException, IOException, AuthorizeException;

/**
* This method will be used to update the given SystemWideAlert object in the database
*
* @param context The relevant DSpace context
* @param systemWideAlert The SystemWideAlert object to be updated
* @throws SQLException If something goes wrong
*/
void update(Context context, SystemWideAlert systemWideAlert) throws SQLException, AuthorizeException;

/**
* Verifies if the user connected to the current context can retain their session
*
* @param context The relevant DSpace context
* @param ePerson The EPerson whose session is being checked
* @return true if the user connected to the current context can retain their session
*/
boolean canUserMaintainSession(Context context, EPerson ePerson) throws SQLException;

/**
* Verifies if a non-admin user can log in
*
* @param context The relevant DSpace context
* @return true if a non-admin user can log in
*/
boolean canNonAdminUserLogin(Context context) throws SQLException;
}
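As a usage illustration of this interface, an administrative task might announce a maintenance window as sketched below; the injected service and the setActive mutator are assumptions based on the entity's 'active' property, not APIs confirmed by this commit.

// Hedged sketch: create, then later deactivate, a system-wide alert.
// Assumes 'alertService' is injected (e.g. @Autowired) and the caller's Context is an administrator.
Date countdownTo = new Date(System.currentTimeMillis() + 60 * 60 * 1000); // one hour from now
SystemWideAlert alert = alertService.create(context,
        "The repository enters read-only maintenance at the time shown below.",
        AllowSessionsEnum.ALLOW_ADMIN_SESSIONS_ONLY,
        countdownTo,
        true);

// When maintenance is over, switch the alert off and persist the change.
alert.setActive(false);              // assumed setter for the entity's 'active' property
alertService.update(context, alert);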
@@ -598,18 +598,19 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
changes.add(whatHasChanged);
}

if (change) {
//only clear cache if changes have been made.
c.uncacheEntity(wsItem);
c.uncacheEntity(wfItem);
c.uncacheEntity(item);
if (change && (rowCount % configurationService.getIntProperty("bulkedit.change.commit.count", 100) == 0)) {
c.commit();
handler.logInfo(LogHelper.getHeader(c, "metadata_import_commit", "lineNumber=" + rowCount));
}
populateRefAndRowMap(line, item == null ? null : item.getID());
// keep track of current rows processed
rowCount++;
}
if (change) {
c.commit();
}

c.setMode(originalMode);
c.setMode(Context.Mode.READ_ONLY);

// Return the changes
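The change above commits every N changed rows instead of only once at the end, with N read from bulkedit.change.commit.count (default 100). The same pattern in isolation, as a hedged sketch (applyLine is a hypothetical helper standing in for the per-row logic):

// Hedged sketch of the periodic-commit pattern used by MetadataImport.
int commitEvery = configurationService.getIntProperty("bulkedit.change.commit.count", 100);
int rowCount = 1;
for (DSpaceCSVLine line : csvLines) {          // csvLines: the parsed CSV, placeholder name
    boolean changed = applyLine(c, line);      // hypothetical helper applying one CSV row
    if (changed && rowCount % commitEvery == 0) {
        c.commit();                            // flush periodically so very large imports stay within memory
    }
    rowCount++;
}
c.commit();                                    // final commit for the trailing partial batch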
@@ -11,6 +11,7 @@ import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.Files;
import java.sql.SQLException;
import java.util.ArrayList;
@@ -67,16 +68,18 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
protected String eperson = null;
protected String[] collections = null;
protected boolean isTest = false;
protected boolean isExcludeContent = false;
protected boolean isResume = false;
protected boolean useWorkflow = false;
protected boolean useWorkflowSendEmail = false;
protected boolean isQuiet = false;
protected boolean commandLineCollections = false;
protected boolean zip = false;
protected boolean remoteUrl = false;
protected String zipfilename = null;
protected boolean help = false;
protected File workDir = null;
private File workFile = null;
protected File workFile = null;

protected static final CollectionService collectionService =
ContentServiceFactory.getInstance().getCollectionService();
@@ -119,6 +122,8 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
handler.logInfo("**Test Run** - not actually importing items.");
}

isExcludeContent = commandLine.hasOption('x');

if (commandLine.hasOption('p')) {
template = true;
}
@@ -204,6 +209,7 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
.getItemImportService();
try {
itemImportService.setTest(isTest);
itemImportService.setExcludeContent(isExcludeContent);
itemImportService.setResume(isResume);
itemImportService.setUseWorkflow(useWorkflow);
itemImportService.setUseWorkflowSendEmail(useWorkflowSendEmail);
@@ -233,6 +239,9 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
if (zip) {
FileUtils.deleteDirectory(new File(sourcedir));
FileUtils.deleteDirectory(workDir);
if (remoteUrl && workFile != null && workFile.exists()) {
workFile.delete();
}
}

Date endTime = new Date();
@@ -249,6 +258,17 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
* @param context
*/
protected void validate(Context context) {
// check zip type: uploaded file or remote url
if (commandLine.hasOption('z')) {
zipfilename = commandLine.getOptionValue('z');
} else if (commandLine.hasOption('u')) {
remoteUrl = true;
zipfilename = commandLine.getOptionValue('u');
}
if (StringUtils.isBlank(zipfilename)) {
throw new UnsupportedOperationException("Must run with either name of zip file or url of zip file");
}

if (command == null) {
handler.logError("Must run with either add, replace, or remove (run with -h flag for details)");
throw new UnsupportedOperationException("Must run with either add, replace, or remove");
@@ -291,7 +311,6 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
handler.writeFilestream(context, MAPFILE_FILENAME, mapfileInputStream, MAPFILE_BITSTREAM_TYPE);
} finally {
mapFile.delete();
workFile.delete();
}
}
@@ -302,17 +321,24 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
* @throws Exception
*/
protected void readZip(Context context, ItemImportService itemImportService) throws Exception {
Optional<InputStream> optionalFileStream = handler.getFileStream(context, zipfilename);
Optional<InputStream> optionalFileStream = Optional.empty();
if (!remoteUrl) {
// manage zip via upload
optionalFileStream = handler.getFileStream(context, zipfilename);
} else {
// manage zip via remote url
optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
}
if (optionalFileStream.isPresent()) {
workFile = new File(itemImportService.getTempWorkDir() + File.separator
+ zipfilename + "-" + context.getCurrentUser().getID());
FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR);
sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
} else {
throw new IllegalArgumentException(
"Error reading file, the file couldn't be found for filename: " + zipfilename);
}
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR);
sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
}

/**
@@ -352,7 +378,6 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
*/
protected void setZip() {
zip = true;
zipfilename = commandLine.getOptionValue('z');
}

/**
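The new -u handling in readZip() boils down to streaming the remote zip into a per-user work file and unzipping it into the temp work area. A condensed, hedged sketch of that flow (error handling trimmed; TEMP_DIR and the service calls mirror the code above):

// Hedged sketch of the remote-url import path.
try (InputStream remote = new URL(zipfilename).openStream()) {
    File workFile = new File(itemImportService.getTempWorkDir() + File.separator
            + zipfilename + "-" + context.getCurrentUser().getID());
    FileUtils.copyInputStreamToFile(remote, workFile);
    File workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR);
    String sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
    // ... hand 'sourcedir' to the add/replace/remove command as before ...
}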
@@ -8,10 +8,14 @@
package org.dspace.app.itemimport;

import java.io.File;
import java.io.InputStream;
import java.net.URL;
import java.sql.SQLException;
import java.util.List;
import java.util.Optional;
import java.util.UUID;

import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.content.Collection;
@@ -62,7 +66,7 @@ public class ItemImportCLI extends ItemImport {
handler.logError("Must run with either add, replace, or remove (run with -h flag for details)");
throw new UnsupportedOperationException("Must run with either add, replace, or remove");
} else if ("add".equals(command) || "replace".equals(command)) {
if (sourcedir == null) {
if (!remoteUrl && sourcedir == null) {
handler.logError("A source directory containing items must be set (run with -h flag for details)");
throw new UnsupportedOperationException("A source directory containing items must be set");
}
@@ -96,10 +100,25 @@ public class ItemImportCLI extends ItemImport {
protected void readZip(Context context, ItemImportService itemImportService) throws Exception {
// If this is a zip archive, unzip it first
if (zip) {
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
+ File.separator + context.getCurrentUser().getID());
sourcedir = itemImportService.unzip(
new File(sourcedir + File.separator + zipfilename), workDir.getAbsolutePath());
if (!remoteUrl) {
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
+ File.separator + context.getCurrentUser().getID());
sourcedir = itemImportService.unzip(
new File(sourcedir + File.separator + zipfilename), workDir.getAbsolutePath());
} else {
// manage zip via remote url
Optional<InputStream> optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
if (optionalFileStream.isPresent()) {
workFile = new File(itemImportService.getTempWorkDir() + File.separator
+ zipfilename + "-" + context.getCurrentUser().getID());
FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
} else {
throw new IllegalArgumentException(
"Error reading file, the file couldn't be found for filename: " + zipfilename);
}
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR);
sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
}
}
}

@@ -120,6 +139,12 @@ public class ItemImportCLI extends ItemImport {
zip = true;
zipfilename = commandLine.getOptionValue('z');
}

if (commandLine.hasOption('u')) { // remote url
zip = true;
remoteUrl = true;
zipfilename = commandLine.getOptionValue('u');
}
}

@Override
@@ -37,6 +37,9 @@ public class ItemImportCLIScriptConfiguration extends ItemImportScriptConfigurat
options.addOption(Option.builder("z").longOpt("zip")
.desc("name of zip file")
.hasArg().required(false).build());
options.addOption(Option.builder("u").longOpt("url")
.desc("url of zip file")
.hasArg().build());
options.addOption(Option.builder("c").longOpt("collection")
.desc("destination collection(s) Handle or database ID")
.hasArg().required(false).build());
@@ -55,6 +58,9 @@ public class ItemImportCLIScriptConfiguration extends ItemImportScriptConfigurat
options.addOption(Option.builder("v").longOpt("validate")
.desc("test run - do not actually import items")
.hasArg(false).required(false).build());
options.addOption(Option.builder("x").longOpt("exclude-bitstreams")
.desc("do not load or expect content bitstreams")
.hasArg(false).required(false).build());
options.addOption(Option.builder("p").longOpt("template")
.desc("apply template")
.hasArg(false).required(false).build());
@@ -64,7 +64,10 @@ public class ItemImportScriptConfiguration<T extends ItemImport> extends ScriptC
options.addOption(Option.builder("z").longOpt("zip")
.desc("name of zip file")
.type(InputStream.class)
.hasArg().required().build());
.hasArg().build());
options.addOption(Option.builder("u").longOpt("url")
.desc("url of zip file")
.hasArg().build());
options.addOption(Option.builder("c").longOpt("collection")
.desc("destination collection(s) Handle or database ID")
.hasArg().required(false).build());
@@ -81,6 +84,9 @@ public class ItemImportScriptConfiguration<T extends ItemImport> extends ScriptC
options.addOption(Option.builder("v").longOpt("validate")
.desc("test run - do not actually import items")
.hasArg(false).required(false).build());
options.addOption(Option.builder("x").longOpt("exclude-bitstreams")
.desc("do not load or expect content bitstreams")
.hasArg(false).required(false).build());
options.addOption(Option.builder("p").longOpt("template")
.desc("apply template")
.hasArg(false).required(false).build());
@@ -62,6 +62,7 @@ import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.app.util.LocalSchemaFilenameFilter;
@@ -135,7 +136,7 @@ import org.xml.sax.SAXException;
* allow the registration of files (bitstreams) into DSpace.
*/
public class ItemImportServiceImpl implements ItemImportService, InitializingBean {
private final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemImportServiceImpl.class);
private final Logger log = LogManager.getLogger();

private DSpaceRunnableHandler handler;

@@ -181,6 +182,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
protected String tempWorkDir;

protected boolean isTest = false;
protected boolean isExcludeContent = false;
protected boolean isResume = false;
protected boolean useWorkflow = false;
protected boolean useWorkflowSendEmail = false;
@@ -950,9 +952,10 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
String qualifier = getAttributeValue(n, "qualifier"); //NodeValue();
// //getElementData(n,
// "qualifier");
String language = getAttributeValue(n, "language");
if (language != null) {
language = language.trim();

String language = null;
if (StringUtils.isNotBlank(getAttributeValue(n, "language"))) {
language = getAttributeValue(n, "language").trim();
}

if (!isQuiet) {
@@ -1403,6 +1406,10 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
protected void processContentFileEntry(Context c, Item i, String path,
String fileName, String bundleName, boolean primary) throws SQLException,
IOException, AuthorizeException {
if (isExcludeContent) {
return;
}

String fullpath = path + File.separatorChar + fileName;

// get an input stream
@@ -2342,6 +2349,11 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
this.isTest = isTest;
}

@Override
public void setExcludeContent(boolean isExcludeContent) {
this.isExcludeContent = isExcludeContent;
}

@Override
public void setResume(boolean isResume) {
this.isResume = isResume;
@@ -211,6 +211,13 @@ public interface ItemImportService {
*/
public void setTest(boolean isTest);

/**
* Set exclude-content flag.
*
* @param isExcludeContent true or false
*/
public void setExcludeContent(boolean isExcludeContent);

/**
* Set resume flag
*
@@ -14,6 +14,9 @@ import java.io.InputStream;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;

import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.common.PDRectangle;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Item;
@@ -119,6 +122,39 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
IMOperation op = new IMOperation();

// Optionally override ImageMagick's default density of 72 DPI to use a
// "supersample" when creating the PDF thumbnail. Note that I prefer to
// use the getProperty() method here instead of getIntProperty() because
// the latter always returns an integer (0 in case it's not set). I
// would prefer to keep ImageMagick's default to itself rather than for
// us to set one. Also note that the density option *must* come before
// we open the input file.
String density = configurationService.getProperty(PRE + ".density");
if (density != null) {
op.density(Integer.valueOf(density));
}

// Check the PDF's MediaBox and CropBox to see if they are the same.
// If not, then tell ImageMagick to use the CropBox when generating
// the thumbnail because the CropBox is generally used to define the
// area displayed when a user opens the PDF on a screen, whereas the
// MediaBox is used for print. Not all PDFs set these correctly, so
// we can use ImageMagick's default behavior unless we see an explicit
// CropBox. Note: we don't need to do anything special to detect if
// the CropBox is missing or empty because pdfbox will set it to the
// same size as the MediaBox if it doesn't exist. Also note that we
// only need to check the first page, since that's what we use for
// generating the thumbnail (PDDocument uses a zero-based index).
PDPage pdfPage = PDDocument.load(f).getPage(0);
PDRectangle pdfPageMediaBox = pdfPage.getMediaBox();
PDRectangle pdfPageCropBox = pdfPage.getCropBox();

// This option must come *before* we open the input file.
if (pdfPageCropBox != pdfPageMediaBox) {
op.define("pdf:use-cropbox=true");
}

String s = "[" + page + "]";
op.addImage(f.getAbsolutePath() + s);
if (configurationService.getBooleanProperty(PRE + ".flatten", true)) {
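With the density property set to, say, 144 and a CropBox that differs from the MediaBox, the operation assembled above corresponds roughly to "convert -density 144 -define pdf:use-cropbox=true input.pdf[0] ...". A standalone, hedged im4java sketch (paths and the density value are illustrative, not taken from the commit):

// Hedged sketch: the same density + cropbox handling in isolation.
IMOperation op = new IMOperation();
op.density(144);                        // supersample instead of ImageMagick's 72 DPI default
op.define("pdf:use-cropbox=true");      // honour the CropBox when it differs from the MediaBox
op.addImage("/tmp/input.pdf[0]");       // first page only (zero-based page index)
op.flatten();
op.addImage("/tmp/thumbnail.jpg");
new ConvertCmd().run(op);               // throws if the ImageMagick binary is unavailable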
@@ -315,25 +315,25 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
// check if destination bitstream exists
Bundle existingBundle = null;
Bitstream existingBitstream = null;
List<Bitstream> existingBitstreams = new ArrayList<Bitstream>();
List<Bundle> bundles = itemService.getBundles(item, formatFilter.getBundleName());

if (bundles.size() > 0) {
// only finds the last match (FIXME?)
// only finds the last matching bundle and all matching bitstreams in the proper bundle(s)
for (Bundle bundle : bundles) {
List<Bitstream> bitstreams = bundle.getBitstreams();

for (Bitstream bitstream : bitstreams) {
if (bitstream.getName().trim().equals(newName.trim())) {
existingBundle = bundle;
existingBitstream = bitstream;
existingBitstreams.add(bitstream);
}
}
}
}

// if exists and overwrite = false, exit
if (!overWrite && (existingBitstream != null)) {
if (!overWrite && (existingBitstreams.size() > 0)) {
if (!isQuiet) {
logInfo("SKIPPED: bitstream " + source.getID()
+ " (item: " + item.getHandle() + ") because '" + newName + "' already exists");
@@ -397,8 +397,8 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
Group anonymous = groupService.findByName(context, Group.ANONYMOUS);
authorizeService.addPolicy(context, b, Constants.READ, anonymous);
} else {
//- Inherit policies from the source bitstream
authorizeService.inheritPolicies(context, source, b);
//- replace the policies using the same in the source bitstream
authorizeService.replaceAllPolicies(context, source, b);
}

//do post-processing of the generated bitstream
@@ -408,9 +408,8 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
logError("!!! OutOfMemoryError !!!");
}

// fixme - set date?
// we are overwriting, so remove old bitstream
if (existingBitstream != null) {
for (Bitstream existingBitstream : existingBitstreams) {
bundleService.removeBitstream(context, existingBundle, existingBitstream);
}
@@ -31,5 +31,5 @@ public interface RequestItemAuthorExtractor {
*/
@NonNull
public List<RequestItemAuthor> getRequestItemAuthor(Context context, Item item)
throws SQLException;
throws SQLException;
}

@@ -56,7 +56,8 @@ public class RequestItemEmailNotifier {
private static final RequestItemAuthorExtractor requestItemAuthorExtractor
= DSpaceServicesFactory.getInstance()
.getServiceManager()
.getServiceByName(null, RequestItemAuthorExtractor.class);
.getServiceByName("requestItemAuthorExtractor",
RequestItemAuthorExtractor.class);

private RequestItemEmailNotifier() {}
@@ -154,9 +155,9 @@ public class RequestItemEmailNotifier {
email.setContent("body", message);
email.setSubject(subject);
email.addRecipient(ri.getReqEmail());
if (ri.isAccept_request()) {
// Attach bitstreams.
try {
// Attach bitstreams.
try {
if (ri.isAccept_request()) {
if (ri.isAllfiles()) {
Item item = ri.getItem();
List<Bundle> bundles = item.getBundles("ORIGINAL");
@@ -166,24 +167,39 @@ public class RequestItemEmailNotifier {
if (!bitstream.getFormat(context).isInternal() &&
requestItemService.isRestricted(context,
bitstream)) {
// #8636 Anyone receiving the email can respond to the
// request without authenticating into DSpace
context.turnOffAuthorisationSystem();
email.addAttachment(bitstreamService.retrieve(context,
bitstream), bitstream.getName(),
bitstream.getFormat(context).getMIMEType());
context.restoreAuthSystemState();
}
}
}
} else {
Bitstream bitstream = ri.getBitstream();
// #8636 Anyone receiving the email can respond to the request without authenticating into DSpace
context.turnOffAuthorisationSystem();
email.addAttachment(bitstreamService.retrieve(context, bitstream),
bitstream.getName(),
bitstream.getFormat(context).getMIMEType());
context.restoreAuthSystemState();
}
email.send();
} catch (MessagingException | IOException | SQLException | AuthorizeException e) {
LOG.warn(LogHelper.getHeader(context,
"error_mailing_requestItem", e.getMessage()));
throw new IOException("Reply not sent: " + e.getMessage());
} else {
boolean sendRejectEmail = configurationService
.getBooleanProperty("request.item.reject.email", true);
// Not all sites want the "refusal" to be sent back to the requester via
// email. However, by default, the rejection email is sent back.
if (sendRejectEmail) {
email.send();
}
}
} catch (MessagingException | IOException | SQLException | AuthorizeException e) {
LOG.warn(LogHelper.getHeader(context,
"error_mailing_requestItem", e.getMessage()));
throw new IOException("Reply not sent: " + e.getMessage());
}
LOG.info(LogHelper.getHeader(context,
"sent_attach_requestItem", "token={}"), ri.getToken());
@@ -220,8 +236,13 @@ public class RequestItemEmailNotifier {
message.addArgument(bitstreamName); // {0} bitstream name or "all"
message.addArgument(item.getHandle()); // {1} Item handle
message.addArgument(ri.getToken()); // {2} Request token
message.addArgument(approver.getFullName()); // {3} Approver's name
message.addArgument(approver.getEmail()); // {4} Approver's address
if (approver != null) {
message.addArgument(approver.getFullName()); // {3} Approver's name
message.addArgument(approver.getEmail()); // {4} Approver's address
} else {
message.addArgument("anonymous approver"); // [3] Approver's name
message.addArgument(configurationService.getProperty("mail.admin")); // [4] Approver's address
}

// Who gets this message?
String recipient;
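The refusal branch above is gated on one boolean property; pulled out on its own, the gate looks like this hedged sketch:

// Hedged sketch: send the rejection email only when the site has not opted out.
boolean sendRejectEmail = configurationService.getBooleanProperty("request.item.reject.email", true);
if (sendRejectEmail) {
    email.send();   // requester is told the request was refused
} else {
    LOG.info("Rejection email suppressed by request.item.reject.email = false");
}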
@@ -22,21 +22,27 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.lang.NonNull;

/**
* RequestItem strategy to allow DSpace support team's helpdesk to receive requestItem request.
* With this enabled, then the Item author/submitter doesn't receive the request, but the helpdesk instead does.
* RequestItem strategy to allow DSpace support team's help desk to receive
* requestItem requests. With this enabled, the Item author/submitter doesn't
* receive the request, but the help desk instead does.
*
* Failover to the RequestItemSubmitterStrategy, which means the submitter would get the request if there is no
* specified helpdesk email.
* <p>Fails over to the {@link RequestItemSubmitterStrategy}, which means the
* submitter would get the request if there is no specified help desk email.
*
* @author Sam Ottenhoff
* @author Peter Dietz
*/
public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
public class RequestItemHelpdeskStrategy
extends RequestItemSubmitterStrategy {
static final String P_HELPDESK_OVERRIDE
= "request.item.helpdesk.override";
static final String P_MAIL_HELPDESK = "mail.helpdesk";

@Autowired(required = true)
protected EPersonService ePersonService;

@Autowired(required = true)
private ConfigurationService configuration;
protected ConfigurationService configurationService;

public RequestItemHelpdeskStrategy() {
}
@@ -45,9 +51,9 @@ public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
@NonNull
public List<RequestItemAuthor> getRequestItemAuthor(Context context, Item item)
throws SQLException {
boolean helpdeskOverridesSubmitter = configuration
boolean helpdeskOverridesSubmitter = configurationService
.getBooleanProperty("request.item.helpdesk.override", false);
String helpDeskEmail = configuration.getProperty("mail.helpdesk");
String helpDeskEmail = configurationService.getProperty("mail.helpdesk");

if (helpdeskOverridesSubmitter && StringUtils.isNotBlank(helpDeskEmail)) {
List<RequestItemAuthor> authors = new ArrayList<>(1);
@@ -60,16 +66,18 @@ public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
}

/**
* Return a RequestItemAuthor object for the specified helpdesk email address.
* It makes an attempt to find if there is a matching eperson for the helpdesk address, to use the name,
* Otherwise it falls back to a helpdeskname key in the Messages.props.
* Return a RequestItemAuthor object for the specified help desk email address.
* It makes an attempt to find if there is a matching {@link EPerson} for
* the help desk address, to use its name. Otherwise it falls back to the
* {@code helpdeskname} key in {@code Messages.properties}.
*
* @param context context
* @param helpDeskEmail email
* @return RequestItemAuthor
* @throws SQLException if database error
*/
public RequestItemAuthor getHelpDeskPerson(Context context, String helpDeskEmail) throws SQLException {
public RequestItemAuthor getHelpDeskPerson(Context context, String helpDeskEmail)
throws SQLException {
context.turnOffAuthorisationSystem();
EPerson helpdeskEPerson = ePersonService.findByEmail(context, helpDeskEmail);
context.restoreAuthSystemState();
@@ -9,6 +9,7 @@ package org.dspace.app.requestitem;
import java.sql.SQLException;
import java.util.Date;
import java.util.Iterator;
import java.util.List;

import org.apache.logging.log4j.LogManager;
@@ -90,6 +91,11 @@ public class RequestItemServiceImpl implements RequestItemService {
}
}

@Override
public Iterator<RequestItem> findByItem(Context context, Item item) throws SQLException {
return requestItemDAO.findByItem(context, item);
}

@Override
public void update(Context context, RequestItem requestItem) {
try {
@@ -22,7 +22,6 @@ import org.springframework.lang.NonNull;
* @author Andrea Bollini
*/
public class RequestItemSubmitterStrategy implements RequestItemAuthorExtractor {

public RequestItemSubmitterStrategy() {
}

@@ -8,8 +8,10 @@
package org.dspace.app.requestitem.dao;

import java.sql.SQLException;
import java.util.Iterator;

import org.dspace.app.requestitem.RequestItem;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.core.GenericDAO;

@@ -32,4 +34,6 @@ public interface RequestItemDAO extends GenericDAO<RequestItem> {
* @throws SQLException passed through.
*/
public RequestItem findByToken(Context context, String token) throws SQLException;

public Iterator<RequestItem> findByItem(Context context, Item item) throws SQLException;
}

@@ -8,6 +8,8 @@
package org.dspace.app.requestitem.dao.impl;

import java.sql.SQLException;
import java.util.Iterator;
import javax.persistence.Query;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
@@ -15,6 +17,7 @@ import javax.persistence.criteria.Root;
import org.dspace.app.requestitem.RequestItem;
import org.dspace.app.requestitem.RequestItem_;
import org.dspace.app.requestitem.dao.RequestItemDAO;
import org.dspace.content.Item;
import org.dspace.core.AbstractHibernateDAO;
import org.dspace.core.Context;

@@ -39,4 +42,10 @@ public class RequestItemDAOImpl extends AbstractHibernateDAO<RequestItem> implem
criteriaQuery.where(criteriaBuilder.equal(requestItemRoot.get(RequestItem_.token), token));
return uniqueResult(context, criteriaQuery, false, RequestItem.class);
}
@Override
public Iterator<RequestItem> findByItem(Context context, Item item) throws SQLException {
Query query = createQuery(context, "FROM RequestItem WHERE item_id= :uuid");
query.setParameter("uuid", item.getID());
return iterate(query);
}
}
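A hedged usage sketch of the new findByItem lookup, walking every copy request recorded for one item ('requestItemService' is assumed to be injected; the getters follow the accessor names visible elsewhere in this commit):

// Hedged sketch: list the request tokens attached to an item.
Iterator<RequestItem> requests = requestItemService.findByItem(context, item);
while (requests.hasNext()) {
    RequestItem request = requests.next();
    LOG.info("Request token {} on item {} (accepted: {})",
            request.getToken(), item.getID(), request.isAccept_request());
}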
@@ -8,6 +8,7 @@
package org.dspace.app.requestitem.service;

import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;

import org.dspace.app.requestitem.RequestItem;
@@ -62,6 +63,14 @@ public interface RequestItemService {
*/
public RequestItem findByToken(Context context, String token);

/**
* Retrieve a request based on the item.
* @param context current DSpace session.
* @param item the item to find requests for.
* @return the matching requests, or null if not found.
*/
public Iterator<RequestItem> findByItem(Context context, Item item) throws SQLException;

/**
* Save updates to the record. Only accept_request, and decision_date are set-able.
*
|
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import javax.annotation.Nullable;
@@ -131,10 +132,15 @@ public class DCInput {
private boolean closedVocabulary = false;

/**
* the regex to comply with, null if nothing
* the regex in ECMAScript standard format, usable also by the REST layer.
*/
private String regex = null;

/**
* the computed pattern, null if nothing
*/
private Pattern pattern = null;

/**
* allowed document types
*/
@@ -178,7 +184,7 @@ public class DCInput {

//check if the input has a language tag
language = Boolean.valueOf(fieldMap.get("language"));
valueLanguageList = new ArrayList();
valueLanguageList = new ArrayList<>();
if (language) {
String languageNameTmp = fieldMap.get("value-pairs-name");
if (StringUtils.isBlank(languageNameTmp)) {
@@ -191,7 +197,7 @@ public class DCInput {
repeatable = "true".equalsIgnoreCase(repStr)
|| "yes".equalsIgnoreCase(repStr);
String nameVariantsString = fieldMap.get("name-variants");
nameVariants = (StringUtils.isNotBlank(nameVariantsString)) ?
nameVariants = StringUtils.isNotBlank(nameVariantsString) ?
nameVariantsString.equalsIgnoreCase("true") : false;
label = fieldMap.get("label");
inputType = fieldMap.get("input-type");
@@ -203,17 +209,17 @@ public class DCInput {
}
hint = fieldMap.get("hint");
warning = fieldMap.get("required");
required = (warning != null && warning.length() > 0);
required = warning != null && warning.length() > 0;
visibility = fieldMap.get("visibility");
readOnly = fieldMap.get("readonly");
vocabulary = fieldMap.get("vocabulary");
regex = fieldMap.get("regex");
this.initRegex(fieldMap.get("regex"));
String closedVocabularyStr = fieldMap.get("closedVocabulary");
closedVocabulary = "true".equalsIgnoreCase(closedVocabularyStr)
|| "yes".equalsIgnoreCase(closedVocabularyStr);

// parsing of the <type-bind> element (using the colon as split separator)
typeBind = new ArrayList<>();
typeBind = new ArrayList<String>();
String typeBindDef = fieldMap.get("type-bind");
if (typeBindDef != null && typeBindDef.trim().length() > 0) {
String[] types = typeBindDef.split(",");
@@ -238,6 +244,22 @@ public class DCInput {

}

protected void initRegex(String regex) {
this.regex = null;
this.pattern = null;
if (regex != null) {
try {
Optional.ofNullable(RegexPatternUtils.computePattern(regex))
.ifPresent(pattern -> {
this.pattern = pattern;
this.regex = regex;
});
} catch (PatternSyntaxException e) {
log.warn("The regex field of input {} with value {} is invalid!", this.label, regex);
}
}
}

/**
* Is this DCInput for display in the given scope? The scope should be
* either "workflow" or "submit", as per the input forms definition. If the
@@ -248,7 +270,7 @@ public class DCInput {
* @return whether the input should be displayed or not
*/
public boolean isVisible(String scope) {
return (visibility == null || visibility.equals(scope));
return visibility == null || visibility.equals(scope);
}

/**
@@ -512,8 +534,12 @@ public class DCInput {
return visibility;
}

public Pattern getPattern() {
return this.pattern;
}

public String getRegex() {
return regex;
return this.regex;
}

public String getFieldName() {
@@ -546,8 +572,7 @@ public class DCInput {
public boolean validate(String value) {
if (StringUtils.isNotBlank(value)) {
try {
if (StringUtils.isNotBlank(regex)) {
Pattern pattern = Pattern.compile(regex);
if (this.pattern != null) {
if (!pattern.matcher(value).matches()) {
return false;
}
@@ -557,7 +582,6 @@ public class DCInput {
}

}

return true;
}
@@ -0,0 +1,73 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.util;

import static java.util.regex.Pattern.CASE_INSENSITIVE;

import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;

import org.apache.commons.lang3.StringUtils;

/**
* Utility class for checking regexes and patterns.
*
* @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com)
*
*/
public class RegexPatternUtils {

// checks input having the format /{pattern}/{flags}
// allowed flags are: g,i,m,s,u,y
public static final String REGEX_INPUT_VALIDATOR = "(/?)(.+)\\1([gimsuy]*)";
// flags usable inside regex definition using format (?i|m|s|u|y)
public static final String REGEX_FLAGS = "(?%s)";
public static final Pattern PATTERN_REGEX_INPUT_VALIDATOR =
Pattern.compile(REGEX_INPUT_VALIDATOR, CASE_INSENSITIVE);

/**
* Computes a pattern starting from a regex definition with flags that
* uses the standard format: <code>/{regex}/{flags}</code> (ECMAScript format).
* This method can transform an ECMAScript regex into a java {@code Pattern} object
* which can be used to validate strings.
* <br/>
* If the regex is null, empty, or blank, a null {@code Pattern} will be returned.
* If it is a valid regex, a non-null {@code Pattern} will be returned;
* an exception will be thrown otherwise.
*
* @param regex with format <code>/{regex}/{flags}</code>
* @return {@code Pattern} regex pattern instance
* @throws PatternSyntaxException
*/
public static final Pattern computePattern(String regex) throws PatternSyntaxException {
if (StringUtils.isBlank(regex)) {
return null;
}
Matcher inputMatcher = PATTERN_REGEX_INPUT_VALIDATOR.matcher(regex);
String regexPattern = regex;
String regexFlags = "";
if (inputMatcher.matches()) {
regexPattern =
Optional.of(inputMatcher.group(2))
.filter(StringUtils::isNotBlank)
.orElse(regex);
regexFlags =
Optional.ofNullable(inputMatcher.group(3))
.filter(StringUtils::isNotBlank)
.map(flags -> String.format(REGEX_FLAGS, flags))
.orElse("")
.replaceAll("g", "");
}
return Pattern.compile(regexFlags + regexPattern);
}

private RegexPatternUtils() {}

}
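A usage sketch (not part of the commit): an ECMAScript-style definition such as /^dspace-\d+$/i is rewritten to the embedded-flag form (?i)^dspace-\d+$ before compilation.

// Hedged sketch: turning an ECMAScript-style definition into a java.util.regex.Pattern.
Pattern pattern = RegexPatternUtils.computePattern("/^dspace-\\d+$/i");
System.out.println(pattern.matcher("DSpace-2023").matches());   // true  (case-insensitive flag applied)
System.out.println(pattern.matcher("dspace-seven").matches());  // false (digits required)
System.out.println(RegexPatternUtils.computePattern("  "));     // null  (blank input yields no pattern)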
@@ -51,6 +51,7 @@ import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.indexobject.IndexableCollection;
import org.dspace.discovery.indexobject.IndexableCommunity;
@@ -91,6 +92,7 @@ public class SyndicationFeed {

// default DC fields for entry
protected String defaultTitleField = "dc.title";
protected String defaultDescriptionField = "dc.description";
protected String defaultAuthorField = "dc.contributor.author";
protected String defaultDateField = "dc.date.issued";
private static final String[] defaultDescriptionFields =
@@ -196,15 +198,15 @@ public class SyndicationFeed {
// dso is null for the whole site, or a search without scope
if (dso == null) {
defaultTitle = configurationService.getProperty("dspace.name");
feed.setDescription(localize(labels, MSG_FEED_DESCRIPTION));
defaultDescriptionField = localize(labels, MSG_FEED_DESCRIPTION);
objectURL = resolveURL(request, null);
} else {
Bitstream logo = null;
if (dso instanceof IndexableCollection) {
Collection col = ((IndexableCollection) dso).getIndexedObject();
defaultTitle = col.getName();
feed.setDescription(collectionService.getMetadataFirstValue(col,
CollectionService.MD_SHORT_DESCRIPTION, Item.ANY));
defaultDescriptionField = collectionService.getMetadataFirstValue(col,
CollectionService.MD_SHORT_DESCRIPTION, Item.ANY);
logo = col.getLogo();
String cols = configurationService.getProperty("webui.feed.podcast.collections");
if (cols != null && cols.length() > 1 && cols.contains(col.getHandle())) {
@@ -214,8 +216,8 @@ public class SyndicationFeed {
} else if (dso instanceof IndexableCommunity) {
Community comm = ((IndexableCommunity) dso).getIndexedObject();
defaultTitle = comm.getName();
feed.setDescription(communityService.getMetadataFirstValue(comm,
CommunityService.MD_SHORT_DESCRIPTION, Item.ANY));
defaultDescriptionField = communityService.getMetadataFirstValue(comm,
CommunityService.MD_SHORT_DESCRIPTION, Item.ANY);
logo = comm.getLogo();
String comms = configurationService.getProperty("webui.feed.podcast.communities");
if (comms != null && comms.length() > 1 && comms.contains(comm.getHandle())) {
@@ -230,6 +232,12 @@ public class SyndicationFeed {
}
feed.setTitle(labels.containsKey(MSG_FEED_TITLE) ?
localize(labels, MSG_FEED_TITLE) : defaultTitle);

if (defaultDescriptionField == null || defaultDescriptionField == "") {
defaultDescriptionField = I18nUtil.getMessage("org.dspace.app.util.SyndicationFeed.no-description");
}

feed.setDescription(defaultDescriptionField);
feed.setLink(objectURL);
feed.setPublishedDate(new Date());
feed.setUri(objectURL);
@@ -52,11 +52,6 @@ public class IPAuthentication implements AuthenticationMethod {
*/
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(IPAuthentication.class);

/**
* Whether to look for x-forwarded headers for logging IP addresses
*/
protected static Boolean useProxies;

/**
* All the IP matchers
*/
@@ -250,7 +245,7 @@ public class IPAuthentication implements AuthenticationMethod {

log.debug(LogHelper.getHeader(context, "authenticated",
"special_groups=" + gsb.toString()
+ " (by IP=" + addr + ", useProxies=" + useProxies.toString() + ")"
+ " (by IP=" + addr + ")"
));
}
@@ -11,9 +11,11 @@ import static org.dspace.eperson.service.EPersonService.MD_PHONE;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
@@ -64,6 +66,7 @@ import org.dspace.services.factory.DSpaceServicesFactory;
* @author Reuben Pasquini
* @author Samuel Ottenhoff
* @author Ivan Masár
* @author Michael Plate
*/
public class LDAPAuthentication
implements AuthenticationMethod {
@@ -391,7 +394,7 @@ public class LDAPAuthentication
protected String ldapGivenName = null;
protected String ldapSurname = null;
protected String ldapPhone = null;
protected String ldapGroup = null;
protected ArrayList<String> ldapGroup = null;

/**
* LDAP settings
@@ -406,9 +409,9 @@ public class LDAPAuthentication
final String ldap_surname_field;
final String ldap_phone_field;
final String ldap_group_field;

final boolean useTLS;

SpeakerToLDAP(Logger thelog) {
ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
@@ -547,7 +550,11 @@ public class LDAPAuthentication
if (attlist[4] != null) {
att = atts.get(attlist[4]);
if (att != null) {
ldapGroup = (String) att.get();
// loop through all groups returned by LDAP
ldapGroup = new ArrayList<String>();
for (NamingEnumeration val = att.getAll(); val.hasMoreElements(); ) {
ldapGroup.add((String) val.next());
}
}
}

@@ -693,48 +700,69 @@ public class LDAPAuthentication
/*
* Add authenticated users to the group defined in dspace.cfg by
* the authentication-ldap.login.groupmap.* key.
*
* @param dn
* The string containing distinguished name of the user
*
* @param group
* List of strings with LDAP dn of groups
*
* @param context
* DSpace context
*/
private void assignGroups(String dn, String group, Context context) {
private void assignGroups(String dn, ArrayList<String> group, Context context) {
if (StringUtils.isNotBlank(dn)) {
System.out.println("dn:" + dn);
int i = 1;
String groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + i);

boolean cmp;

// groupmap contains the mapping of LDAP groups to DSpace groups
// outer loop with the DSpace groups
while (groupMap != null) {
String t[] = groupMap.split(":");
String ldapSearchString = t[0];
String dspaceGroupName = t[1];

if (group == null) {
cmp = StringUtils.containsIgnoreCase(dn, ldapSearchString + ",");
} else {
cmp = StringUtils.equalsIgnoreCase(group, ldapSearchString);
}
// list of strings with dn from LDAP groups
// inner loop
Iterator<String> groupIterator = group.iterator();
while (groupIterator.hasNext()) {

if (cmp) {
// assign user to this group
try {
Group ldapGroup = groupService.findByName(context, dspaceGroupName);
if (ldapGroup != null) {
groupService.addMember(context, ldapGroup, context.getCurrentUser());
groupService.update(context, ldapGroup);
} else {
// The group does not exist
log.warn(LogHelper.getHeader(context,
"ldap_assignGroupsBasedOnLdapDn",
"Group defined in authentication-ldap.login.groupmap." + i
+ " does not exist :: " + dspaceGroupName));
// save the current entry from iterator for further use
String currentGroup = groupIterator.next();

// very much the old code from DSpace <= 7.5
if (currentGroup == null) {
cmp = StringUtils.containsIgnoreCase(dn, ldapSearchString + ",");
} else {
cmp = StringUtils.equalsIgnoreCase(currentGroup, ldapSearchString);
}

if (cmp) {
// assign user to this group
try {
Group ldapGroup = groupService.findByName(context, dspaceGroupName);
if (ldapGroup != null) {
groupService.addMember(context, ldapGroup, context.getCurrentUser());
groupService.update(context, ldapGroup);
} else {
// The group does not exist
log.warn(LogHelper.getHeader(context,
"ldap_assignGroupsBasedOnLdapDn",
"Group defined in authentication-ldap.login.groupmap." + i
+ " does not exist :: " + dspaceGroupName));
}
} catch (AuthorizeException ae) {
log.debug(LogHelper.getHeader(context,
"assignGroupsBasedOnLdapDn could not authorize addition to " +
"group",
dspaceGroupName));
} catch (SQLException e) {
log.debug(LogHelper.getHeader(context, "assignGroupsBasedOnLdapDn could not find group",
dspaceGroupName));
}
} catch (AuthorizeException ae) {
log.debug(LogHelper.getHeader(context,
"assignGroupsBasedOnLdapDn could not authorize addition to " +
"group",
dspaceGroupName));
} catch (SQLException e) {
log.debug(LogHelper.getHeader(context, "assignGroupsBasedOnLdapDn could not find group",
dspaceGroupName));
}
}
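The reworked assignGroups now tests every LDAP group DN returned for the user against each authentication-ldap.login.groupmap.N entry. A condensed, hedged sketch of that matching (the groupmap value and variable names are illustrative; error handling trimmed):

// Example entry (illustrative): authentication-ldap.login.groupmap.1 = cn=dspace-admins:Administrator
int i = 1;
String groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + i);
while (groupMap != null) {
    String[] parts = groupMap.split(":");
    String ldapSearchString = parts[0];
    String dspaceGroupName = parts[1];
    for (String memberOf : ldapGroups) {   // DNs collected from the configured LDAP group attribute
        boolean matches = (memberOf == null)
                ? StringUtils.containsIgnoreCase(dn, ldapSearchString + ",")
                : StringUtils.equalsIgnoreCase(memberOf, ldapSearchString);
        if (matches) {
            Group target = groupService.findByName(context, dspaceGroupName);
            groupService.addMember(context, target, context.getCurrentUser());
        }
    }
    groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + ++i);
}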
@@ -31,10 +31,12 @@ import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.WorkspaceItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.DiscoverQuery.SORT_ORDER;
import org.dspace.discovery.DiscoverResult;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchService;
@@ -521,6 +523,15 @@ public class AuthorizeServiceImpl implements AuthorizeService {
addPolicies(c, nonAdminPolicies, dest);
}

@Override
public void replaceAllPolicies(Context context, DSpaceObject source, DSpaceObject dest)
throws SQLException, AuthorizeException {
// find all policies for the source object
List<ResourcePolicy> policies = getPolicies(context, source);
removeAllPolicies(context, dest);
addPolicies(context, policies, dest);
}
@Override
|
||||
public void switchPoliciesAction(Context context, DSpaceObject dso, int fromAction, int toAction)
|
||||
throws SQLException, AuthorizeException {
|
||||
@@ -830,7 +841,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
|
||||
query = formatCustomQuery(query);
|
||||
DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" +
|
||||
IndexableCommunity.TYPE,
|
||||
offset, limit);
|
||||
offset, limit, null, null);
|
||||
for (IndexableObject solrCollections : discoverResult.getIndexableObjects()) {
|
||||
Community community = ((IndexableCommunity) solrCollections).getIndexedObject();
|
||||
communities.add(community);
|
||||
@@ -852,7 +863,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
|
||||
query = formatCustomQuery(query);
|
||||
DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" +
|
||||
IndexableCommunity.TYPE,
|
||||
null, null);
|
||||
null, null, null, null);
|
||||
return discoverResult.getTotalSearchResults();
|
||||
}
|
||||
|
||||
@@ -877,7 +888,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
|
||||
query = formatCustomQuery(query);
|
||||
DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" +
|
||||
IndexableCollection.TYPE,
|
||||
offset, limit);
|
||||
offset, limit, CollectionService.SOLR_SORT_FIELD, SORT_ORDER.asc);
|
||||
for (IndexableObject solrCollections : discoverResult.getIndexableObjects()) {
|
||||
Collection collection = ((IndexableCollection) solrCollections).getIndexedObject();
|
||||
collections.add(collection);
|
||||
@@ -899,7 +910,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
|
||||
query = formatCustomQuery(query);
|
||||
DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" +
|
||||
IndexableCollection.TYPE,
|
||||
null, null);
|
||||
null, null, null, null);
|
||||
return discoverResult.getTotalSearchResults();
|
||||
}
|
||||
|
||||
@@ -919,7 +930,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
|
||||
}
|
||||
|
||||
try {
|
||||
DiscoverResult discoverResult = getDiscoverResult(context, query, null, null);
|
||||
DiscoverResult discoverResult = getDiscoverResult(context, query, null, null, null, null);
|
||||
if (discoverResult.getTotalSearchResults() > 0) {
|
||||
return true;
|
||||
}
|
||||
@@ -931,7 +942,8 @@ public class AuthorizeServiceImpl implements AuthorizeService {
|
||||
return false;
|
||||
}
|
||||
|
||||
private DiscoverResult getDiscoverResult(Context context, String query, Integer offset, Integer limit)
|
||||
private DiscoverResult getDiscoverResult(Context context, String query, Integer offset, Integer limit,
|
||||
String sortField, SORT_ORDER sortOrder)
|
||||
throws SearchServiceException, SQLException {
|
||||
String groupQuery = getGroupToQuery(groupService.allMemberGroups(context, context.getCurrentUser()));
|
||||
|
||||
@@ -947,7 +959,9 @@ public class AuthorizeServiceImpl implements AuthorizeService {
|
||||
if (limit != null) {
|
||||
discoverQuery.setMaxResults(limit);
|
||||
}
|
||||
|
||||
if (sortField != null && sortOrder != null) {
|
||||
discoverQuery.setSortField(sortField, sortOrder);
|
||||
}
|
||||
|
||||
return searchService.search(context, discoverQuery);
|
||||
}
|
||||
|
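As a hedged sketch (not part of this commit), the two new parameters are only honoured when both are non-null, so a caller that wants sorted results passes a Solr sort field plus a direction, exactly as the collection query above does:

DiscoverQuery discoverQuery = new DiscoverQuery();
discoverQuery.setQuery(query); // same query string assembled by the caller
// Applied only when both sortField and sortOrder are supplied
discoverQuery.setSortField(CollectionService.SOLR_SORT_FIELD, DiscoverQuery.SORT_ORDER.asc);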
@@ -41,9 +41,16 @@ import org.hibernate.proxy.HibernateProxyHelper;
@Entity
@Table(name = "resourcepolicy")
public class ResourcePolicy implements ReloadableEntity<Integer> {
/** This policy was set on submission, to give the submitter access. */
public static String TYPE_SUBMISSION = "TYPE_SUBMISSION";

/** This policy was set to allow access by a workflow group. */
public static String TYPE_WORKFLOW = "TYPE_WORKFLOW";

/** This policy was explicitly set on this object. */
public static String TYPE_CUSTOM = "TYPE_CUSTOM";

/** This policy was copied from the containing object's default policies. */
public static String TYPE_INHERITED = "TYPE_INHERITED";

@Id
@@ -93,7 +100,7 @@ public class ResourcePolicy implements ReloadableEntity<Integer> {
private String rptype;

@Lob
@Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType")
@Type(type = "org.hibernate.type.TextType")
@Column(name = "rpdescription")
private String rpdescription;
@@ -0,0 +1,67 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/

/**
* Represents permissions for access to DSpace content.
*
* <h2>Philosophy</h2>
* DSpace's authorization system follows the classical "police state"
* philosophy of security - the user can do nothing, unless it is
* specifically allowed. Those permissions are spelled out with
* {@link ResourcePolicy} objects, stored in the {@code resourcepolicy} table
* in the database.
*
* <h2>Policies are attached to Content</h2>
* Resource Policies get assigned to all of the content objects in
* DSpace - collections, communities, items, bundles, and bitstreams.
* (Currently they are not attached to non-content objects such as
* {@code EPerson} or {@code Group}. But they could be, hence the name
* {@code ResourcePolicy} instead of {@code ContentPolicy}.)
*
* <h2>Policies are tuples</h2>
* Authorization is based on evaluating the tuple of (object, action, actor),
* such as (ITEM, READ, EPerson John Smith) to check if the {@code EPerson}
* "John Smith" can read an item. {@code ResourcePolicy} objects are pretty
* simple, describing a single instance of (object, action, actor). If
* multiple actors are desired, such as groups 10, 11, and 12 are allowed to
* READ Item 13, you simply create a {@code ResourcePolicy} for each group.
*
* <h2>Built-in groups</h2>
* The install process should create two built-in groups - {@code Anonymous}
* for anonymous/public access, and {@code Administrators} for administrators.
* Group {@code Anonymous} allows anyone access, even if not authenticated.
* Group {@code Administrators}' members have super-user rights,
* and are allowed to do any action to any object.
*
* <h2>Policy types</h2>
* Policies have a "type" used to distinguish policies which are applied for
* specific purposes.
* <dl>
* <dt>CUSTOM</dt>
* <dd>These are created and assigned explicitly by users.</dd>
* <dt>INHERITED</dt>
* <dd>These are copied from a containing object's default policies.</dd>
* <dt>SUBMISSION</dt>
* <dd>These are applied during submission to give the submitter access while
* composing a submission.</dd>
* <dt>WORKFLOW</dt>
* <dd>These are automatically applied during workflow, to give curators
* access to submissions in their curation queues. They usually have an
* automatically-created workflow group as the actor.</dd>
* </dl>
*
* <h2>Start and End dates</h2>
* A policy may have a start date and/or an end date. The policy is
* considered not valid before the start date or after the end date. No date
* means do not apply the related test. For example, embargo until a given
* date can be expressed by a READ policy with a given start date, and a
* limited-time offer by a READ policy with a given end date.
*
* @author dstuve
* @author mwood
*/
package org.dspace.authorize;
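As an illustration of the (object, action, actor) model described above (a sketch, not code from this commit), granting a group READ on an item and then enforcing the check could look like:

// Assumes an open Context, an Item, and a Group "readers" already exist
AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
authorizeService.addPolicy(context, item, Constants.READ, readers); // one ResourcePolicy per actor
authorizeService.authorizeAction(context, item, Constants.READ);    // throws AuthorizeException when denied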
@@ -1,68 +0,0 @@
<!--

The contents of this file are subject to the license and copyright
detailed in the LICENSE and NOTICE files at the root of the source
tree and available online at

http://www.dspace.org/license/

-->
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
<html>
<head>
<!--
Author: dstuve
Version: $Id$
Date: $Date$
-->
</head>
<body bgcolor="white">
<p>Handles permissions for DSpace content.
</p>

<p><strong>Philosophy</strong><br>
DSpace's authorization system follows the classical "police state"
philosophy of security - the user can do nothing, unless it is
specifically allowed. Those permissions are spelled out with
ResourcePolicy objects, stored in the resourcepolicy table in the
database.
</p>

<h2>Policies are attached to Content</h2>
<p><strong>Policies are attached to Content</strong><br>
Resource Policies get assigned to all of the content objects in
DSpace - collections, communities, items, bundles, and bitstreams.
(Currently they are not attached to non-content objects such as EPerson
or Group. But they could be, hence the name ResourcePolicy instead of
ContentPolicy.)
</p>

<h2>Policies are tuples</h2>
Authorization is based on evaluating the tuple of (object, action, who),
such as (ITEM, READ, EPerson John Smith) to check if the EPerson "John Smith"
can read an item. ResourcePolicy objects are pretty simple, describing a single instance of
(object, action, who). If multiple who's are desired, such as Groups 10, 11, and
12 are allowed to READ Item 13, you simply create a ResourcePolicy for each
group.
</p>

<h2>Special Groups</h2>
The install process should create two special groups - group 0, for
anonymous/public access, and group 1 for administrators.
Group 0 (public/anonymous) allows anyone access, even if they are not
authenticated. Group 1's (admin) members have super-user rights, and
are allowed to do any action to any object.
</p>

<h2>Unused ResourcePolicy attributes </h2>
ResourcePolicies have a few attributes that are currently unused,
but are included with the intent that they will be used someday.
One is start and end dates, for when policies will be active, so that
permissions for content can change over time. The other is the EPerson -
policies could apply to only a single EPerson, but for ease of
administration currently a Group is the recommended unit to use to
describe 'who'.
</p>

</body>
</html>
@@ -600,4 +600,17 @@ public interface AuthorizeService {
* @return true if the current user can manage accounts
*/
boolean isAccountManager(Context context);

/**
* Replace all the policies in the target object with exactly the same policies that exist in the source object
*
* @param context DSpace Context
* @param source source of policies
* @param dest destination of inherited policies
* @throws SQLException if there's a database problem
* @throws AuthorizeException if the current user is not authorized to add these policies
*/
public void replaceAllPolicies(Context context, DSpaceObject source, DSpaceObject dest)
throws SQLException, AuthorizeException;

}
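A short usage sketch for the new interface method, assuming source and dest are two content objects loaded in the same Context:

// Drops every policy currently on dest, then copies all of source's policies onto it
authorizeService.replaceAllPolicies(context, source, dest);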
@@ -53,12 +53,19 @@ public interface ResourcePolicyService extends DSpaceCRUDService<ResourcePolicy>
throws SQLException;

/**
* Look for ResourcePolicies by DSpaceObject, Group, and action, ignoring IDs with a specific PolicyID.
* This method can be used to detect duplicate ResourcePolicies.
* Look for ResourcePolicies by DSpaceObject, Group, and action, ignoring
* IDs with a specific PolicyID. This method can be used to detect duplicate
* ResourcePolicies.
*
* @param notPolicyID ResourcePolicies with this ID will be ignored while looking out for equal ResourcePolicies.
* @return List of resource policies for the same DSpaceObject, group and action but other policyID.
* @throws SQLException
* @param context current DSpace session.
* @param dso find policies for this object.
* @param group find policies referring to this group.
* @param action find policies for this action.
* @param notPolicyID ResourcePolicies with this ID will be ignored while
* looking out for equal ResourcePolicies.
* @return List of resource policies for the same DSpaceObject, group and
* action but other policyID.
* @throws SQLException passed through.
*/
public List<ResourcePolicy> findByTypeGroupActionExceptId(Context context, DSpaceObject dso, Group group,
int action, int notPolicyID)
@@ -68,6 +75,16 @@ public interface ResourcePolicyService extends DSpaceCRUDService<ResourcePolicy>

public boolean isDateValid(ResourcePolicy resourcePolicy);

/**
* Create and persist a copy of a given ResourcePolicy, with an empty
* dSpaceObject field.
*
* @param context current DSpace session.
* @param resourcePolicy the policy to be copied.
* @return the copy.
* @throws SQLException passed through.
* @throws AuthorizeException passed through.
*/
public ResourcePolicy clone(Context context, ResourcePolicy resourcePolicy) throws SQLException, AuthorizeException;

public void removeAllPolicies(Context c, DSpaceObject o) throws SQLException, AuthorizeException;
@@ -117,6 +134,7 @@ public interface ResourcePolicyService extends DSpaceCRUDService<ResourcePolicy>
* @param ePerson ePerson whose policies want to find
* @param offset the position of the first result to return
* @param limit paging limit
* @return some of the policies referring to {@code ePerson}.
* @throws SQLException if database error
*/
public List<ResourcePolicy> findByEPerson(Context context, EPerson ePerson, int offset, int limit)
@@ -8,8 +8,8 @@
package org.dspace.browse;

import java.util.List;
import java.util.UUID;

import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;

/**
@@ -140,21 +140,21 @@ public interface BrowseDAO {
public void setAscending(boolean ascending);

/**
* Get the database ID of the container object. The container object will be a
* Get the container object. The container object will be a
* Community or a Collection.
*
* @return the database id of the container, or -1 if none is set
* @return the container, or null if none is set
*/
public UUID getContainerID();
public DSpaceObject getContainer();

/**
* Set the database id of the container object. This should be the id of a
* Community or Collection. This will constrain the results of the browse
* to only items or values within items that appear in the given container.
* Set the container object. This should be a Community or Collection.
* This will constrain the results of the browse to only items or values within items that appear in the given
* container and add the related configuration default filters.
*
* @param containerID community/collection internal ID (UUID)
* @param container community/collection
*/
public void setContainerID(UUID containerID);
public void setContainer(DSpaceObject container);

/**
* get the name of the field in which to look for the container id. This is
@@ -141,12 +141,12 @@ public class BrowseEngine {
Collection col = (Collection) scope.getBrowseContainer();
dao.setContainerTable("collection2item");
dao.setContainerIDField("collection_id");
dao.setContainerID(col.getID());
dao.setContainer(col);
} else if (scope.inCommunity()) {
Community com = (Community) scope.getBrowseContainer();
dao.setContainerTable("communities2item");
dao.setContainerIDField("community_id");
dao.setContainerID(com.getID());
dao.setContainer(com);
}
}

@@ -247,12 +247,12 @@ public class BrowseEngine {
Collection col = (Collection) scope.getBrowseContainer();
dao.setContainerTable("collection2item");
dao.setContainerIDField("collection_id");
dao.setContainerID(col.getID());
dao.setContainer(col);
} else if (scope.inCommunity()) {
Community com = (Community) scope.getBrowseContainer();
dao.setContainerTable("communities2item");
dao.setContainerIDField("community_id");
dao.setContainerID(com.getID());
dao.setContainer(com);
}
}

@@ -413,12 +413,12 @@ public class BrowseEngine {
Collection col = (Collection) scope.getBrowseContainer();
dao.setContainerTable("collection2item");
dao.setContainerIDField("collection_id");
dao.setContainerID(col.getID());
dao.setContainer(col);
} else if (scope.inCommunity()) {
Community com = (Community) scope.getBrowseContainer();
dao.setContainerTable("communities2item");
dao.setContainerIDField("community_id");
dao.setContainerID(com.getID());
dao.setContainer(com);
}
}
@@ -59,7 +59,16 @@ public class CrossLinks {
* @return true/false
*/
public boolean hasLink(String metadata) {
return links.containsKey(metadata);
return findLinkType(metadata) != null;
}

/**
* Is there a link for the given browse name (eg 'author')
* @param browseIndexName
* @return true/false
*/
public boolean hasBrowseName(String browseIndexName) {
return links.containsValue(browseIndexName);
}

/**
@@ -69,6 +78,41 @@ public class CrossLinks {
* @return type
*/
public String getLinkType(String metadata) {
return links.get(metadata);
return findLinkType(metadata);
}

/**
* Get full map of field->indexname link configurations
* @return
*/
public Map<String, String> getLinks() {
return links;
}

/**
* Find and return the browse name for a given metadata field.
* If the link key contains a wildcard eg dc.subject.*, it should
* match dc.subject.other, etc.
* @param metadata
* @return
*/
public String findLinkType(String metadata) {
// Resolve wildcards properly, eg. dc.subject.other matches a configuration for dc.subject.*
for (String key : links.keySet()) {
if (null != key && key.endsWith(".*")) {
// A substring of length-1, also substracting the wildcard should work as a "startsWith"
// check for the field eg. dc.subject.* -> dc.subject is the start of dc.subject.other
if (null != metadata && metadata.startsWith(key.substring(0, key.length() - 1 - ".*".length()))) {
return links.get(key);
}
} else {
// Exact match, if the key field has no .* wildcard
if (links.containsKey(metadata)) {
return links.get(key);
}
}
}
// No match
return null;
}
}
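Illustrative behaviour of the wildcard resolution added above, assuming a hypothetical links map of {"dc.subject.*" -> "subject", "dc.contributor.author" -> "author"}:

crossLinks.findLinkType("dc.subject.other");      // "subject", matched via the dc.subject.* wildcard branch
crossLinks.findLinkType("dc.contributor.author"); // "author", matched via the exact-match branch
crossLinks.findLinkType("dc.title");              // null, so hasLink("dc.title") returns false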
@@ -8,17 +8,17 @@
package org.dspace.browse;

import java.io.Serializable;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.UUID;

import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.util.ClientUtils;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.discovery.DiscoverFacetField;
@@ -30,6 +30,8 @@ import org.dspace.discovery.DiscoverResult.SearchDocument;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationParameters;
import org.dspace.discovery.indexobject.IndexableItem;
import org.dspace.services.factory.DSpaceServicesFactory;
@@ -123,9 +125,9 @@ public class SolrBrowseDAO implements BrowseDAO {
private String containerIDField = null;

/**
* the database id of the container we are constraining to
* the container we are constraining to
*/
private UUID containerID = null;
private DSpaceObject container = null;

/**
* the column that we are sorting results by
@@ -175,7 +177,7 @@ public class SolrBrowseDAO implements BrowseDAO {
if (sResponse == null) {
DiscoverQuery query = new DiscoverQuery();
addLocationScopeFilter(query);
addStatusFilter(query);
addDefaultFilterQueries(query);
if (distinct) {
DiscoverFacetField dff;
if (StringUtils.isNotBlank(startsWith)) {
@@ -206,7 +208,8 @@ public class SolrBrowseDAO implements BrowseDAO {
query.addFilterQueries("{!field f=" + facetField + "_partial}" + value);
}
if (StringUtils.isNotBlank(startsWith) && orderField != null) {
query.addFilterQueries("bi_" + orderField + "_sort:" + startsWith + "*");
query.addFilterQueries(
"bi_" + orderField + "_sort:" + ClientUtils.escapeQueryChars(startsWith) + "*");
}
// filter on item to be sure to don't include any other object
// indexed in the Discovery Search core
@@ -225,26 +228,19 @@ public class SolrBrowseDAO implements BrowseDAO {
return sResponse;
}

private void addStatusFilter(DiscoverQuery query) {
try {
if (!authorizeService.isAdmin(context)
&& (authorizeService.isCommunityAdmin(context)
|| authorizeService.isCollectionAdmin(context))) {
query.addFilterQueries(searcher.createLocationQueryForAdministrableItems(context));
private void addLocationScopeFilter(DiscoverQuery query) {
if (container != null) {
if (containerIDField.startsWith("collection")) {
query.addFilterQueries("location.coll:" + container.getID());
} else if (containerIDField.startsWith("community")) {
query.addFilterQueries("location.comm:" + container.getID());
}
} catch (SQLException ex) {
log.error("Error looking up authorization rights of current user", ex);
}
}

private void addLocationScopeFilter(DiscoverQuery query) {
if (containerID != null) {
if (containerIDField.startsWith("collection")) {
query.addFilterQueries("location.coll:" + containerID);
} else if (containerIDField.startsWith("community")) {
query.addFilterQueries("location.comm:" + containerID);
}
}
private void addDefaultFilterQueries(DiscoverQuery query) {
DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(container);
discoveryConfiguration.getDefaultFilterQueries().forEach(query::addFilterQueries);
}

@Override
@@ -335,7 +331,7 @@ public class SolrBrowseDAO implements BrowseDAO {
throws BrowseException {
DiscoverQuery query = new DiscoverQuery();
addLocationScopeFilter(query);
addStatusFilter(query);
addDefaultFilterQueries(query);
query.setMaxResults(0);
query.addFilterQueries("search.resourcetype:" + IndexableItem.TYPE);

@@ -396,8 +392,8 @@ public class SolrBrowseDAO implements BrowseDAO {
* @see org.dspace.browse.BrowseDAO#getContainerID()
*/
@Override
public UUID getContainerID() {
return containerID;
public DSpaceObject getContainer() {
return container;
}

/*
@@ -559,8 +555,8 @@ public class SolrBrowseDAO implements BrowseDAO {
* @see org.dspace.browse.BrowseDAO#setContainerID(int)
*/
@Override
public void setContainerID(UUID containerID) {
this.containerID = containerID;
public void setContainer(DSpaceObject container) {
this.container = container;

}
@@ -332,8 +332,8 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
}

@Override
public List<Bitstream> findDeletedBitstreams(Context context) throws SQLException {
return bitstreamDAO.findDeletedBitstreams(context);
public List<Bitstream> findDeletedBitstreams(Context context, int limit, int offset) throws SQLException {
return bitstreamDAO.findDeletedBitstreams(context, limit, offset);
}

@Override
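A hedged sketch of paging through the new signature (the limit value is illustrative):

int limit = 100;
for (int offset = 0; ; offset += limit) {
    List<Bitstream> page = bitstreamService.findDeletedBitstreams(context, limit, offset);
    if (page.isEmpty()) {
        break;
    }
    // process this page of deleted bitstreams before fetching the next one
}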
@@ -43,6 +43,7 @@ import org.dspace.core.I18nUtil;
import org.dspace.core.LogHelper;
import org.dspace.core.service.LicenseService;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.DiscoverQuery.SORT_ORDER;
import org.dspace.discovery.DiscoverResult;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchService;
@@ -735,7 +736,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
collection.getID(), collection.getHandle(), getIdentifiers(context, collection)));

// remove subscriptions - hmm, should this be in Subscription.java?
subscribeService.deleteByCollection(context, collection);
subscribeService.deleteByDspaceObject(context, collection);

// Remove Template Item
removeTemplateItem(context, collection);
@@ -946,6 +947,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE);
discoverQuery.setStart(offset);
discoverQuery.setMaxResults(limit);
discoverQuery.setSortField(SOLR_SORT_FIELD, SORT_ORDER.asc);
DiscoverResult resp = retrieveCollectionsWithSubmit(context, discoverQuery, null, community, q);
for (IndexableObject solrCollections : resp.getIndexableObjects()) {
Collection c = ((IndexableCollection) solrCollections).getIndexedObject();
@@ -1025,6 +1027,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE);
discoverQuery.setStart(offset);
discoverQuery.setMaxResults(limit);
discoverQuery.setSortField(SOLR_SORT_FIELD, SORT_ORDER.asc);
DiscoverResult resp = retrieveCollectionsWithSubmit(context, discoverQuery,
entityType, community, q);
for (IndexableObject solrCollections : resp.getIndexableObjects()) {
@@ -36,6 +36,7 @@ import org.dspace.core.I18nUtil;
import org.dspace.core.LogHelper;
import org.dspace.eperson.Group;
import org.dspace.eperson.service.GroupService;
import org.dspace.eperson.service.SubscribeService;
import org.dspace.event.Event;
import org.dspace.identifier.IdentifierException;
import org.dspace.identifier.service.IdentifierService;
@@ -73,7 +74,8 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
protected SiteService siteService;
@Autowired(required = true)
protected IdentifierService identifierService;

@Autowired(required = true)
protected SubscribeService subscribeService;
protected CommunityServiceImpl() {
super();

@@ -217,12 +219,12 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp

@Override
public Bitstream setLogo(Context context, Community community, InputStream is)
throws AuthorizeException, IOException, SQLException {
throws AuthorizeException, IOException, SQLException {
// Check authorisation
// authorized to remove the logo when DELETE rights
// authorized when canEdit
if (!((is == null) && authorizeService.authorizeActionBoolean(
context, community, Constants.DELETE))) {
context, community, Constants.DELETE))) {
canEdit(context, community);
}

@@ -242,7 +244,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
// now create policy for logo bitstream
// to match our READ policy
List<ResourcePolicy> policies = authorizeService
.getPoliciesActionFilter(context, community, Constants.READ);
.getPoliciesActionFilter(context, community, Constants.READ);
authorizeService.addPolicies(context, policies, newLogo);

log.info(LogHelper.getHeader(context, "set_logo",
@@ -549,6 +551,8 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
context.addEvent(new Event(Event.DELETE, Constants.COMMUNITY, community.getID(), community.getHandle(),
getIdentifiers(context, community)));

subscribeService.deleteByDspaceObject(context, community);

// Remove collections
Iterator<Collection> collections = community.getCollections().iterator();
@@ -48,6 +48,12 @@ public abstract class DSpaceObject implements Serializable, ReloadableEntity<jav
@Transient
private StringBuffer eventDetails = null;

/**
* The same order should be applied inside this comparator
* {@link MetadataValueComparators#defaultComparator} to preserve
* ordering while the list has been modified and not yet persisted
* and reloaded.
*/
@OneToMany(fetch = FetchType.LAZY, mappedBy = "dSpaceObject", cascade = CascadeType.ALL, orphanRemoval = true)
@OrderBy("metadataField, place")
private List<MetadataValue> metadata = new ArrayList<>();
@@ -116,7 +122,7 @@ public abstract class DSpaceObject implements Serializable, ReloadableEntity<jav
* @return summary of event details, or null if there are none.
*/
public String getDetails() {
return (eventDetails == null ? null : eventDetails.toString());
return eventDetails == null ? null : eventDetails.toString();
}

/**
@@ -145,7 +151,7 @@ public abstract class DSpaceObject implements Serializable, ReloadableEntity<jav
* one
*/
public String getHandle() {
return (CollectionUtils.isNotEmpty(handles) ? handles.get(0).getHandle() : null);
return CollectionUtils.isNotEmpty(handles) ? handles.get(0).getHandle() : null;
}

void setHandle(List<Handle> handle) {
@@ -126,6 +126,11 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
}
}

// Sort the metadataValues if they have been modified,
// is used to preserve the default order.
if (dso.isMetadataModified()) {
values.sort(MetadataValueComparators.defaultComparator);
}
// Create an array of matching values
return values;
}
@@ -542,7 +547,7 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements

int add = 4 - tokens.length;
if (add > 0) {
tokens = (String[]) ArrayUtils.addAll(tokens, new String[add]);
tokens = ArrayUtils.addAll(tokens, new String[add]);
}

return tokens;
@@ -603,21 +608,18 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
//If two places are the same then the MetadataValue instance will be placed before the
//RelationshipMetadataValue instance.
//This is done to ensure that the order is correct.
metadataValues.sort(new Comparator<MetadataValue>() {
@Override
public int compare(MetadataValue o1, MetadataValue o2) {
int compare = o1.getPlace() - o2.getPlace();
if (compare == 0) {
if (o1 instanceof RelationshipMetadataValue && o2 instanceof RelationshipMetadataValue) {
return compare;
} else if (o1 instanceof RelationshipMetadataValue) {
return 1;
} else if (o2 instanceof RelationshipMetadataValue) {
return -1;
}
metadataValues.sort((o1, o2) -> {
int compare = o1.getPlace() - o2.getPlace();
if (compare == 0) {
if (o1 instanceof RelationshipMetadataValue && o2 instanceof RelationshipMetadataValue) {
return compare;
} else if (o1 instanceof RelationshipMetadataValue) {
return 1;
} else if (o2 instanceof RelationshipMetadataValue) {
return -1;
}
return compare;
}
return compare;
});
for (MetadataValue metadataValue : metadataValues) {
//Retrieve & store the place for each metadata value
@@ -634,7 +636,7 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
String authority = metadataValue.getAuthority();
String relationshipId = StringUtils.split(authority, "::")[1];
Relationship relationship = relationshipService.find(context, Integer.parseInt(relationshipId));
if (relationship.getLeftItem().equals((Item) dso)) {
if (relationship.getLeftItem().equals(dso)) {
relationship.setLeftPlace(mvPlace);
} else {
relationship.setRightPlace(mvPlace);
@@ -10,9 +10,14 @@ package org.dspace.content;
import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.logic.Filter;
import org.dspace.content.logic.FilterUtils;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.InstallItemService;
import org.dspace.content.service.ItemService;
@@ -20,8 +25,11 @@ import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.embargo.service.EmbargoService;
import org.dspace.event.Event;
import org.dspace.identifier.Identifier;
import org.dspace.identifier.IdentifierException;
import org.dspace.identifier.service.IdentifierService;
import org.dspace.supervision.SupervisionOrder;
import org.dspace.supervision.service.SupervisionOrderService;
import org.springframework.beans.factory.annotation.Autowired;

/**
@@ -42,9 +50,13 @@ public class InstallItemServiceImpl implements InstallItemService {
protected IdentifierService identifierService;
@Autowired(required = true)
protected ItemService itemService;
@Autowired(required = true)
protected SupervisionOrderService supervisionOrderService;
@Autowired(required = false)

Logger log = LogManager.getLogger(InstallItemServiceImpl.class);

protected InstallItemServiceImpl() {

}

@Override
@@ -59,10 +71,14 @@ public class InstallItemServiceImpl implements InstallItemService {
AuthorizeException {
Item item = is.getItem();
Collection collection = is.getCollection();
// Get map of filters to use for identifier types.
Map<Class<? extends Identifier>, Filter> filters = FilterUtils.getIdentifierFilters(false);
try {
if (suppliedHandle == null) {
identifierService.register(c, item);
// Register with the filters we've set up
identifierService.register(c, item, filters);
} else {
// This will register the handle but a pending DOI won't be compatible and so won't be registered
identifierService.register(c, item, suppliedHandle);
}
} catch (IdentifierException e) {
@@ -222,9 +238,19 @@ public class InstallItemServiceImpl implements InstallItemService {
// set embargo lift date and take away read access if indicated.
embargoService.setEmbargo(c, item);

// delete all related supervision orders
deleteSupervisionOrders(c, item);

return item;
}

private void deleteSupervisionOrders(Context c, Item item) throws SQLException, AuthorizeException {
List<SupervisionOrder> supervisionOrders = supervisionOrderService.findByItem(c, item);
for (SupervisionOrder supervisionOrder : supervisionOrders) {
supervisionOrderService.delete(c, supervisionOrder);
}
}

@Override
public String getBitstreamProvenanceMessage(Context context, Item myitem)
throws SQLException {
@@ -12,7 +12,6 @@ import java.io.InputStream;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Date;
import java.util.Iterator;
import java.util.LinkedList;
@@ -27,6 +26,8 @@ import java.util.stream.Stream;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.app.requestitem.RequestItem;
import org.dspace.app.requestitem.service.RequestItemService;
import org.dspace.app.util.AuthorizeUtil;
import org.dspace.authorize.AuthorizeConfiguration;
import org.dspace.authorize.AuthorizeException;
@@ -51,8 +52,15 @@ import org.dspace.content.virtual.VirtualMetadataPopulator;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogHelper;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.DiscoverResult;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.indexobject.IndexableItem;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.service.GroupService;
import org.dspace.eperson.service.SubscribeService;
import org.dspace.event.Event;
import org.dspace.harvest.HarvestedItem;
import org.dspace.harvest.service.HarvestedItemService;
@@ -93,6 +101,8 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
@Autowired(required = true)
protected CommunityService communityService;
@Autowired(required = true)
protected GroupService groupService;
@Autowired(required = true)
protected AuthorizeService authorizeService;
@Autowired(required = true)
protected BundleService bundleService;
@@ -105,6 +115,8 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
@Autowired(required = true)
protected InstallItemService installItemService;
@Autowired(required = true)
protected SearchService searchService;
@Autowired(required = true)
protected ResourcePolicyService resourcePolicyService;
@Autowired(required = true)
protected CollectionService collectionService;
@@ -148,6 +160,11 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It

@Autowired(required = true)
private ResearcherProfileService researcherProfileService;
@Autowired(required = true)
private RequestItemService requestItemService;

@Autowired(required = true)
protected SubscribeService subscribeService;

protected ItemServiceImpl() {
super();
@@ -270,9 +287,10 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
return itemDAO.findAll(context, true, true);
}

@Override
public Iterator<Item> findAllRegularItems(Context context) throws SQLException {
return itemDAO.findAllRegularItems(context);
};
}

@Override
public Iterator<Item> findBySubmitter(Context context, EPerson eperson) throws SQLException {
@@ -755,7 +773,8 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It

log.info(LogHelper.getHeader(context, "delete_item", "item_id="
+ item.getID()));

//remove subscription related with it
subscribeService.deleteByDspaceObject(context, item);
// Remove relationships
for (Relationship relationship : relationshipService.findByItem(context, item, -1, -1, false, false)) {
relationshipService.forceDelete(context, relationship, false, false);
@@ -770,6 +789,8 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
// remove version attached to the item
removeVersion(context, item);

removeRequest(context, item);

removeOrcidSynchronizationStuff(context, item);

// Also delete the item if it appears in a harvested collection.
@@ -792,6 +813,14 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
itemDAO.delete(context, item);
}

protected void removeRequest(Context context, Item item) throws SQLException {
Iterator<RequestItem> requestItems = requestItemService.findByItem(context, item);
while (requestItems.hasNext()) {
RequestItem requestItem = requestItems.next();
requestItemService.delete(context, requestItem);
}
}

@Override
public void removeAllBundles(Context context, Item item) throws AuthorizeException, SQLException, IOException {
Iterator<Bundle> bundles = item.getBundles().iterator();
@@ -1025,7 +1054,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
List<Collection> linkedCollections = item.getCollections();
List<Collection> notLinkedCollections = new ArrayList<>(allCollections.size() - linkedCollections.size());

if ((allCollections.size() - linkedCollections.size()) == 0) {
if (allCollections.size() - linkedCollections.size() == 0) {
return notLinkedCollections;
}
for (Collection collection : allCollections) {
@@ -1065,6 +1094,53 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
return collectionService.canEditBoolean(context, item.getOwningCollection(), false);
}

/**
* Finds all Indexed Items where the current user has edit rights. If the user is an Admin,
* this is all Indexed Items. Otherwise, it includes those Items where
* an indexed "edit" policy lists either the eperson or one of the eperson's groups
*
* @param context DSpace context
* @param discoverQuery
* @return discovery search result objects
* @throws SQLException if something goes wrong
* @throws SearchServiceException if search error
*/
private DiscoverResult retrieveItemsWithEdit(Context context, DiscoverQuery discoverQuery)
throws SQLException, SearchServiceException {
EPerson currentUser = context.getCurrentUser();
if (!authorizeService.isAdmin(context)) {
String userId = currentUser != null ? "e" + currentUser.getID().toString() : "e";
Stream<String> groupIds = groupService.allMemberGroupsSet(context, currentUser).stream()
.map(group -> "g" + group.getID());
String query = Stream.concat(Stream.of(userId), groupIds)
.collect(Collectors.joining(" OR ", "edit:(", ")"));
discoverQuery.addFilterQueries(query);
}
return searchService.search(context, discoverQuery);
}

@Override
public List<Item> findItemsWithEdit(Context context, int offset, int limit)
throws SQLException, SearchServiceException {
DiscoverQuery discoverQuery = new DiscoverQuery();
discoverQuery.setDSpaceObjectFilter(IndexableItem.TYPE);
discoverQuery.setStart(offset);
discoverQuery.setMaxResults(limit);
DiscoverResult resp = retrieveItemsWithEdit(context, discoverQuery);
return resp.getIndexableObjects().stream()
.map(solrItems -> ((IndexableItem) solrItems).getIndexedObject())
.collect(Collectors.toList());
}

@Override
public int countItemsWithEdit(Context context) throws SQLException, SearchServiceException {
DiscoverQuery discoverQuery = new DiscoverQuery();
discoverQuery.setMaxResults(0);
discoverQuery.setDSpaceObjectFilter(IndexableItem.TYPE);
DiscoverResult resp = retrieveItemsWithEdit(context, discoverQuery);
return (int) resp.getTotalSearchResults();
}

/**
* Check if the item is an inprogress submission
*
@@ -1073,6 +1149,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
* @return <code>true</code> if the item is an inprogress submission, i.e. a WorkspaceItem or WorkflowItem
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
@Override
public boolean isInProgressSubmission(Context context, Item item) throws SQLException {
return workspaceItemService.findByItem(context, item) != null
|| workflowItemService.findByItem(context, item) != null;
@@ -1103,8 +1180,8 @@ prevent the generation of resource policy entry values with null dspace_object a
if (!authorizeService
.isAnIdenticalPolicyAlreadyInPlace(context, dso, defaultPolicy.getGroup(), Constants.READ,
defaultPolicy.getID()) &&
((!appendMode && this.isNotAlreadyACustomRPOfThisTypeOnDSO(context, dso)) ||
(appendMode && this.shouldBeAppended(context, dso, defaultPolicy)))) {
(!appendMode && this.isNotAlreadyACustomRPOfThisTypeOnDSO(context, dso) ||
appendMode && this.shouldBeAppended(context, dso, defaultPolicy))) {
ResourcePolicy newPolicy = resourcePolicyService.clone(context, defaultPolicy);
newPolicy.setdSpaceObject(dso);
newPolicy.setAction(Constants.READ);
@@ -1535,7 +1612,7 @@ prevent the generation of resource policy entry values with null dspace_object a
fullMetadataValueList.addAll(relationshipMetadataService.getRelationshipMetadata(item, true));
fullMetadataValueList.addAll(dbMetadataValues);

item.setCachedMetadata(sortMetadataValueList(fullMetadataValueList));
item.setCachedMetadata(MetadataValueComparators.sort(fullMetadataValueList));
}

log.debug("Called getMetadata for " + item.getID() + " based on cache");
@@ -1577,28 +1654,6 @@ prevent the generation of resource policy entry values with null dspace_object a
}
}

/**
* This method will sort the List of MetadataValue objects based on the MetadataSchema, MetadataField Element,
* MetadataField Qualifier and MetadataField Place in that order.
* @param listToReturn The list to be sorted
* @return The list sorted on those criteria
*/
private List<MetadataValue> sortMetadataValueList(List<MetadataValue> listToReturn) {
Comparator<MetadataValue> comparator = Comparator.comparing(
metadataValue -> metadataValue.getMetadataField().getMetadataSchema().getName(),
Comparator.nullsFirst(Comparator.naturalOrder()));
comparator = comparator.thenComparing(metadataValue -> metadataValue.getMetadataField().getElement(),
Comparator.nullsFirst(Comparator.naturalOrder()));
comparator = comparator.thenComparing(metadataValue -> metadataValue.getMetadataField().getQualifier(),
Comparator.nullsFirst(Comparator.naturalOrder()));
comparator = comparator.thenComparing(metadataValue -> metadataValue.getPlace(),
Comparator.nullsFirst(Comparator.naturalOrder()));

Stream<MetadataValue> metadataValueStream = listToReturn.stream().sorted(comparator);
listToReturn = metadataValueStream.collect(Collectors.toList());
return listToReturn;
}

@Override
public MetadataValue addMetadata(Context context, Item dso, String schema, String element, String qualifier,
String lang, String value, String authority, int confidence, int place) throws SQLException {
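For clarity, the filter assembled in retrieveItemsWithEdit above joins the user and group tokens into one Solr filter query; a standalone sketch of the same idiom (variable names illustrative):

String userId = "e" + currentUser.getID();
Stream<String> groupIds = groups.stream().map(group -> "g" + group.getID());
String filter = Stream.concat(Stream.of(userId), groupIds)
        .collect(Collectors.joining(" OR ", "edit:(", ")"));
// e.g. edit:(e<eperson-uuid> OR g<group-uuid> OR g<group-uuid>)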
@@ -19,6 +19,7 @@ import javax.persistence.Lob;
import javax.persistence.ManyToOne;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.Transient;

import org.dspace.core.Context;
import org.dspace.core.ReloadableEntity;
@@ -59,7 +60,7 @@ public class MetadataValue implements ReloadableEntity<Integer> {
* The value of the field
*/
@Lob
@Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType")
@Type(type = "org.hibernate.type.TextType")
@Column(name = "text_value")
private String value;

@@ -171,6 +172,14 @@ public class MetadataValue implements ReloadableEntity<Integer> {
this.metadataField = metadataField;
}

/**
* @return {@code MetadataField#getID()}
*/
@Transient
protected Integer getMetadataFieldId() {
return getMetadataField().getID();
}

/**
* Get the metadata value.
*
@@ -0,0 +1,51 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;

import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;

/**
* This class contains only static members that can be used
* to sort list of {@link MetadataValue}
*
* @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com)
*
*/
public final class MetadataValueComparators {

private MetadataValueComparators() {}

/**
* This is the default comparator that mimics the ordering
* applied by the standard {@code @OrderBy} annotation inside
* {@link DSpaceObject#getMetadata()}
*/
public static final Comparator<MetadataValue> defaultComparator =
Comparator.comparing(MetadataValue::getMetadataFieldId)
.thenComparing(
MetadataValue::getPlace,
Comparator.nullsFirst(Comparator.naturalOrder())
);

/**
* This method creates a new {@code List<MetadataValue>} ordered by the
* {@code MetadataComparators#defaultComparator}.
*
* @param metadataValues
* @return {@code List<MetadataValue>} ordered copy list using stream.
*/
public static final List<MetadataValue> sort(List<MetadataValue> metadataValues) {
return metadataValues
.stream()
.sorted(MetadataValueComparators.defaultComparator)
.collect(Collectors.toList());
}

}
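A small usage sketch for the new helper (variable names are illustrative):

// Return an ordered copy of a DSpaceObject's metadata...
List<MetadataValue> ordered = MetadataValueComparators.sort(dso.getMetadata());
// ...or sort an existing list in place, as DSpaceObjectServiceImpl now does when metadata was modified
values.sort(MetadataValueComparators.defaultComparator);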
@@ -1,40 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;

import java.sql.SQLException;
import java.util.List;

import org.dspace.content.service.SupervisedItemService;
import org.dspace.content.service.WorkspaceItemService;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.springframework.beans.factory.annotation.Autowired;

public class SupervisedItemServiceImpl implements SupervisedItemService {

@Autowired(required = true)
protected WorkspaceItemService workspaceItemService;

protected SupervisedItemServiceImpl() {

}

@Override
public List<WorkspaceItem> getAll(Context context)
throws SQLException {
return workspaceItemService.findAllSupervisedItems(context);
}

@Override
public List<WorkspaceItem> findbyEPerson(Context context, EPerson ep)
throws SQLException {
return workspaceItemService.findSupervisedItemsByEPerson(context, ep);
}

}
@@ -8,8 +8,6 @@
package org.dspace.content;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
@@ -17,8 +15,6 @@ import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.OneToOne;
import javax.persistence.SequenceGenerator;
@@ -27,7 +23,6 @@ import javax.persistence.Table;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.workflow.WorkflowItem;
import org.hibernate.proxy.HibernateProxyHelper;

@@ -78,14 +73,6 @@ public class WorkspaceItem
@Column(name = "page_reached")
private Integer pageReached = -1;

@ManyToMany(fetch = FetchType.LAZY)
@JoinTable(
name = "epersongroup2workspaceitem",
joinColumns = {@JoinColumn(name = "workspace_item_id")},
inverseJoinColumns = {@JoinColumn(name = "eperson_group_id")}
)
private final List<Group> supervisorGroups = new ArrayList<>();

/**
* Protected constructor, create object using:
* {@link org.dspace.content.service.WorkspaceItemService#create(Context, Collection, boolean)}
@@ -226,15 +213,4 @@ public class WorkspaceItem
publishedBefore = b;
}

public List<Group> getSupervisorGroups() {
return supervisorGroups;
}

void removeSupervisorGroup(Group group) {
supervisorGroups.remove(group);
}

void addSupervisorGroup(Group group) {
supervisorGroups.add(group);
}
}
@@ -24,6 +24,8 @@ import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.dao.WorkspaceItemDAO;
import org.dspace.content.logic.Filter;
import org.dspace.content.logic.FilterUtils;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.WorkspaceItemService;
@@ -32,6 +34,13 @@ import org.dspace.core.Context;
import org.dspace.core.LogHelper;
import org.dspace.eperson.EPerson;
import org.dspace.event.Event;
import org.dspace.identifier.DOI;
import org.dspace.identifier.DOIIdentifierProvider;
import org.dspace.identifier.Identifier;
import org.dspace.identifier.IdentifierException;
import org.dspace.identifier.factory.IdentifierServiceFactory;
import org.dspace.identifier.service.DOIService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.workflow.WorkflowItem;
import org.dspace.workflow.WorkflowService;
import org.springframework.beans.factory.annotation.Autowired;
@@ -58,6 +67,8 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
    protected ItemService itemService;
    @Autowired(required = true)
    protected WorkflowService workflowService;
    @Autowired(required = true)
    protected DOIService doiService;


    protected WorkspaceItemServiceImpl() {
@@ -160,6 +171,26 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
        }

        itemService.update(context, item);

        // If configured, register identifiers (eg handle, DOI) now. This is typically used with the Show Identifiers
        // submission step which previews minted handles and DOIs during the submission process. Default: false
        if (DSpaceServicesFactory.getInstance().getConfigurationService()
                .getBooleanProperty("identifiers.submission.register", false)) {
            try {
                // Get map of filters to use for identifier types, while the item is in progress
                Map<Class<? extends Identifier>, Filter> filters = FilterUtils.getIdentifierFilters(true);
                IdentifierServiceFactory.getInstance().getIdentifierService().register(context, item, filters);
                // Look for a DOI and move it to PENDING
                DOI doi = doiService.findDOIByDSpaceObject(context, item);
                if (doi != null) {
                    doi.setStatus(DOIIdentifierProvider.PENDING);
                    doiService.update(context, doi);
                }
            } catch (IdentifierException e) {
                log.error("Could not register identifier(s) for item {}: {}", item.getID(), e.getMessage());
            }
        }

        workspaceItem.setItem(item);

        log.info(LogHelper.getHeader(context, "create_workspace_item",
@@ -212,16 +243,6 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
        return workspaceItemDAO.findByItem(context, item);
    }

    @Override
    public List<WorkspaceItem> findAllSupervisedItems(Context context) throws SQLException {
        return workspaceItemDAO.findWithSupervisedGroup(context);
    }

    @Override
    public List<WorkspaceItem> findSupervisedItemsByEPerson(Context context, EPerson ePerson) throws SQLException {
        return workspaceItemDAO.findBySupervisedGroupMember(context, ePerson);
    }

    @Override
    public List<WorkspaceItem> findAll(Context context) throws SQLException {
        return workspaceItemDAO.findAll(context);
@@ -268,10 +289,6 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
            "workspace_item_id=" + workspaceItem.getID() + "item_id=" + item.getID()
                + "collection_id=" + workspaceItem.getCollection().getID()));

        // Need to delete the epersongroup2workspaceitem row first since it refers
        // to workspaceitem ID
        workspaceItem.getSupervisorGroups().clear();

        // Need to delete the workspaceitem row first since it refers
        // to item ID
        workspaceItemDAO.delete(context, workspaceItem);
@@ -307,14 +324,6 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {

        // deleteSubmitPermissions();

        // Need to delete the workspaceitem row first since it refers
        // to item ID
        try {
            workspaceItem.getSupervisorGroups().clear();
        } catch (Exception e) {
            log.error("failed to clear supervisor group", e);
        }

        workspaceItemDAO.delete(context, workspaceItem);

    }
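As a reading aid, here is a minimal sketch (not part of this changeset, hypothetical class name) of how the new "identifiers.submission.register" toggle and FilterUtils.getIdentifierFilters(true) used in the hunk above are meant to work together; it assumes a running DSpace kernel and an in-progress item.

// Sketch only (hypothetical helper class); the property name and the FilterUtils /
// IdentifierService calls are the ones used in the hunk above.
import java.util.Map;

import org.dspace.content.Item;
import org.dspace.content.logic.Filter;
import org.dspace.content.logic.FilterUtils;
import org.dspace.core.Context;
import org.dspace.identifier.Identifier;
import org.dspace.identifier.factory.IdentifierServiceFactory;
import org.dspace.services.factory.DSpaceServicesFactory;

public class SubmissionIdentifierSketch {
    // Declares "throws Exception" only to keep the sketch short; the call site above
    // catches IdentifierException specifically.
    public void registerIfConfigured(Context context, Item item) throws Exception {
        boolean register = DSpaceServicesFactory.getInstance().getConfigurationService()
            .getBooleanProperty("identifiers.submission.register", false);
        if (!register) {
            return; // default: identifiers are minted later, at install/archive time
        }
        // Filters keyed by identifier type decide which identifiers may be minted while in progress
        Map<Class<? extends Identifier>, Filter> filters = FilterUtils.getIdentifierFilters(true);
        IdentifierServiceFactory.getInstance().getIdentifierService().register(context, item, filters);
    }
}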
@@ -136,7 +136,9 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
    }

    protected String buildString(Node node) {
        if (node.getNodeType() == Node.DOCUMENT_NODE) {
        if (node.getNodeType() == Node.DOCUMENT_NODE || (
            node.getParentNode() != null &&
            node.getParentNode().getNodeType() == Node.DOCUMENT_NODE)) {
            return ("");
        } else {
            String parentValue = buildString(node.getParentNode());
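The buildString() change above stops the recursion not only at the document node but also at its direct child (the vocabulary's root element), so the root label no longer ends up in the generated path. A small self-contained JAXP example (illustrative only, not the DSpace implementation) shows the effect: with the old condition it would print "root::child", with the new one just "child".

// Illustrative only (plain JAXP): how the adjusted base case skips the root element.
import java.io.StringReader;

import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;

public class BuildStringDemo {
    static String buildString(Node node) {
        // New base case: the document node itself, or any direct child of it (the root element)
        if (node.getNodeType() == Node.DOCUMENT_NODE || (
            node.getParentNode() != null &&
            node.getParentNode().getNodeType() == Node.DOCUMENT_NODE)) {
            return "";
        }
        String parentValue = buildString(node.getParentNode());
        String label = null;
        if (node.getAttributes() != null && node.getAttributes().getNamedItem("label") != null) {
            label = node.getAttributes().getNamedItem("label").getNodeValue();
        }
        if (label == null) {
            return parentValue;                 // wrapper elements contribute nothing
        }
        return parentValue.isEmpty() ? label : parentValue + "::" + label;
    }

    public static void main(String[] args) throws Exception {
        String xml = "<node label=\"root\"><isComposedBy><node label=\"child\"/></isComposedBy></node>";
        Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder()
            .parse(new InputSource(new StringReader(xml)));
        Node child = doc.getDocumentElement().getFirstChild().getFirstChild();
        System.out.println(buildString(child)); // prints "child" with the new base case
    }
}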
@@ -200,8 +200,8 @@ public class SolrAuthority implements ChoiceAuthority {
    }

    private String toQuery(String searchField, String text) {
        return searchField + ":(" + text.toLowerCase().replaceAll(":", "\\:") + "*) or " + searchField + ":(" + text
            .toLowerCase().replaceAll(":", "\\:") + ")";
        return searchField + ":(" + text.toLowerCase().replaceAll(":", "\\\\:") + "*) or " + searchField + ":(" + text
            .toLowerCase().replaceAll(":", "\\\\:") + ")";
    }

    @Override
@@ -225,7 +225,7 @@ public class SolrAuthority implements ChoiceAuthority {
            log.debug("requesting label for key " + key + " using locale " + locale);
        }
        SolrQuery queryArgs = new SolrQuery();
        queryArgs.setQuery("id:" + key);
        queryArgs.setQuery("id:" + key.replaceAll(":", "\\\\:"));
        queryArgs.setRows(1);
        QueryResponse searchResponse = getSearchService().search(queryArgs);
        SolrDocumentList docs = searchResponse.getResults();
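The four-backslash form above is about Java's replacement-string escaping rather than the regex itself: "\\:" in Java source is the two-character replacement \: which Matcher.replaceAll collapses back to a bare colon, while "\\\\:" produces the literal \: that the Solr query parser needs. A standalone demonstration:

// Standalone demo (not DSpace code): why four backslashes are needed to emit "\:" for Solr.
public class EscapeColonDemo {
    public static void main(String[] args) {
        String key = "VIAF:12345";
        // "\\:" is the replacement string \: where the backslash merely escapes the colon,
        // so the output still contains an unescaped ":" that Solr would parse as a field separator.
        System.out.println(key.replaceAll(":", "\\:"));   // VIAF:12345
        // "\\\\:" is the replacement string \\: which yields a literal backslash plus colon.
        System.out.println(key.replaceAll(":", "\\\\:")); // VIAF\:12345
    }
}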
@@ -0,0 +1,80 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.content.crosswalk;

import static org.dspace.content.Item.ANY;

import java.io.OutputStream;
import java.io.PrintStream;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.factory.HandleServiceFactory;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * Creates a String to be sent as email body for subscriptions
 *
 * @author Alba Aliu
 */
public class SubscriptionDsoMetadataForEmailCompose implements StreamDisseminationCrosswalk {

    private List<String> metadata = new ArrayList<>();

    @Autowired
    private ItemService itemService;

    @Override
    public boolean canDisseminate(Context context, DSpaceObject dso) {
        return Objects.nonNull(dso) && dso.getType() == Constants.ITEM;
    }

    @Override
    public void disseminate(Context context, DSpaceObject dso, OutputStream out) throws SQLException {
        if (dso.getType() == Constants.ITEM) {
            Item item = (Item) dso;
            PrintStream printStream = new PrintStream(out);
            for (String actualMetadata : metadata) {
                String[] splitted = actualMetadata.split("\\.");
                String qualifier = null;
                if (splitted.length == 3) {
                    qualifier = splitted[2];
                }
                var metadataValue = itemService.getMetadataFirstValue(item, splitted[0], splitted[1], qualifier, ANY);
                printStream.print(metadataValue + " ");
            }
            String itemURL = HandleServiceFactory.getInstance()
                                                 .getHandleService()
                                                 .resolveToURL(context, item.getHandle());
            printStream.print(itemURL);
            printStream.print("\n");
            printStream.close();
        }
    }

    @Override
    public String getMIMEType() {
        return "text/plain";
    }

    public List<String> getMetadata() {
        return metadata;
    }

    public void setMetadata(List<String> metadata) {
        this.metadata = metadata;
    }

}
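The disseminate() loop above splits configured fields of the form schema.element[.qualifier]; the qualifier branch is rendered here as applying only to three-part fields, which is what makes the splitted[2] access safe. A standalone check of that parsing (not DSpace code):

// Standalone check: how "schema.element[.qualifier]" metadata field names split.
public class MetadataFieldSplitDemo {
    public static void main(String[] args) {
        for (String field : new String[] {"dc.title", "dc.contributor.author"}) {
            String[] splitted = field.split("\\.");
            String qualifier = (splitted.length == 3) ? splitted[2] : null;
            System.out.println(field + " -> schema=" + splitted[0]
                + ", element=" + splitted[1] + ", qualifier=" + qualifier);
        }
    }
}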
@@ -29,7 +29,7 @@ public interface BitstreamDAO extends DSpaceObjectLegacySupportDAO<Bitstream> {

    public Iterator<Bitstream> findAll(Context context, int limit, int offset) throws SQLException;

    public List<Bitstream> findDeletedBitstreams(Context context) throws SQLException;
    public List<Bitstream> findDeletedBitstreams(Context context, int limit, int offset) throws SQLException;

    public List<Bitstream> findDuplicateInternalIdentifier(Context context, Bitstream bitstream) throws SQLException;

@@ -41,10 +41,6 @@ public interface WorkspaceItemDAO extends GenericDAO<WorkspaceItem> {

    public List<WorkspaceItem> findAll(Context context, Integer limit, Integer offset) throws SQLException;

    public List<WorkspaceItem> findWithSupervisedGroup(Context context) throws SQLException;

    public List<WorkspaceItem> findBySupervisedGroupMember(Context context, EPerson ePerson) throws SQLException;

    int countRows(Context context) throws SQLException;

    List<Map.Entry<Integer, Long>> getStageReachedCounts(Context context) throws SQLException;

@@ -41,13 +41,14 @@ public class BitstreamDAOImpl extends AbstractHibernateDSODAO<Bitstream> impleme
    }

    @Override
    public List<Bitstream> findDeletedBitstreams(Context context) throws SQLException {
    public List<Bitstream> findDeletedBitstreams(Context context, int limit, int offset) throws SQLException {
        CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
        CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Bitstream.class);
        Root<Bitstream> bitstreamRoot = criteriaQuery.from(Bitstream.class);
        criteriaQuery.select(bitstreamRoot);
        criteriaQuery.orderBy(criteriaBuilder.desc(bitstreamRoot.get(Bitstream_.ID)));
        criteriaQuery.where(criteriaBuilder.equal(bitstreamRoot.get(Bitstream_.deleted), true));
        return list(context, criteriaQuery, false, Bitstream.class, -1, -1);
        return list(context, criteriaQuery, false, Bitstream.class, limit, offset);

    }
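With the DAO change above, callers are expected to page through deleted bitstreams rather than loading them all at once. A hedged sketch (hypothetical class, arbitrary batch size) of such a loop, using the matching BitstreamService signature that appears later in this diff:

// Sketch only: paging through deleted bitstreams with the new limit/offset variant.
// Assumes a live DSpace Context supplied by the caller.
import java.sql.SQLException;
import java.util.List;

import org.dspace.content.Bitstream;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamService;
import org.dspace.core.Context;

public class DeletedBitstreamPager {
    public void visitDeleted(Context context) throws SQLException {
        BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();
        int batchSize = 100;
        int offset = 0;
        List<Bitstream> batch;
        do {
            batch = bitstreamService.findDeletedBitstreams(context, batchSize, offset);
            for (Bitstream bitstream : batch) {
                // process each deleted bitstream here, e.g. expunge its assets
            }
            offset += batchSize;
        } while (!batch.isEmpty());
    }
}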
@@ -15,7 +15,6 @@ import java.util.Map;
import javax.persistence.Query;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Join;
import javax.persistence.criteria.Root;

import org.dspace.content.Collection;
@@ -26,8 +25,6 @@ import org.dspace.content.dao.WorkspaceItemDAO;
import org.dspace.core.AbstractHibernateDAO;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.EPerson_;
import org.dspace.eperson.Group;

/**
 * Hibernate implementation of the Database Access Object interface class for the WorkspaceItem object.
@@ -114,33 +111,6 @@ public class WorkspaceItemDAOImpl extends AbstractHibernateDAO<WorkspaceItem> im
        return list(context, criteriaQuery, false, WorkspaceItem.class, limit, offset);
    }

    @Override
    public List<WorkspaceItem> findWithSupervisedGroup(Context context) throws SQLException {
        CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
        CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, WorkspaceItem.class);
        Root<WorkspaceItem> workspaceItemRoot = criteriaQuery.from(WorkspaceItem.class);
        criteriaQuery.select(workspaceItemRoot);
        criteriaQuery.where(criteriaBuilder.isNotEmpty(workspaceItemRoot.get(WorkspaceItem_.supervisorGroups)));

        List<javax.persistence.criteria.Order> orderList = new LinkedList<>();
        orderList.add(criteriaBuilder.asc(workspaceItemRoot.get(WorkspaceItem_.workspaceItemId)));
        criteriaQuery.orderBy(orderList);
        return list(context, criteriaQuery, false, WorkspaceItem.class, -1, -1);
    }

    @Override
    public List<WorkspaceItem> findBySupervisedGroupMember(Context context, EPerson ePerson) throws SQLException {
        CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
        CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, WorkspaceItem.class);
        Root<WorkspaceItem> workspaceItemRoot = criteriaQuery.from(WorkspaceItem.class);
        Join<WorkspaceItem, Group> join = workspaceItemRoot.join("supervisorGroups");
        Join<Group, EPerson> secondJoin = join.join("epeople");
        criteriaQuery.select(workspaceItemRoot);
        criteriaQuery.where(criteriaBuilder.equal(secondJoin.get(EPerson_.id), ePerson.getID()));
        criteriaQuery.orderBy(criteriaBuilder.asc(workspaceItemRoot.get(WorkspaceItem_.workspaceItemId)));
        return list(context, criteriaQuery, false, WorkspaceItem.class, -1, -1);
    }

    @Override
    public int countRows(Context context) throws SQLException {
        return count(createQuery(context, "SELECT count(*) from WorkspaceItem"));
@@ -31,8 +31,8 @@ import org.dspace.content.service.MetadataValueService;
import org.dspace.content.service.RelationshipService;
import org.dspace.content.service.RelationshipTypeService;
import org.dspace.content.service.SiteService;
import org.dspace.content.service.SupervisedItemService;
import org.dspace.content.service.WorkspaceItemService;
import org.dspace.eperson.service.SubscribeService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.workflow.factory.WorkflowServiceFactory;

@@ -71,10 +71,10 @@ public abstract class ContentServiceFactory {

    public abstract InstallItemService getInstallItemService();

    public abstract SupervisedItemService getSupervisedItemService();

    public abstract SiteService getSiteService();

    public abstract SubscribeService getSubscribeService();

    /**
     * Return the implementation of the RelationshipTypeService interface
     *
@@ -114,11 +114,7 @@ public abstract class ContentServiceFactory {
    }

    public <T extends DSpaceObject> DSpaceObjectService<T> getDSpaceObjectService(T dso) {
        // No need to worry when suppressing, as long as our "getDSpaceObjectManager" method is properly implemented
        // no casting issues should occur
        @SuppressWarnings("unchecked")
        DSpaceObjectService<T> manager = getDSpaceObjectService(dso.getType());
        return manager;
        return getDSpaceObjectService(dso.getType());
    }

    @SuppressWarnings("unchecked")
@@ -28,8 +28,8 @@ import org.dspace.content.service.MetadataValueService;
import org.dspace.content.service.RelationshipService;
import org.dspace.content.service.RelationshipTypeService;
import org.dspace.content.service.SiteService;
import org.dspace.content.service.SupervisedItemService;
import org.dspace.content.service.WorkspaceItemService;
import org.dspace.eperson.service.SubscribeService;
import org.springframework.beans.factory.annotation.Autowired;

/**
@@ -68,10 +68,9 @@ public class ContentServiceFactoryImpl extends ContentServiceFactory {
    @Autowired(required = true)
    private InstallItemService installItemService;
    @Autowired(required = true)
    private SupervisedItemService supervisedItemService;
    @Autowired(required = true)
    private SiteService siteService;

    @Autowired(required = true)
    private SubscribeService subscribeService;
    @Autowired(required = true)
    private RelationshipService relationshipService;
    @Autowired(required = true)
@@ -149,13 +148,13 @@ public class ContentServiceFactoryImpl extends ContentServiceFactory {
    }

    @Override
    public SupervisedItemService getSupervisedItemService() {
        return supervisedItemService;
    public SiteService getSiteService() {
        return siteService;
    }

    @Override
    public SiteService getSiteService() {
        return siteService;
    public SubscribeService getSubscribeService() {
        return subscribeService;
    }

    @Override
@@ -18,10 +18,10 @@ import org.dspace.core.Context;
 * statement as a property (unlike an operator) and takes no parameters (unlike a condition)
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public class DefaultFilter implements Filter {
    private LogicalStatement statement;
    private String name;
    private final static Logger log = LogManager.getLogger();

    /**
@@ -44,4 +44,15 @@ public class DefaultFilter implements Filter {
    public boolean getResult(Context context, Item item) throws LogicalStatementException {
        return this.statement.getResult(context, item);
    }

    @Override
    public void setBeanName(String name) {
        log.debug("Initialize bean " + name);
        this.name = name;
    }

    @Override
    public String getName() {
        return name;
    }
}

@@ -9,6 +9,7 @@ package org.dspace.content.logic;

import org.dspace.content.Item;
import org.dspace.core.Context;
import org.springframework.beans.factory.BeanNameAware;

/**
 * The interface for Filter currently doesn't add anything to LogicalStatement but inherits from it
@@ -22,7 +23,7 @@ import org.dspace.core.Context;
 * @author Kim Shepherd
 * @see org.dspace.content.logic.DefaultFilter
 */
public interface Filter extends LogicalStatement {
public interface Filter extends LogicalStatement, BeanNameAware {
    /**
     * Get the result of logical evaluation for an item
     * @param context DSpace context
@@ -32,4 +33,11 @@ public interface Filter extends LogicalStatement {
     */
    @Override
    boolean getResult(Context context, Item item) throws LogicalStatementException;

    /**
     * Get the name of a filter. This can be used by filters which make use of BeanNameAware
     * to return the bean name.
     * @return the id/name of this spring bean
     */
    String getName();
}
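Because Filter now extends BeanNameAware and exposes getName(), a custom filter bean follows the same shape as DefaultFilter above. A hedged sketch with a hypothetical rule and class name:

// Hedged sketch: a hypothetical Filter implementation of the updated interface
// (getResult from LogicalStatement, setBeanName from BeanNameAware, getName added above).
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.logic.Filter;
import org.dspace.content.logic.LogicalStatementException;
import org.dspace.core.Context;

public class HasTitleFilter implements Filter {

    private String name;

    @Override
    public boolean getResult(Context context, Item item) throws LogicalStatementException {
        // Example rule: pass items that carry at least one dc.title value
        return !ContentServiceFactory.getInstance().getItemService()
            .getMetadata(item, "dc", "title", null, Item.ANY).isEmpty();
    }

    @Override
    public void setBeanName(String name) {
        // Spring injects the bean id, which getName() then reports
        this.name = name;
    }

    @Override
    public String getName() {
        return name;
    }
}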
@@ -0,0 +1,85 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.content.logic;

import java.util.HashMap;
import java.util.Map;

import org.dspace.identifier.DOI;
import org.dspace.identifier.Handle;
import org.dspace.identifier.Identifier;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * General utility methods for logical item filtering
 *
 * @author Kim Shepherd
 */
public class FilterUtils {

    @Autowired(required = true)
    ConfigurationService configurationService;

    /**
     * Get a Filter by configuration property name
     * For example, if a module has implemented "my-feature.filter" configuration property
     * this method will return a filter with the ID specified by the configuration property
     * @param property DSpace configuration property name (Apache Commons config)
     * @return Filter object, with a bean ID configured for this property key, or null
     */
    public static Filter getFilterFromConfiguration(String property) {
        String filterName = DSpaceServicesFactory.getInstance().getConfigurationService().getProperty(property);
        if (filterName != null) {
            return DSpaceServicesFactory.getInstance().getServiceManager().getServiceByName(filterName, Filter.class);
        }
        return null;
    }

    /**
     * Get a Filter by configuration property name
     * For example, if a module has implemented "my-feature.filter" configuration property
     * this method will return a filter with the ID specified by the configuration property
     * @param property DSpace configuration property name (Apache Commons config)
     * @return Filter object, with a bean ID configured for this property key, or default filter
     */
    public static Filter getFilterFromConfiguration(String property, Filter defaultFilter) {
        Filter filter = getFilterFromConfiguration(property);
        if (filter != null) {
            return filter;
        }
        return defaultFilter;
    }

    /**
     * Get a map of identifier types and filters to use when creating workspace or archived items
     * This is used by services installing new archived or workspace items to filter by identifier type
     * as some filters should apply to DOI creation but not Handle creation, and so on.
     * The in progress or archived status will be used to load the appropriate filter from configuration
     * <p>
     * @param inProgress whether the item is still in progress (workspace) rather than being installed
     * @return map of identifier classes to the filter that should be applied for each
     */
    public static Map<Class<? extends Identifier>, Filter> getIdentifierFilters(boolean inProgress) {
        String configurationSuffix = "install";
        if (inProgress) {
            configurationSuffix = "workspace";
        }
        Map<Class<? extends Identifier>, Filter> filters = new HashMap<>();
        // Put DOI 'can we create DOI on install / workspace?' filter
        Filter filter = FilterUtils.getFilterFromConfiguration("identifiers.submission.filter." + configurationSuffix);
        // A null filter should be handled safely by the identifier provider (default, or "always true")
        filters.put(DOI.class, filter);
        // This won't have an effect until handle providers implement filtering, but is an example of
        // how the filters can be used for other types
        filters.put(Handle.class, DSpaceServicesFactory.getInstance().getServiceManager().getServiceByName(
            "always_true_filter", TrueFilter.class));
        return filters;
    }
}
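A short sketch (not part of this changeset, hypothetical class name) of resolving the in-progress DOI filter the same way FilterUtils does above, falling back to the always_true_filter bean when the property is unset:

// Sketch only: resolving the workspace DOI filter with a TrueFilter fallback,
// using the property prefix and bean name that appear in the class above.
import org.dspace.content.logic.Filter;
import org.dspace.content.logic.FilterUtils;
import org.dspace.content.logic.TrueFilter;
import org.dspace.services.factory.DSpaceServicesFactory;

public class WorkspaceDoiFilterLookup {
    public Filter lookup() {
        Filter fallback = DSpaceServicesFactory.getInstance().getServiceManager()
            .getServiceByName("always_true_filter", TrueFilter.class);
        return FilterUtils.getFilterFromConfiguration("identifiers.submission.filter.workspace", fallback);
    }
}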
@@ -17,7 +17,6 @@ import org.dspace.core.Context;
 * used as sub-statements in other Filters and Operators.
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public interface LogicalStatement {
    /**

@@ -12,7 +12,6 @@ package org.dspace.content.logic;
 * defined as spring beans.
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public class LogicalStatementException extends RuntimeException {

@@ -33,7 +33,6 @@ import org.dspace.services.factory.DSpaceServicesFactory;
 * A command-line runner used for testing a logical filter against an item, or all items
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public class TestLogicRunner {
@@ -0,0 +1,41 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.content.logic;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.core.Context;

/**
 * Extremely simple filter that always returns true!
 * Useful to pass to methods that expect a filter, in order to effectively say "all items".
 * This could be configured in Spring XML but it is more stable and reliable to have it hard-coded here
 * so that any broken configuration doesn't silently break parts of DSpace that expect it to work.
 *
 * @author Kim Shepherd
 */
public class TrueFilter implements Filter {
    private String name;
    private final static Logger log = LogManager.getLogger();

    public boolean getResult(Context context, Item item) throws LogicalStatementException {
        return true;
    }

    @Override
    public void setBeanName(String name) {
        log.debug("Initialize bean " + name);
        this.name = name;
    }

    @Override
    public String getName() {
        return name;
    }
}
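A hedged sketch of evaluating a resolved filter against an item; a null filter is treated as "always true", mirroring the FilterUtils comment earlier in this diff:

// Sketch only (hypothetical class): evaluating a logical filter for one item.
import org.dspace.content.Item;
import org.dspace.content.logic.Filter;
import org.dspace.content.logic.LogicalStatementException;
import org.dspace.core.Context;

public class FilterEvaluationSketch {
    public boolean passes(Context context, Item item, Filter filter) {
        if (filter == null) {
            return true; // no filter configured: behave as "always true"
        }
        try {
            return filter.getResult(context, item);
        } catch (LogicalStatementException e) {
            // In this sketch an evaluation error simply counts as "does not pass"
            return false;
        }
    }
}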
@@ -23,7 +23,6 @@ import org.springframework.beans.factory.annotation.Autowired;
 * Abstract class for conditions, to implement the basic getter and setter parameters
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public abstract class AbstractCondition implements Condition {

@@ -18,7 +18,6 @@ import org.dspace.core.Context;
 * A condition to evaluate an item based on how many bitstreams it has in a particular bundle
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public class BitstreamCountCondition extends AbstractCondition {
    /**

@@ -22,7 +22,6 @@ import org.dspace.core.Context;
 * operator is not a condition but also a logical statement.
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public interface Condition extends LogicalStatement {

@@ -23,7 +23,6 @@ import org.dspace.core.Context;
 * if the item belongs to any of them.
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public class InCollectionCondition extends AbstractCondition {
    private static Logger log = LogManager.getLogger(InCollectionCondition.class);

@@ -24,7 +24,6 @@ import org.dspace.core.Context;
 * if the item belongs to any of them.
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public class InCommunityCondition extends AbstractCondition {
    private final static Logger log = LogManager.getLogger();

@@ -0,0 +1,37 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.content.logic.condition;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.content.logic.LogicalStatementException;
import org.dspace.core.Context;

/**
 * A condition that returns true if the item is archived
 *
 * @author Kim Shepherd
 */
public class IsArchivedCondition extends AbstractCondition {
    private final static Logger log = LogManager.getLogger();

    /**
     * Return true if item is archived
     * Return false if not
     * @param context DSpace context
     * @param item Item to evaluate
     * @return boolean result of evaluation
     * @throws LogicalStatementException
     */
    @Override
    public boolean getResult(Context context, Item item) throws LogicalStatementException {
        log.debug("Result of isArchived is " + item.isArchived());
        return item.isArchived();
    }
}

@@ -17,7 +17,6 @@ import org.dspace.core.Context;
 * A condition that returns true if the item is withdrawn
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public class IsWithdrawnCondition extends AbstractCondition {
    private final static Logger log = LogManager.getLogger();

@@ -23,7 +23,6 @@ import org.dspace.core.Context;
 * in a given metadata field
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public class MetadataValueMatchCondition extends AbstractCondition {

@@ -23,7 +23,6 @@ import org.dspace.core.Context;
 * in a given metadata field
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public class MetadataValuesMatchCondition extends AbstractCondition {

@@ -25,7 +25,6 @@ import org.dspace.core.Context;
 * can perform the action on a given item
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public class ReadableByGroupCondition extends AbstractCondition {
    private final static Logger log = LogManager.getLogger();

@@ -22,7 +22,6 @@ import org.dspace.core.Context;
 * as a logical result
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public abstract class AbstractOperator implements LogicalStatement {

@@ -19,7 +19,6 @@ import org.dspace.core.Context;
 * true if all sub-statements return true
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public class And extends AbstractOperator {

@@ -18,7 +18,6 @@ import org.dspace.core.Context;
 * An operator that implements NAND by negating an AND operation
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public class Nand extends AbstractOperator {

@@ -19,7 +19,6 @@ import org.dspace.core.Context;
 * Not can have one sub-statement only, while and, or, nor, ... can have multiple sub-statements.
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public class Not implements LogicalStatement {

@@ -19,7 +19,6 @@ import org.dspace.core.Context;
 * true if one or more sub-statements return true
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public class Or extends AbstractOperator {
@@ -183,7 +183,7 @@ public interface BitstreamService extends DSpaceObjectService<Bitstream>, DSpace
     * @return a list of all bitstreams that have been "deleted"
     * @throws SQLException if database error
     */
    public List<Bitstream> findDeletedBitstreams(Context context) throws SQLException;
    public List<Bitstream> findDeletedBitstreams(Context context, int limit, int offset) throws SQLException;


    /**

@@ -33,6 +33,11 @@ import org.dspace.eperson.Group;
public interface CollectionService
    extends DSpaceObjectService<Collection>, DSpaceObjectLegacySupportService<Collection> {

    /*
     * Field used to sort community and collection lists at solr
     */
    public static final String SOLR_SORT_FIELD = "dc.title_sort";

    /**
     * Create a new collection with a new ID.
     * Once created the collection is added to the given community
@@ -46,7 +51,6 @@ public interface CollectionService
    public Collection create(Context context, Community community) throws SQLException,
        AuthorizeException;


    /**
     * Create a new collection with the supplied handle and with a new ID.
     * Once created the collection is added to the given community
Some files were not shown because too many files have changed in this diff.