Merge branch 'master' into DS-4443_delete-solr-record-on-workspaceitem-deletion

Kevin Van de Velde committed 2020-10-09 09:02:21 +02:00
379 changed files with 24971 additions and 5607 deletions

.codecov.yml (new file, 29 lines)

@@ -0,0 +1,29 @@
# DSpace configuration for Codecov.io coverage reports
# These override the default YAML settings at
# https://docs.codecov.io/docs/codecov-yaml#section-default-yaml
# Can be validated via instructions at:
# https://docs.codecov.io/docs/codecov-yaml#validate-your-repository-yaml
# Settings related to code coverage analysis
coverage:
  status:
    # Configuration for project-level checks. This checks how the PR changes overall coverage.
    project:
      default:
        # For each PR, auto compare coverage to previous commit.
        # Require that overall (project) coverage does NOT drop more than 0.5%
        target: auto
        threshold: 0.5%
    # Configuration for patch-level checks. This checks the relative coverage of the new PR code ONLY.
    patch:
      default:
        # For each PR, make sure the coverage of the new code is within 1% of current overall coverage.
        # We let 'patch' be more lenient as we only require *project* coverage to not drop significantly.
        target: auto
        threshold: 1%

# Turn PR comments "off". This feature adds the code coverage summary as a
# comment on each PR. See https://docs.codecov.io/docs/pull-request-comments
# However, this same info is available from the Codecov checks in the PR's
# "Checks" tab in GitHub. So, the comment is unnecessary.
comment: false


@@ -0,0 +1,25 @@
# This workflow checks open PRs for merge conflicts and labels them when conflicts are found
name: Check for merge conflicts

# Run whenever the "main" branch is updated
# NOTE: This means merge conflicts are only checked for when a PR is merged to main.
on:
  push:
    branches:
      - main

jobs:
  triage:
    runs-on: ubuntu-latest
    steps:
      # See: https://github.com/mschilde/auto-label-merge-conflicts/
      - name: Auto-label PRs with merge conflicts
        uses: mschilde/auto-label-merge-conflicts@v2.0
        # Add "merge conflict" label if a merge conflict is detected. Remove it when resolved.
        # Note, the authentication token is created automatically
        # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token
        with:
          CONFLICT_LABEL_NAME: 'merge conflict'
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        # Ignore errors
        continue-on-error: true


@@ -1,46 +1,55 @@
# DSpace's Travis CI Configuration
# Builds: https://travis-ci.com/github/DSpace/DSpace
# Travis configuration guide/validation: https://config.travis-ci.com/explore
language: java
- sudo: false
+ # TODO: Upgrade to Bionic
dist: trusty
+ os: linux

- env:
-   # Give Maven 1GB of memory to work with
-   - MAVEN_OPTS=-Xmx1024M

jdk:
  # DS-3384 Oracle JDK has DocLint enabled by default.
  # Let's use this to catch any newly introduced DocLint issues.
  - oraclejdk11

- ## Should we run into any problems with oraclejdk8 on Travis, we may try the following workaround.
- ## https://docs.travis-ci.com/user/languages/java#Testing-Against-Multiple-JDKs
- ## https://github.com/travis-ci/travis-ci/issues/3259#issuecomment-130860338
- #addons:
- #  apt:
- #    packages:
- #    - oracle-java8-installer
+ # Define global environment variables (shared across all jobs)
+ env:
+   global:
+     # Suppress all Maven "downloading" messages in Travis logs (see https://stackoverflow.com/a/35653426)
+     # This also slightly speeds builds in Travis, as there is less logging
+     - HIDE_MAVEN_DOWNLOADS="-Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn"
+     # Give Maven 1GB of memory to work with
+     - MAVEN_OPTS="-Xmx1024M $HIDE_MAVEN_DOWNLOADS"
+     # Maven options which will skip ALL code validation checks. Includes skipping:
+     #   - enforcer.skip   => Skip maven-enforcer-plugin rules
+     #   - checkstyle.skip => Skip all checkstyle checks by maven-checkstyle-plugin
+     #   - license.skip    => Skip all license header checks by license-maven-plugin
+     #   - xml.skip        => Skip all XML/XSLT validation by xml-maven-plugin
+     # (Useful for builds which don't need to repeat code checks)
+     - SKIP_CODE_CHECKS="-Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxml.skip=true"

- before_install:
-   # Remove outdated settings.xml from Travis builds. Workaround for https://github.com/travis-ci/travis-ci/issues/4629
-   - rm ~/.m2/settings.xml
+ # Create two jobs to run Unit & Integration tests in parallel.
+ # These jobs only differ in the TEST_FLAGS defined below,
+ # and otherwise share all the other configs in this file
+ jobs:
+   include:
+     - name: "Run Unit Tests & Check Code"
+       # NOTE: unit tests include deprecated REST API v6 (as it has unit tests)
+       env: TEST_FLAGS="-DskipUnitTests=false -Pdspace-rest"
+     - name: "Run Integration Tests"
+       # NOTE: skips code checks, as they are already done by Unit Test job
+       env: TEST_FLAGS="-DskipIntegrationTests=false $SKIP_CODE_CHECKS"

- # Skip install stage, as we'll do it below
- install: "echo 'Skipping install stage, dependencies will be downloaded during build and test stages.'"
+ # Skip 'install' process to save time. We build/install/test all at once in "script" below.
+ install: skip

- # Build DSpace and run both Unit and Integration Tests
- script:
-   # Summary of flags used (below):
-   # license:check => Validate all source code license headers
-   # -DskipTests=false => Enable DSpace Unit Tests
-   # -DskipITs=false => Enable DSpace Integration Tests
-   # -Pdspace-rest => Enable optional dspace-rest module as part of build
-   # -P !assembly => Skip assembly of "dspace-installer" directory (as it can be memory intensive)
-   # -B => Maven batch/non-interactive mode (recommended for CI)
-   # -V => Display Maven version info before build
-   # -Dsurefire.rerunFailingTestsCount=2 => try again for flakey tests, and keep track of/report on number of retries
-   - "mvn clean install license:check -DskipTests=false -DskipITs=false -Pdspace-rest -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"
+ # Build DSpace and run configured tests (see 'jobs' above)
+ # Notes on flags used:
+ #   -B => Maven batch/non-interactive mode (recommended for CI)
+ #   -V => Display Maven version info before build
+ #   -P-assembly => Disable build of dspace-installer in [src]/dspace/, as it can be memory intensive
+ #   -Pcoverage-report => Enable aggregate code coverage report (across all modules) via JaCoCo
+ script: mvn install -B -V -P-assembly -Pcoverage-report $TEST_FLAGS

- # After a successful build and test (see 'script'), send code coverage reports to coveralls.io
- # These code coverage reports are generated by jacoco-maven-plugin (during test process above).
- after_success:
-   # Run "verify", enabling the "coveralls" profile. This sends our reports to coveralls.io (see coveralls-maven-plugin)
-   - "cd dspace && mvn verify -P coveralls"
+ # After a successful build and test (see 'script'), send aggregate code coverage reports
+ # (generated by -Pcoverage-report above) to CodeCov.io
+ after_success: bash <(curl -s https://codecov.io/bash)


@@ -90,33 +90,33 @@ run automatically by [Travis CI](https://travis-ci.com/DSpace/DSpace/) for all P
* How to run both Unit Tests (via `maven-surefire-plugin`) and Integration Tests (via `maven-failsafe-plugin`):
  ```
- mvn clean test -DskipTests=false -DskipITs=false
+ mvn install -DskipUnitTests=false -DskipIntegrationTests=false
  ```
- * How to run just Unit Tests:
+ * How to run _only_ Unit Tests:
  ```
- mvn test -DskipTests=false
+ mvn test -DskipUnitTests=false
  ```
* How to run a *single* Unit Test
  ```
  # Run all tests in a specific test class
  # NOTE: failIfNoTests=false is required to skip tests in other modules
- mvn test -DskipTests=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false
+ mvn test -DskipUnitTests=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false
  # Run one test method in a specific test class
- mvn test -DskipTests=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
+ mvn test -DskipUnitTests=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
  ```
- * How to run Integration Tests (requires enabling Unit tests too)
+ * How to run _only_ Integration Tests
  ```
- mvn verify -DskipTests=false -DskipITs=false
+ mvn install -DskipIntegrationTests=false
  ```
- * How to run a *single* Integration Test (requires enabling Unit tests too)
+ * How to run a *single* Integration Test
  ```
  # Run all integration tests in a specific test class
  # NOTE: failIfNoTests=false is required to skip tests in other modules
- mvn test -DskipTests=false -DskipITs=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false
+ mvn install -DskipIntegrationTests=false -Dit.test=[full.package.testClassName] -DfailIfNoTests=false
  # Run one test method in a specific test class
- mvn test -DskipTests=false -DskipITs=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
+ mvn install -DskipIntegrationTests=false -Dit.test=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
  ```
* How to run only tests of a specific DSpace module
  ```


@@ -127,44 +127,82 @@
</executions>
</plugin>
<!-- This plugin allows us to run a Groovy script in our Maven POM
(see: https://groovy.github.io/gmaven/groovy-maven-plugin/execute.html )
We are generating a OS-agnostic version (agnostic.build.dir) of
the ${project.build.directory} property (full path of target dir).
This is needed by the Surefire & Failsafe plugins (see below)
to initialize the Unit Test environment's dspace.cfg file.
Otherwise, the Unit Test Framework will not work on Windows OS.
This Groovy code was mostly borrowed from:
http://stackoverflow.com/questions/3872355/how-to-convert-file-separator-in-maven
-->
<plugin>
<groupId>org.codehaus.gmaven</groupId>
<artifactId>groovy-maven-plugin</artifactId>
<executions>
<execution>
<id>setproperty</id>
<phase>initialize</phase>
<goals>
<goal>execute</goal>
</goals>
<configuration>
<source>
project.properties['agnostic.build.dir'] = project.build.directory.replace(File.separator, '/');
log.info("Initializing Maven property 'agnostic.build.dir' to: {}", project.properties['agnostic.build.dir']);
</source>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>com.mycila</groupId>
<artifactId>license-maven-plugin</artifactId>
<configuration>
<excludes>
<exclude>src/test/resources/**</exclude>
<exclude>src/test/data/**</exclude>
<!-- Ignore license header requirements on Flyway upgrade scripts -->
<exclude>src/main/resources/org/dspace/storage/rdbms/flywayupgrade/**</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
<profiles>
<profile>
- <id>findbugs</id>
+ <id>spotbugs</id>
<activation>
<activeByDefault>false</activeByDefault>
- <!-- property>
-   <name>skipTests</name>
-   <value>false</value>
- </property -->
</activation>
<build>
<plugins>
<plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>findbugs-maven-plugin</artifactId>
+ <groupId>com.github.spotbugs</groupId>
+ <artifactId>spotbugs-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</profile>
- <!-- If Unit Testing is enabled, then setup the Unit Test Environment.
-      See also the 'skiptests' profile in Parent POM. -->
+ <!-- Setup the Unit Test Environment (when -DskipUnitTests=false) -->
<profile>
- <id>test-environment</id>
+ <id>unit-test-environment</id>
<activation>
<activeByDefault>false</activeByDefault>
<property>
- <name>skipTests</name>
+ <name>skipUnitTests</name>
<value>false</value>
</property>
</activation>
<build>
<plugins>
- <!-- Unit/Integration Testing setup: This plugin unzips the
+ <!-- Unit Testing setup: This plugin unzips the
'testEnvironment.zip' file (created by dspace-parent POM), into
the 'target/testing/' folder, to essentially create a test
install of DSpace, against which Tests can be run. -->
@@ -184,53 +222,16 @@
</configuration>
<executions>
<execution>
- <id>setupTestEnvironment</id>
+ <id>setupUnitTestEnvironment</id>
<phase>generate-test-resources</phase>
<goals>
<goal>unpack</goal>
</goals>
</execution>
<execution>
<id>setupIntegrationTestEnvironment</id>
<phase>pre-integration-test</phase>
<goals>
<goal>unpack</goal>
</goals>
</execution>
</executions>
</plugin>
- <!-- This plugin allows us to run a Groovy script in our Maven POM
(see: http://gmaven.codehaus.org/Executing+Groovy+Code )
We are generating a OS-agnostic version (agnostic.build.dir) of
the ${project.build.directory} property (full path of target dir).
This is needed by the Surefire & Failsafe plugins (see below)
to initialize the Unit Test environment's dspace.cfg file.
Otherwise, the Unit Test Framework will not work on Windows OS.
This Groovy code was mostly borrowed from:
http://stackoverflow.com/questions/3872355/how-to-convert-file-separator-in-maven
-->
<plugin>
<groupId>org.codehaus.gmaven</groupId>
<artifactId>groovy-maven-plugin</artifactId>
<executions>
<execution>
<id>setproperty</id>
<phase>initialize</phase>
<goals>
<goal>execute</goal>
</goals>
<configuration>
<source>
project.properties['agnostic.build.dir'] = project.build.directory.replace(File.separator, '/');
log.info("Initializing Maven property 'agnostic.build.dir' to: {}", project.properties['agnostic.build.dir']);
</source>
</configuration>
</execution>
</executions>
</plugin>
- <!-- Run Unit Testing! This plugin just kicks off the tests (when enabled). -->
+ <!-- Run Unit Testing! This plugin just kicks off the tests. -->
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
@@ -245,8 +246,52 @@
</systemPropertyVariables>
</configuration>
</plugin>
</plugins>
</build>
</profile>
- <!-- Run Integration Testing! This plugin just kicks off the tests (when enabled). -->
+ <!-- Setup the Integration Test Environment (when -DskipIntegrationTests=false) -->
<profile>
<id>integration-test-environment</id>
<activation>
<activeByDefault>false</activeByDefault>
<property>
<name>skipIntegrationTests</name>
<value>false</value>
</property>
</activation>
<build>
<plugins>
<!-- Integration Testing setup: This plugin unzips the
'testEnvironment.zip' file (created by dspace-parent POM), into
the 'target/testing/' folder, to essentially create a test
install of DSpace, against which Tests can be run. -->
<plugin>
<artifactId>maven-dependency-plugin</artifactId>
<configuration>
<outputDirectory>${project.build.directory}/testing</outputDirectory>
<artifactItems>
<artifactItem>
<groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId>
<version>${project.version}</version>
<type>zip</type>
<classifier>testEnvironment</classifier>
</artifactItem>
</artifactItems>
</configuration>
<executions>
<execution>
<id>setupIntegrationTestEnvironment</id>
<phase>pre-integration-test</phase>
<goals>
<goal>unpack</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- Run Integration Testing! This plugin just kicks off the tests. -->
<plugin>
<artifactId>maven-failsafe-plugin</artifactId>
<configuration>
@@ -262,7 +307,6 @@
</plugin>
</plugins>
</build>
</profile>
</profiles>
@@ -325,6 +369,14 @@
<artifactId>apache-jena-libs</artifactId>
<type>pom</type>
</dependency>
<!-- Required to support PubMed API call in "PubmedImportMetadataSourceServiceImpl.GetRecord" -->
<!-- Makes runtime operations in Jersey Dependency Injection -->
<dependency>
<groupId>org.glassfish.jersey.inject</groupId>
<artifactId>jersey-hk2</artifactId>
<version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
@@ -700,7 +752,7 @@
<dependency>
<groupId>org.flywaydb</groupId>
<artifactId>flyway-core</artifactId>
- <version>4.0.3</version>
+ <version>6.5.5</version>
</dependency>
<!-- Google Analytics -->
@@ -724,6 +776,7 @@
<groupId>com.google.oauth-client</groupId>
<artifactId>google-oauth-client</artifactId>
</dependency>
<!-- FindBugs -->
<dependency>
<groupId>com.google.code.findbugs</groupId>
@@ -733,6 +786,7 @@
<groupId>com.google.code.findbugs</groupId>
<artifactId>annotations</artifactId>
</dependency>
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>


@@ -10,10 +10,14 @@ package org.dspace.app.bulkedit;
import java.sql.SQLException;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.DSpaceObject;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataDSpaceCsvExportService;
import org.dspace.core.Context;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.utils.DSpace;
@@ -41,8 +45,7 @@ public class MetadataExport extends DSpaceRunnable<MetadataExportScriptConfigura
public void internalRun() throws Exception {
if (help) {
- handler.logInfo("\nfull export: metadata-export -f filename");
- handler.logInfo("partial export: metadata-export -i handle -f filename");
+ logHelpInfo();
printHelp();
return;
}
@@ -61,6 +64,11 @@ public class MetadataExport extends DSpaceRunnable<MetadataExportScriptConfigura
context.complete();
}
protected void logHelpInfo() {
handler.logInfo("\nfull export: metadata-export");
handler.logInfo("partial export: metadata-export -i handle");
}
@Override
public MetadataExportScriptConfiguration getScriptConfiguration() {
return new DSpace().getServiceManager().getServiceByName("metadata-export",
@@ -75,17 +83,32 @@ public class MetadataExport extends DSpaceRunnable<MetadataExportScriptConfigura
return;
}
// Check a filename is given
if (!commandLine.hasOption('f')) {
throw new ParseException("Required parameter -f missing!");
}
filename = commandLine.getOptionValue('f');
exportAllMetadata = commandLine.hasOption('a');
if (!commandLine.hasOption('i')) {
exportAllItems = true;
}
handle = commandLine.getOptionValue('i');
filename = getFileNameForExportFile();
exportAllMetadata = commandLine.hasOption('a');
}
protected String getFileNameForExportFile() throws ParseException {
Context context = new Context();
try {
DSpaceObject dso = null;
if (StringUtils.isNotBlank(handle)) {
dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, handle);
} else {
dso = ContentServiceFactory.getInstance().getSiteService().findSite(context);
}
if (dso == null) {
throw new ParseException("The given handle could not be resolved to a DSpaceObject");
}
return dso.getID().toString() + ".csv";
} catch (SQLException e) {
handler.handleException("Something went wrong trying to retrieve DSO for handle: " + handle, e);
}
return null;
}
}


@@ -0,0 +1,33 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import org.apache.commons.cli.ParseException;
public class MetadataExportCli extends MetadataExport {
@Override
protected String getFileNameForExportFile() {
return commandLine.getOptionValue('f');
}
@Override
public void setup() throws ParseException {
super.setup();
// Check a filename is given
if (!commandLine.hasOption('f')) {
throw new ParseException("Required parameter -f missing!");
}
}
@Override
protected void logHelpInfo() {
handler.logInfo("\nfull export: metadata-export -f filename");
handler.logInfo("partial export: metadata-export -i handle -f filename");
}
}


@@ -0,0 +1,26 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import java.io.OutputStream;
import org.apache.commons.cli.Options;
public class MetadataExportCliScriptConfiguration extends MetadataExportScriptConfiguration<MetadataExportCli> {
@Override
public Options getOptions() {
Options options = super.getOptions();
options.addOption("f", "file", true, "destination where you want file written");
options.getOption("f").setType(OutputStream.class);
options.getOption("f").setRequired(true);
super.options = options;
return options;
}
}


@@ -7,7 +7,6 @@
*/
package org.dspace.app.bulkedit;
import java.io.OutputStream;
import java.sql.SQLException;
import org.apache.commons.cli.Options;
@@ -56,9 +55,6 @@ public class MetadataExportScriptConfiguration<T extends MetadataExport> extends
options.addOption("i", "id", true, "ID or handle of thing to export (item, collection, or community)");
options.getOption("i").setType(String.class);
options.addOption("f", "file", true, "destination where you want file written");
options.getOption("f").setType(OutputStream.class);
options.getOption("f").setRequired(true);
options.addOption("a", "all", false,
"include all metadata fields that are not normally changed (e.g. provenance)");
options.getOption("a").setType(boolean.class);


@@ -182,24 +182,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
c.turnOffAuthorisationSystem();
// Find the EPerson, assign to context
+ assignCurrentUserInContext(c);
- try {
if (commandLine.hasOption('e')) {
EPerson eperson;
String e = commandLine.getOptionValue('e');
if (e.indexOf('@') != -1) {
eperson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(c, e);
} else {
eperson = EPersonServiceFactory.getInstance().getEPersonService().find(c, UUID.fromString(e));
}
if (eperson == null) {
throw new ParseException("Error, eperson cannot be found: " + e);
}
c.setCurrentUser(eperson);
}
} catch (Exception e) {
throw new ParseException("Unable to find DSpace user: " + e.getMessage());
}
if (authorityControlled == null) {
setAuthorizedMetadataFields();
@@ -277,6 +260,18 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
}
protected void assignCurrentUserInContext(Context context) throws ParseException {
UUID uuid = getEpersonIdentifier();
if (uuid != null) {
try {
EPerson ePerson = EPersonServiceFactory.getInstance().getEPersonService().find(context, uuid);
context.setCurrentUser(ePerson);
} catch (SQLException e) {
log.error("Something went wrong trying to fetch the eperson for uuid: " + uuid, e);
}
}
}
/**
* This method determines whether the changes should be applied or not. This is default set to true for the REST
* script as we don't want to interact with the caller. This will be overwritten in the CLI script to ask for
@@ -312,9 +307,6 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
throw new ParseException("Required parameter -f missing!");
}
filename = commandLine.getOptionValue('f');
if (!commandLine.hasOption('e')) {
throw new ParseException("Required parameter -e missing!");
}
// Option to apply template to new items
if (commandLine.hasOption('t')) {


@@ -10,7 +10,12 @@ package org.dspace.app.bulkedit;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.UUID;
import org.apache.commons.cli.ParseException;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.scripts.handler.DSpaceRunnableHandler;
/**
@@ -30,4 +35,34 @@ public class MetadataImportCLI extends MetadataImport {
return false;
}
}
@Override
protected void assignCurrentUserInContext(Context context) throws ParseException {
try {
if (commandLine.hasOption('e')) {
EPerson eperson;
String e = commandLine.getOptionValue('e');
if (e.indexOf('@') != -1) {
eperson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, e);
} else {
eperson = EPersonServiceFactory.getInstance().getEPersonService().find(context, UUID.fromString(e));
}
if (eperson == null) {
throw new ParseException("Error, eperson cannot be found: " + e);
}
context.setCurrentUser(eperson);
}
} catch (Exception e) {
throw new ParseException("Unable to find DSpace user: " + e.getMessage());
}
}
@Override
public void setup() throws ParseException {
super.setup();
if (!commandLine.hasOption('e')) {
throw new ParseException("Required parameter -e missing!");
}
}
}


@@ -7,10 +7,21 @@
*/
package org.dspace.app.bulkedit;
import org.apache.commons.cli.Options;
import org.dspace.scripts.configuration.ScriptConfiguration;
/**
* The {@link ScriptConfiguration} for the {@link org.dspace.app.bulkedit.MetadataImportCLI} CLI script
*/
public class MetadataImportCliScriptConfiguration extends MetadataImportScriptConfiguration<MetadataImportCLI> {
@Override
public Options getOptions() {
Options options = super.getOptions();
options.addOption("e", "email", true, "email address or user id of user (required if adding new items)");
options.getOption("e").setType(String.class);
options.getOption("e").setRequired(true);
super.options = options;
return options;
}
}


@@ -57,9 +57,6 @@ public class MetadataImportScriptConfiguration<T extends MetadataImport> extends
options.addOption("f", "file", true, "source file");
options.getOption("f").setType(InputStream.class);
options.getOption("f").setRequired(true);
options.addOption("e", "email", true, "email address or user id of user (required if adding new items)");
options.getOption("e").setType(String.class);
options.getOption("e").setRequired(true);
options.addOption("s", "silent", false,
"silent operation - doesn't request confirmation of changes USE WITH CAUTION");
options.getOption("s").setType(boolean.class);


@@ -19,6 +19,15 @@ import org.dspace.core.Context;
* @author Andrea Bollini
*/
public interface RequestItemAuthorExtractor {
- public RequestItemAuthor getRequestItemAuthor(Context context, Item item)
-     throws SQLException;
+ /**
+  * Retrieve the author to contact for a requested copy of the given item.
+  *
+  * @param context DSpace context object
+  * @param item item to request
+  * @return An object containing the name and email address to send the request to,
+  *         or null if no valid email address was found.
+  * @throws SQLException if database error
+  */
+ public RequestItemAuthor getRequestItemAuthor(Context context, Item item) throws SQLException;
}
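To make the documented contract concrete, below is a minimal sketch of a custom extractor. The class and the fixed mailbox are hypothetical and not part of this changeset; only the interface method above and the RequestItemAuthor(name, email) constructor used elsewhere in this diff are assumed.

```java
package org.dspace.app.requestitem;

import java.sql.SQLException;

import org.dspace.content.Item;
import org.dspace.core.Context;

// Hypothetical example: route every request-a-copy mail to a fixed support mailbox.
public class FixedMailboxStrategy implements RequestItemAuthorExtractor {

    @Override
    public RequestItemAuthor getRequestItemAuthor(Context context, Item item) throws SQLException {
        // Per the updated javadoc, returning null is allowed when no valid address is available.
        return new RequestItemAuthor("Repository Support", "support@example.org");
    }
}
```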


@@ -74,8 +74,8 @@ public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
return new RequestItemAuthor(helpdeskEPerson);
} else {
String helpdeskName = I18nUtil.getMessage(
"org.dspace.app.requestitem.RequestItemHelpdeskStrategy.helpdeskname",
context);
return new RequestItemAuthor(helpdeskName, helpDeskEmail);
}
}


@@ -16,6 +16,7 @@ import org.dspace.content.MetadataValue;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
@@ -38,6 +39,7 @@ public class RequestItemMetadataStrategy extends RequestItemSubmitterStrategy {
@Override
public RequestItemAuthor getRequestItemAuthor(Context context, Item item)
throws SQLException {
RequestItemAuthor author = null;
if (emailMetadata != null) {
List<MetadataValue> vals = itemService.getMetadataByMetadataString(item, emailMetadata);
if (vals.size() > 0) {
@@ -49,19 +51,38 @@ public class RequestItemMetadataStrategy extends RequestItemSubmitterStrategy {
fullname = nameVals.iterator().next().getValue();
}
}
if (StringUtils.isBlank(fullname)) {
fullname = I18nUtil
.getMessage(
"org.dspace.app.requestitem.RequestItemMetadataStrategy.unnamed",
context);
}
- RequestItemAuthor author = new RequestItemAuthor(
-     fullname, email);
+ author = new RequestItemAuthor(fullname, email);
return author;
}
} else {
// Uses the basic strategy to look for the original submitter
author = super.getRequestItemAuthor(context, item);
// If the author or their email is null, fall back to the help desk or admin name and email
if (null == author || null == author.getEmail()) {
String email = null;
String name = null;
//First get help desk name and email
email = DSpaceServicesFactory.getInstance()
.getConfigurationService().getProperty("mail.helpdesk");
name = DSpaceServicesFactory.getInstance()
.getConfigurationService().getProperty("mail.helpdesk.name");
// If help desk mail is null get the mail and name of admin
if (email == null) {
email = DSpaceServicesFactory.getInstance()
.getConfigurationService().getProperty("mail.admin");
name = DSpaceServicesFactory.getInstance()
.getConfigurationService().getProperty("mail.admin.name");
}
author = new RequestItemAuthor(name, email);
}
}
- return super.getRequestItemAuthor(context, item);
+ return author;
}
public void setEmailMetadata(String emailMetadata) {


@@ -23,13 +23,22 @@ public class RequestItemSubmitterStrategy implements RequestItemAuthorExtractor
public RequestItemSubmitterStrategy() {
}
/**
* Returns the submitter of an Item as RequestItemAuthor or null if the
* Submitter is deleted.
*
* @return The submitter of the item or null if the submitter is deleted
* @throws SQLException if database error
*/
@Override
public RequestItemAuthor getRequestItemAuthor(Context context, Item item)
throws SQLException {
EPerson submitter = item.getSubmitter();
- RequestItemAuthor author = new RequestItemAuthor(
-     submitter.getFullName(), submitter.getEmail());
+ RequestItemAuthor author = null;
+ if (null != submitter) {
author = new RequestItemAuthor(
submitter.getFullName(), submitter.getEmail());
}
return author;
}
}


@@ -27,6 +27,7 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
@@ -84,6 +85,9 @@ public class GenerateSitemaps {
options
.addOption("p", "ping", true,
"ping specified search engine URL");
options
.addOption("d", "delete", false,
"delete sitemaps dir and its contents");
CommandLine line = null;
@@ -105,10 +109,9 @@ public class GenerateSitemaps {
}
/*
- * Sanity check -- if no sitemap generation or pinging to do, print
- * usage
+ * Sanity check -- if no sitemap generation or pinging to do, or deletion, print usage
*/
- if (line.getArgs().length != 0 || line.hasOption('b')
+ if (line.getArgs().length != 0 || line.hasOption('d') || line.hasOption('b')
&& line.hasOption('s') && !line.hasOption('g')
&& !line.hasOption('m') && !line.hasOption('y')
&& !line.hasOption('p')) {
@@ -123,6 +126,10 @@ public class GenerateSitemaps {
generateSitemaps(!line.hasOption('b'), !line.hasOption('s'));
}
if (line.hasOption('d')) {
deleteSitemaps();
}
if (line.hasOption('a')) {
pingConfiguredSearchEngines();
}
@@ -140,6 +147,29 @@ public class GenerateSitemaps {
System.exit(0);
}
/**
* Runs generate-sitemaps without any params for the scheduler (task-scheduler.xml).
*
* @throws SQLException if a database error occurs.
* @throws IOException if IO error occurs.
*/
public static void generateSitemapsScheduled() throws IOException, SQLException {
generateSitemaps(true, true);
}
/**
* Delete the sitemaps directory and its contents if it exists
* @throws IOException if IO error occurs
*/
public static void deleteSitemaps() throws IOException {
File outputDir = new File(configurationService.getProperty("sitemap.dir"));
if (!outputDir.exists() && !outputDir.isDirectory()) {
log.error("Unable to delete sitemaps directory, doesn't exist or isn't a directory");
} else {
FileUtils.deleteDirectory(outputDir);
}
}
/**
* Generate sitemap.org protocol and/or basic HTML sitemaps.
*
@@ -150,14 +180,9 @@ public class GenerateSitemaps {
* @throws IOException if IO error
*                     if IO error occurs.
*/
- public static void generateSitemaps(boolean makeHTMLMap,
-     boolean makeSitemapOrg) throws SQLException, IOException {
-     String sitemapStem = configurationService.getProperty("dspace.ui.url")
-         + "/sitemap";
-     String htmlMapStem = configurationService.getProperty("dspace.ui.url")
-         + "/htmlmap";
-     String handleURLStem = configurationService.getProperty("dspace.ui.url")
-         + "/handle/";
+ public static void generateSitemaps(boolean makeHTMLMap, boolean makeSitemapOrg) throws SQLException, IOException {
+     String uiURLStem = configurationService.getProperty("dspace.ui.url");
+     String sitemapStem = uiURLStem + "/sitemap";
File outputDir = new File(configurationService.getProperty("sitemap.dir"));
if (!outputDir.exists() && !outputDir.mkdir()) {
@@ -168,13 +193,11 @@ public class GenerateSitemaps {
AbstractGenerator sitemapsOrg = null;
if (makeHTMLMap) {
- html = new HTMLSitemapGenerator(outputDir, htmlMapStem + "?map=",
-     null);
+ html = new HTMLSitemapGenerator(outputDir, sitemapStem, ".html");
}
if (makeSitemapOrg) {
- sitemapsOrg = new SitemapsOrgGenerator(outputDir, sitemapStem
-     + "?map=", null);
+ sitemapsOrg = new SitemapsOrgGenerator(outputDir, sitemapStem, ".xml");
}
Context c = new Context(Context.Mode.READ_ONLY);
@@ -182,7 +205,7 @@ public class GenerateSitemaps {
List<Community> comms = communityService.findAll(c);
for (Community comm : comms) {
- String url = handleURLStem + comm.getHandle();
+ String url = uiURLStem + "/communities/" + comm.getID();
if (makeHTMLMap) {
html.addURL(url, null);
@@ -197,7 +220,7 @@ public class GenerateSitemaps {
List<Collection> colls = collectionService.findAll(c);
for (Collection coll : colls) {
- String url = handleURLStem + coll.getHandle();
+ String url = uiURLStem + "/collections/" + coll.getID();
if (makeHTMLMap) {
html.addURL(url, null);
@@ -214,7 +237,7 @@ public class GenerateSitemaps {
while (allItems.hasNext()) {
Item i = allItems.next();
- String url = handleURLStem + i.getHandle();
+ String url = uiURLStem + "/items/" + i.getID();
Date lastMod = i.getLastModified();
if (makeHTMLMap) {
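As a usage note for the new parameter-less entry point added above, the sketch below shows how a scheduler hook could invoke it. The wrapper class and its package are hypothetical; per the javadoc, the real wiring in DSpace lives in task-scheduler.xml, and only GenerateSitemaps.generateSitemapsScheduled() comes from this changeset.

```java
package org.dspace.example;

import java.io.IOException;
import java.sql.SQLException;

import org.dspace.app.sitemap.GenerateSitemaps;

// Hypothetical scheduled task: regenerate both HTML and sitemaps.org maps with default options.
public class SitemapRefreshTask {

    public void run() {
        try {
            GenerateSitemaps.generateSitemapsScheduled();
        } catch (IOException | SQLException e) {
            // Log and let the next scheduled run retry.
            e.printStackTrace();
        }
    }
}
```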


@@ -59,7 +59,7 @@ public class SitemapsOrgGenerator extends AbstractGenerator {
@Override
public String getFilename(int number) {
- return "sitemap" + number + ".xml.gz";
+ return "sitemap" + number + ".xml";
}
@Override
@@ -100,12 +100,12 @@ public class SitemapsOrgGenerator extends AbstractGenerator {
@Override
public boolean useCompression() {
- return true;
+ return false;
}
@Override
public String getIndexFilename() {
- return "sitemap_index.xml.gz";
+ return "sitemap_index.xml";
}
@Override


@@ -12,6 +12,7 @@ import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import javax.annotation.Nullable;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.MetadataSchemaEnum;
@@ -291,7 +292,7 @@ public class DCInput {
*
* @return the input type
*/
- public String getInputType() {
+ public @Nullable String getInputType() {
return inputType;
}


@@ -10,6 +10,7 @@ package org.dspace.app.util;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.dspace.core.Utils;
/**
* Class representing all DC inputs required for a submission, organized into pages
@@ -109,7 +110,7 @@ public class DCInputSet {
for (int j = 0; j < inputs[i].length; j++) {
DCInput field = inputs[i][j];
// If this is a "qualdrop_value" field, then the full field name is the field + dropdown qualifier
- if (field.getInputType().equals("qualdrop_value")) {
+ if (StringUtils.equals(field.getInputType(), "qualdrop_value")) {
List<String> pairs = field.getPairs();
for (int k = 0; k < pairs.size(); k += 2) {
String qualifier = pairs.get(k + 1);
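A small self-contained snippet (illustrative only, not part of the codebase) showing why the switch to StringUtils.equals matters: getInputType() is now annotated @Nullable, so calling .equals() directly on it could throw a NullPointerException, while StringUtils.equals treats null as a non-match.

```java
import org.apache.commons.lang3.StringUtils;

public class NullSafeComparisonDemo {
    public static void main(String[] args) {
        String inputType = null; // e.g. a DCInput row with no input type defined
        // inputType.equals("qualdrop_value") would throw a NullPointerException here.
        System.out.println(StringUtils.equals(inputType, "qualdrop_value")); // prints "false"
    }
}
```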


@@ -614,6 +614,12 @@ public class AuthorizeServiceImpl implements AuthorizeService {
resourcePolicyService.removeDsoEPersonPolicies(c, o, e);
}
@Override
public void removeAllEPersonPolicies(Context c, EPerson e)
throws SQLException, AuthorizeException {
resourcePolicyService.removeAllEPersonPolicies(c, e);
}
@Override
public List<Group> getAuthorizedGroups(Context c, DSpaceObject o,
int actionID) throws java.sql.SQLException {


@@ -114,6 +114,11 @@ public class ResourcePolicyServiceImpl implements ResourcePolicyService {
return resourcePolicyDAO.findByEPersonGroupTypeIdAction(c, e, groups, action, type_id);
}
@Override
public List<ResourcePolicy> find(Context context, EPerson ePerson) throws SQLException {
return resourcePolicyDAO.findByEPerson(context, ePerson);
}
@Override
public List<ResourcePolicy> findByTypeGroupActionExceptId(Context context, DSpaceObject dso, Group group,
int action, int notPolicyID)
@@ -246,6 +251,11 @@ public class ResourcePolicyServiceImpl implements ResourcePolicyService {
}
@Override
public void removeAllEPersonPolicies(Context context, EPerson ePerson) throws SQLException, AuthorizeException {
resourcePolicyDAO.deleteByEPerson(context, ePerson);
}
@Override
public void removeGroupPolicies(Context c, Group group) throws SQLException {
resourcePolicyDAO.deleteByGroup(c, group);


@@ -33,6 +33,8 @@ public interface ResourcePolicyDAO extends GenericDAO<ResourcePolicy> {
public List<ResourcePolicy> findByDsoAndType(Context context, DSpaceObject dSpaceObject, String type)
throws SQLException;
public List<ResourcePolicy> findByEPerson(Context context, EPerson ePerson) throws SQLException;
public List<ResourcePolicy> findByGroup(Context context, Group group) throws SQLException;
public List<ResourcePolicy> findByDSoAndAction(Context context, DSpaceObject dso, int actionId) throws SQLException;
@@ -66,6 +68,15 @@ public interface ResourcePolicyDAO extends GenericDAO<ResourcePolicy> {
public void deleteByDsoEPersonPolicies(Context context, DSpaceObject dso, EPerson ePerson) throws SQLException;
/**
* Deletes all policies that belong to an EPerson
*
* @param context DSpace context object
* @param ePerson ePerson whose policies to delete
* @throws SQLException if database error
*/
public void deleteByEPerson(Context context, EPerson ePerson) throws SQLException;
public void deleteByDsoAndTypeNotEqualsTo(Context c, DSpaceObject o, String type) throws SQLException;
/**
@@ -101,7 +112,7 @@ public interface ResourcePolicyDAO extends GenericDAO<ResourcePolicy> {
* @return total resource policies of the ePerson
* @throws SQLException if database error
*/
- public int countByEPerson(Context context, EPerson eperson) throws SQLException;
+ public int countByEPerson(Context context, EPerson ePerson) throws SQLException;
/**
* Return a paginated list of policies related to a resourceUuid belong to an ePerson


@@ -63,6 +63,16 @@ public class ResourcePolicyDAOImpl extends AbstractHibernateDAO<ResourcePolicy>
return list(context, criteriaQuery, false, ResourcePolicy.class, -1, -1);
}
@Override
public List<ResourcePolicy> findByEPerson(Context context, EPerson ePerson) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, ResourcePolicy.class);
Root<ResourcePolicy> resourcePolicyRoot = criteriaQuery.from(ResourcePolicy.class);
criteriaQuery.select(resourcePolicyRoot);
criteriaQuery.where(criteriaBuilder.equal(resourcePolicyRoot.get(ResourcePolicy_.eperson), ePerson));
return list(context, criteriaQuery, false, ResourcePolicy.class, -1, -1);
}
@Override
public List<ResourcePolicy> findByGroup(Context context, Group group) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
@@ -194,6 +204,15 @@ public class ResourcePolicyDAOImpl extends AbstractHibernateDAO<ResourcePolicy>
}
@Override
public void deleteByEPerson(Context context, EPerson ePerson) throws SQLException {
String queryString = "delete from ResourcePolicy where eperson= :eperson";
Query query = createQuery(context, queryString);
query.setParameter("eperson", ePerson);
query.executeUpdate();
}
@Override
public void deleteByDsoAndTypeNotEqualsTo(Context context, DSpaceObject dso, String type) throws SQLException {
@@ -247,10 +266,10 @@ public class ResourcePolicyDAOImpl extends AbstractHibernateDAO<ResourcePolicy>
}
@Override
- public int countByEPerson(Context context, EPerson eperson) throws SQLException {
+ public int countByEPerson(Context context, EPerson ePerson) throws SQLException {
Query query = createQuery(context,
"SELECT count(*) FROM " + ResourcePolicy.class.getSimpleName() + " WHERE eperson_id = (:epersonUuid) ");
- query.setParameter("epersonUuid", eperson.getID());
+ query.setParameter("epersonUuid", ePerson.getID());
return count(query);
}


@@ -449,6 +449,16 @@ public interface AuthorizeService {
*/
public void removeEPersonPolicies(Context c, DSpaceObject o, EPerson e) throws SQLException, AuthorizeException;
/**
* Removes all policies that belong to an EPerson.
*
* @param c current context
* @param e the eperson
* @throws SQLException if there's a database problem
* @throws AuthorizeException if authorization error
*/
public void removeAllEPersonPolicies(Context c, EPerson e) throws SQLException, AuthorizeException;
/**
* Returns all groups authorized to perform an action on an object. Returns
* empty array if no matches.


@@ -39,6 +39,16 @@ public interface ResourcePolicyService extends DSpaceCRUDService<ResourcePolicy>
public List<ResourcePolicy> find(Context context, Group group) throws SQLException;
/**
* Retrieve a list of ResourcePolicies by EPerson
*
* @param c context
* @param ePerson the EPerson for which to look up the resource policies
* @return a list of ResourcePolicies for the provided EPerson
* @throws SQLException if there's a database problem
*/
public List<ResourcePolicy> find(Context c, EPerson ePerson) throws SQLException;
public List<ResourcePolicy> find(Context c, EPerson e, List<Group> groups, int action, int type_id)
throws SQLException;
@@ -72,6 +82,16 @@ public interface ResourcePolicyService extends DSpaceCRUDService<ResourcePolicy>
public void removeDsoEPersonPolicies(Context context, DSpaceObject dso, EPerson ePerson)
throws SQLException, AuthorizeException;
/**
* Removes all ResourcePolicies related to an EPerson
*
* @param context context
* @param ePerson the EPerson for which the ResourcePolicies will be deleted
* @throws SQLException if there's a database problem
* @throws AuthorizeException when the current user is not authorized
*/
public void removeAllEPersonPolicies(Context context, EPerson ePerson) throws SQLException, AuthorizeException;
public void removeGroupPolicies(Context c, Group group) throws SQLException;
public void removeDsoAndTypeNotEqualsToPolicies(Context c, DSpaceObject o, String type)
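A minimal sketch of how the new EPerson-scoped methods could be used together, for example before removing an account. The helper class is hypothetical and the factory lookup is an assumption based on common DSpace patterns; find(Context, EPerson) and removeAllEPersonPolicies(...) are the methods added in this changeset.

```java
package org.dspace.example;

import java.sql.SQLException;
import java.util.List;

import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;

// Hypothetical helper: inspect and then remove every ResourcePolicy owned by an EPerson.
public class EPersonPolicyCleanup {

    public void cleanUp(Context context, EPerson ePerson) throws SQLException, AuthorizeException {
        ResourcePolicyService resourcePolicyService =
                AuthorizeServiceFactory.getInstance().getResourcePolicyService();
        List<ResourcePolicy> policies = resourcePolicyService.find(context, ePerson);
        if (!policies.isEmpty()) {
            resourcePolicyService.removeAllEPersonPolicies(context, ePerson);
        }
    }
}
```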


@@ -57,7 +57,6 @@ import org.dspace.harvest.HarvestedCollection;
import org.dspace.harvest.service.HarvestedCollectionService;
import org.dspace.workflow.factory.WorkflowServiceFactory;
import org.dspace.xmlworkflow.WorkflowConfigurationException;
import org.dspace.xmlworkflow.XmlWorkflowFactoryImpl;
import org.dspace.xmlworkflow.factory.XmlWorkflowFactory;
import org.dspace.xmlworkflow.state.Workflow;
import org.dspace.xmlworkflow.storedcomponents.CollectionRole;
@@ -387,7 +386,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
log.error(LogManager.getHeader(context, "setWorkflowGroup",
"collection_id=" + collection.getID() + " " + e.getMessage()), e);
}
- if (!StringUtils.equals(XmlWorkflowFactoryImpl.LEGACY_WORKFLOW_NAME, workflow.getID())) {
+ if (!StringUtils.equals(workflowFactory.getDefaultWorkflow().getID(), workflow.getID())) {
throw new IllegalArgumentException(
"setWorkflowGroup can be used only on collection with the default basic dspace workflow. "
+ "Instead, the collection: "


@@ -629,6 +629,10 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
case Constants.DELETE:
if (AuthorizeConfiguration.canCommunityAdminPerformSubelementDeletion()) {
adminObject = getParentObject(context, community);
if (adminObject == null) {
//top-level community, has to be admin of the current community
adminObject = community;
}
}
break;
case Constants.ADD:


@@ -207,8 +207,8 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
}
@Override
- public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
-     List<String> values) throws SQLException {
+ public List<MetadataValue> addMetadata(Context context, T dso, String schema, String element, String qualifier,
+     String lang, List<String> values) throws SQLException {
MetadataField metadataField = metadataFieldService.findByElement(context, schema, element, qualifier);
if (metadataField == null) {
throw new SQLException(
@@ -216,12 +216,12 @@
"exist!");
}
- addMetadata(context, dso, metadataField, lang, values);
+ return addMetadata(context, dso, metadataField, lang, values);
}
@Override
- public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
-     List<String> values, List<String> authorities, List<Integer> confidences)
+ public List<MetadataValue> addMetadata(Context context, T dso, String schema, String element, String qualifier,
+     String lang, List<String> values, List<String> authorities, List<Integer> confidences)
throws SQLException {
// We will not verify that they are valid entries in the registry
// until update() is called.
@@ -231,15 +231,16 @@
"bad_dublin_core schema=" + schema + "." + element + "." + qualifier + ". Metadata field does not " +
"exist!");
}
- addMetadata(context, dso, metadataField, lang, values, authorities, confidences);
+ return addMetadata(context, dso, metadataField, lang, values, authorities, confidences);
}
@Override
- public void addMetadata(Context context, T dso, MetadataField metadataField, String lang, List<String> values,
-     List<String> authorities, List<Integer> confidences)
+ public List<MetadataValue> addMetadata(Context context, T dso, MetadataField metadataField, String lang,
+     List<String> values, List<String> authorities, List<Integer> confidences)
throws SQLException {
boolean authorityControlled = metadataAuthorityService.isAuthorityControlled(metadataField);
boolean authorityRequired = metadataAuthorityService.isAuthorityRequired(metadataField);
List<MetadataValue> newMetadata = new ArrayList<>(values.size());
// We will not verify that they are valid entries in the registry
// until update() is called.
for (int i = 0; i < values.size(); i++) {
@@ -250,6 +251,7 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
} }
} }
MetadataValue metadataValue = metadataValueService.create(context, dso, metadataField); MetadataValue metadataValue = metadataValueService.create(context, dso, metadataField);
newMetadata.add(metadataValue);
//Set place to list length of all metadatavalues for the given schema.element.qualifier combination. //Set place to list length of all metadatavalues for the given schema.element.qualifier combination.
// Subtract one to adhere to the 0 as first element rule // Subtract one to adhere to the 0 as first element rule
metadataValue.setPlace( metadataValue.setPlace(
@@ -304,29 +306,31 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
// metadataValueService.update(context, metadataValue); // metadataValueService.update(context, metadataValue);
dso.addDetails(metadataField.toString()); dso.addDetails(metadataField.toString());
} }
return newMetadata;
} }
@Override @Override
public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value, public MetadataValue addMetadata(Context context, T dso, MetadataField metadataField, String language,
String authority, int confidence) throws SQLException { String value, String authority, int confidence) throws SQLException {
addMetadata(context, dso, metadataField, language, Arrays.asList(value), Arrays.asList(authority), return addMetadata(context, dso, metadataField, language, Arrays.asList(value), Arrays.asList(authority),
Arrays.asList(confidence)); Arrays.asList(confidence)).get(0);
} }
@Override @Override
public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang, public MetadataValue addMetadata(Context context, T dso, String schema, String element, String qualifier,
String value) throws SQLException { String lang, String value) throws SQLException {
addMetadata(context, dso, schema, element, qualifier, lang, Arrays.asList(value)); return addMetadata(context, dso, schema, element, qualifier, lang, Arrays.asList(value)).get(0);
} }
@Override @Override
public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value) public MetadataValue addMetadata(Context context, T dso, MetadataField metadataField, String language, String value)
throws SQLException { throws SQLException {
addMetadata(context, dso, metadataField, language, Arrays.asList(value)); return addMetadata(context, dso, metadataField, language, Arrays.asList(value)).get(0);
} }
@Override @Override
public void addMetadata(Context context, T dso, MetadataField metadataField, String language, List<String> values) public List<MetadataValue> addMetadata(Context context, T dso, MetadataField metadataField, String language,
List<String> values)
throws SQLException { throws SQLException {
if (metadataField != null) { if (metadataField != null) {
String fieldKey = metadataAuthorityService String fieldKey = metadataAuthorityService
@@ -343,18 +347,19 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
getAuthoritiesAndConfidences(fieldKey, null, values, authorities, confidences, i); getAuthoritiesAndConfidences(fieldKey, null, values, authorities, confidences, i);
} }
} }
addMetadata(context, dso, metadataField, language, values, authorities, confidences); return addMetadata(context, dso, metadataField, language, values, authorities, confidences);
} else { } else {
addMetadata(context, dso, metadataField, language, values, null, null); return addMetadata(context, dso, metadataField, language, values, null, null);
} }
} }
return new ArrayList<>(0);
} }
@Override @Override
public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang, public MetadataValue addMetadata(Context context, T dso, String schema, String element, String qualifier,
String value, String authority, int confidence) throws SQLException { String lang, String value, String authority, int confidence) throws SQLException {
addMetadata(context, dso, schema, element, qualifier, lang, Arrays.asList(value), Arrays.asList(authority), return addMetadata(context, dso, schema, element, qualifier, lang, Arrays.asList(value),
Arrays.asList(confidence)); Arrays.asList(authority), Arrays.asList(confidence)).get(0);
} }
@Override @Override
@@ -660,33 +665,35 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
@Override @Override
public void addAndShiftRightMetadata(Context context, T dso, String schema, String element, String qualifier, public void addAndShiftRightMetadata(Context context, T dso, String schema, String element, String qualifier,
String lang, String value, String authority, int confidence, int index) String lang, String value, String authority, int confidence, int index)
throws SQLException { throws SQLException {
List<MetadataValue> list = getMetadata(dso, schema, element, qualifier); List<MetadataValue> list = getMetadata(dso, schema, element, qualifier);
clearMetadata(context, dso, schema, element, qualifier, Item.ANY);
int idx = 0; int idx = 0;
int place = 0;
boolean last = true; boolean last = true;
for (MetadataValue rr : list) { for (MetadataValue rr : list) {
if (idx == index) { if (idx == index) {
addMetadata(context, dso, schema, element, qualifier, MetadataValue newMetadata = addMetadata(context, dso, schema, element, qualifier,
lang, value, authority, confidence); lang, value, authority, confidence);
moveSingleMetadataValue(context, dso, place, newMetadata);
place++;
last = false; last = false;
} }
addMetadata(context, dso, schema, element, qualifier, moveSingleMetadataValue(context, dso, place, rr);
rr.getLanguage(), rr.getValue(), rr.getAuthority(), rr.getConfidence()); place++;
idx++; idx++;
} }
if (last) { if (last) {
addMetadata(context, dso, schema, element, qualifier, addMetadata(context, dso, schema, element, qualifier,
lang, value, authority, confidence); lang, value, authority, confidence);
} }
} }
@Override @Override
public void moveMetadata(Context context, T dso, String schema, String element, String qualifier, int from, int to) public void moveMetadata(Context context, T dso, String schema, String element, String qualifier, int from, int to)
throws SQLException, IllegalArgumentException { throws SQLException, IllegalArgumentException {
if (from == to) { if (from == to) {
throw new IllegalArgumentException("The \"from\" location MUST be different from \"to\" location"); throw new IllegalArgumentException("The \"from\" location MUST be different from \"to\" location");
@@ -701,8 +708,6 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
"\n Idx from:" + from + " Idx to: " + to); "\n Idx from:" + from + " Idx to: " + to);
} }
clearMetadata(context, dso, schema, element, qualifier, Item.ANY);
int idx = 0; int idx = 0;
MetadataValue moved = null; MetadataValue moved = null;
for (MetadataValue md : list) { for (MetadataValue md : list) {
@@ -714,49 +719,46 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
} }
idx = 0; idx = 0;
int place = 0;
boolean last = true; boolean last = true;
for (MetadataValue rr : list) { for (MetadataValue rr : list) {
if (idx == to && to < from) { if (idx == to && to < from) {
addMetadata(context, dso, schema, element, qualifier, moved.getLanguage(), moved.getValue(), moveSingleMetadataValue(context, dso, place, moved);
moved.getAuthority(), moved.getConfidence()); place++;
last = false; last = false;
} }
if (idx != from) { if (idx != from) {
addMetadata(context, dso, schema, element, qualifier, rr.getLanguage(), rr.getValue(), moveSingleMetadataValue(context, dso, place, rr);
rr.getAuthority(), rr.getConfidence()); place++;
} }
if (idx == to && to > from) { if (idx == to && to > from) {
addMetadata(context, dso, schema, element, qualifier, moved.getLanguage(), moved.getValue(), moveSingleMetadataValue(context, dso, place, moved);
moved.getAuthority(), moved.getConfidence()); place++;
last = false; last = false;
} }
idx++; idx++;
} }
if (last) { if (last) {
addMetadata(context, dso, schema, element, qualifier, moved.getLanguage(), moved.getValue(), moveSingleMetadataValue(context, dso, place, moved);
moved.getAuthority(), moved.getConfidence());
} }
} }
/**
* Supports moving metadata by updating the place of the metadata value
*/
protected void moveSingleMetadataValue(Context context, T dso, int place, MetadataValue rr) {
//just move the metadata
rr.setPlace(place);
}
@Override @Override
public void replaceMetadata(Context context, T dso, String schema, String element, String qualifier, String lang, public void replaceMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
String value, String authority, int confidence, int index) throws SQLException { String value, String authority, int confidence, int index) throws SQLException {
List<MetadataValue> list = getMetadata(dso, schema, element, qualifier); List<MetadataValue> list = getMetadata(dso, schema, element, qualifier);
clearMetadata(context, dso, schema, element, qualifier, Item.ANY); removeMetadataValues(context, dso, Arrays.asList(list.get(index)));
addAndShiftRightMetadata(context, dso, schema, element, qualifier, lang, value, authority, confidence, index);
int idx = 0;
for (MetadataValue rr : list) {
if (idx == index) {
addMetadata(context, dso, schema, element, qualifier,
lang, value, authority, confidence);
} else {
addMetadata(context, dso, schema, element, qualifier,
rr.getLanguage(), rr.getValue(), rr.getAuthority(), rr.getConfidence());
}
idx++;
}
} }
@Override @Override
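A minimal usage sketch of the reworked addMetadata contract above. The single-value variants now return the created MetadataValue and the multi-value variants return one MetadataValue per supplied string; itemService, context, item and log are assumed to be available from the caller's scope, and the field names are illustrative only.

    MetadataValue title = itemService.addMetadata(context, item, "dc", "title", null, null, "A sample title");
    // The returned object can be inspected or adjusted directly, without re-reading the metadata list.
    log.debug("New title stored at place " + title.getPlace());

    List<MetadataValue> subjects = itemService.addMetadata(context, item, "dc", "subject", null, null,
            Arrays.asList("history", "archives"));
    // subjects.size() == 2: one MetadataValue per supplied string, in the order they were passed.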


@@ -7,6 +7,7 @@
*/ */
package org.dspace.content; package org.dspace.content;
import java.util.Objects;
import javax.persistence.Column; import javax.persistence.Column;
import javax.persistence.Entity; import javax.persistence.Entity;
import javax.persistence.GeneratedValue; import javax.persistence.GeneratedValue;
@@ -15,6 +16,8 @@ import javax.persistence.Id;
import javax.persistence.SequenceGenerator; import javax.persistence.SequenceGenerator;
import javax.persistence.Table; import javax.persistence.Table;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.dspace.core.ReloadableEntity; import org.dspace.core.ReloadableEntity;
/** /**
@@ -45,7 +48,8 @@ public class EntityType implements ReloadableEntity<Integer> {
/** /**
* The standard setter for the ID of this EntityType * The standard setter for the ID of this EntityType
* @param id The ID that this EntityType's ID will be set to *
* @param id The ID that this EntityType's ID will be set to
*/ */
public void setId(Integer id) { public void setId(Integer id) {
this.id = id; this.id = id;
@@ -53,7 +57,8 @@ public class EntityType implements ReloadableEntity<Integer> {
/** /**
* The standard getter for the label of this EntityType * The standard getter for the label of this EntityType
* @return The label for this EntityType *
* @return The label for this EntityType
*/ */
public String getLabel() { public String getLabel() {
return label; return label;
@@ -61,6 +66,7 @@ public class EntityType implements ReloadableEntity<Integer> {
/** /**
* The standard setter for the label of this EntityType * The standard setter for the label of this EntityType
*
* @param label The label that this EntityType's label will be set to * @param label The label that this EntityType's label will be set to
*/ */
public void setLabel(String label) { public void setLabel(String label) {
@@ -69,9 +75,40 @@ public class EntityType implements ReloadableEntity<Integer> {
/** /**
* The standard getter for the ID of this EntityType * The standard getter for the ID of this EntityType
* @return The ID for this EntityType *
* @return The ID for this EntityType
*/ */
public Integer getID() { public Integer getID() {
return id; return id;
} }
/**
* Determines whether two entity types are equal based on the id and the label
* @param obj object to be compared
* @return true if the given object is an EntityType with the same id and label, false otherwise
*/
public boolean equals(Object obj) {
if (!(obj instanceof EntityType)) {
return false;
}
EntityType entityType = (EntityType) obj;
if (!Objects.equals(this.getID(), entityType.getID())) {
return false;
}
if (!StringUtils.equals(this.getLabel(), entityType.getLabel())) {
return false;
}
return true;
}
/**
* Returns a hash code value for the object.
* @return hash code value
*/
@Override
public int hashCode() {
return new HashCodeBuilder().append(getID()).toHashCode();
}
} }
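A short sketch of what the new value semantics enable: because equals() compares id and label and hashCode() is derived from the id, EntityType instances can be de-duplicated in hash-based collections. entityTypeService, context and log are assumed from the surrounding scope, and findAll(context) is assumed to list all configured entity types.

    Set<EntityType> seen = new HashSet<>();
    for (EntityType type : entityTypeService.findAll(context)) {
        // add() returns false when an equal instance (same id and label) is already present
        if (!seen.add(type)) {
            log.warn("Duplicate entity type definition: " + type.getLabel());
        }
    }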


@@ -230,6 +230,12 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
return itemDAO.findBySubmitter(context, eperson); return itemDAO.findBySubmitter(context, eperson);
} }
@Override
public Iterator<Item> findBySubmitter(Context context, EPerson eperson, boolean retrieveAllItems)
throws SQLException {
return itemDAO.findBySubmitter(context, eperson, retrieveAllItems);
}
@Override @Override
public Iterator<Item> findBySubmitterDateSorted(Context context, EPerson eperson, Integer limit) public Iterator<Item> findBySubmitterDateSorted(Context context, EPerson eperson, Integer limit)
throws SQLException { throws SQLException {
@@ -1100,19 +1106,7 @@ prevent the generation of resource policy entry values with null dspace_object a
} }
break; break;
case Constants.DELETE: case Constants.DELETE:
if (item.getOwningCollection() != null) { adminObject = item;
if (AuthorizeConfiguration.canCollectionAdminPerformItemDeletion()) {
adminObject = collection;
} else if (AuthorizeConfiguration.canCommunityAdminPerformItemDeletion()) {
adminObject = community;
}
} else {
if (AuthorizeConfiguration.canCollectionAdminManageTemplateItem()) {
adminObject = collection;
} else if (AuthorizeConfiguration.canCommunityAdminManageCollectionTemplateItem()) {
adminObject = community;
}
}
break; break;
case Constants.WRITE: case Constants.WRITE:
// if it is a template item we need to check the // if it is a template item we need to check the
@@ -1372,6 +1366,32 @@ prevent the generation of resource policy entry values with null dspace_object a
} }
/**
* Supports moving metadata by updating the place of the plain metadata value or of the underlying relationship
*/
@Override
protected void moveSingleMetadataValue(Context context, Item dso, int place, MetadataValue rr) {
if (rr instanceof RelationshipMetadataValue) {
try {
//Retrieve the applicable relationship
Relationship rs = relationshipService.find(context,
((RelationshipMetadataValue) rr).getRelationshipId());
if (rs.getLeftItem() == dso) {
rs.setLeftPlace(place);
} else {
rs.setRightPlace(place);
}
relationshipService.update(context, rs);
} catch (Exception e) {
//should not occur, otherwise metadata can't be updated either
log.error("An error occurred while moving " + rr.getAuthority() + " for item " + dso.getID(), e);
}
} else {
//just move the metadata
rr.setPlace(place);
}
}
/** /**
* This method will sort the List of MetadataValue objects based on the MetadataSchema, MetadataField Element, * This method will sort the List of MetadataValue objects based on the MetadataSchema, MetadataField Element,
* MetadataField Qualifier and MetadataField Place in that order. * MetadataField Qualifier and MetadataField Place in that order.
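A hedged usage sketch of the new three-argument findBySubmitter shown above; itemService, context and eperson are assumed from the caller's scope, and the exact meaning of retrieveAllItems is delegated to the DAO implementation.

    // Pass true to ask the DAO for every item submitted by this EPerson rather than the default subset.
    Iterator<Item> submissions = itemService.findBySubmitter(context, eperson, true);
    while (submissions.hasNext()) {
        Item item = submissions.next();
        // process each item ...
    }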


@@ -9,6 +9,8 @@ package org.dspace.content;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List; import java.util.List;
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.CollectionUtils;
@@ -20,8 +22,12 @@ import org.dspace.content.dao.MetadataFieldDAO;
import org.dspace.content.service.MetadataFieldService; import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService; import org.dspace.content.service.MetadataSchemaService;
import org.dspace.content.service.MetadataValueService; import org.dspace.content.service.MetadataValueService;
import org.dspace.content.service.SiteService;
import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.LogManager; import org.dspace.core.LogManager;
import org.dspace.discovery.indexobject.IndexableMetadataField;
import org.dspace.event.Event;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
/** /**
@@ -46,6 +52,8 @@ public class MetadataFieldServiceImpl implements MetadataFieldService {
protected MetadataValueService metadataValueService; protected MetadataValueService metadataValueService;
@Autowired(required = true) @Autowired(required = true)
protected MetadataSchemaService metadataSchemaService; protected MetadataSchemaService metadataSchemaService;
@Autowired
protected SiteService siteService;
protected MetadataFieldServiceImpl() { protected MetadataFieldServiceImpl() {
@@ -77,6 +85,8 @@ public class MetadataFieldServiceImpl implements MetadataFieldService {
log.info(LogManager.getHeader(context, "create_metadata_field", log.info(LogManager.getHeader(context, "create_metadata_field",
"metadata_field_id=" + metadataField.getID())); "metadata_field_id=" + metadataField.getID()));
// Update the index of type metadatafield
this.triggerEventToUpdateIndex(context, metadataField.getID());
return metadataField; return metadataField;
} }
@@ -149,6 +159,8 @@ public class MetadataFieldServiceImpl implements MetadataFieldService {
"metadata_field_id=" + metadataField.getID() + "element=" + metadataField "metadata_field_id=" + metadataField.getID() + "element=" + metadataField
.getElement() .getElement()
+ "qualifier=" + metadataField.getQualifier())); + "qualifier=" + metadataField.getQualifier()));
// Update the index of type metadatafield
this.triggerEventToUpdateIndex(context, metadataField.getID());
} }
@Override @Override
@@ -177,6 +189,21 @@ public class MetadataFieldServiceImpl implements MetadataFieldService {
log.info(LogManager.getHeader(context, "delete_metadata_field", log.info(LogManager.getHeader(context, "delete_metadata_field",
"metadata_field_id=" + metadataField.getID())); "metadata_field_id=" + metadataField.getID()));
// Update the index of type metadatafield
this.triggerEventToUpdateIndex(context, metadataField.getID());
}
/**
* Fires a MODIFY SITE event with the identifier of the changed mdField, so it can be indexed in
* {@link org.dspace.discovery.IndexEventConsumer}, with type of {@link org.dspace.discovery.IndexableObject} in
* {@link Event}.detail and the identifiers of the changed mdFields in {@link Event}.identifiers
*
* @param context DSpace context
* @param mdFieldId ID of the metadata field that needs to be (re)indexed
*/
private void triggerEventToUpdateIndex(Context context, int mdFieldId) {
context.addEvent(new Event(Event.MODIFY, Constants.SITE, null, IndexableMetadataField.TYPE, new ArrayList<>(
Arrays.asList(Integer.toString(mdFieldId)))));
} }
/** /**


@@ -7,6 +7,8 @@
*/ */
package org.dspace.content; package org.dspace.content;
import org.dspace.core.Constants;
/** /**
* This class is used as a representation of MetadataValues for the MetadataValues that are derived from the * This class is used as a representation of MetadataValues for the MetadataValues that are derived from the
* Relationships that the item has. This includes the useForPlace property which we'll have to use to determine * Relationships that the item has. This includes the useForPlace property which we'll have to use to determine
@@ -57,4 +59,13 @@ public class RelationshipMetadataValue extends MetadataValue {
} }
return super.equals(obj); return super.equals(obj);
} }
/**
* Retrieves the Relationship ID from which the current RelationshipMetadataValue is derived
*
* @return the relationship ID
*/
public int getRelationshipId() {
return Integer.parseInt(getAuthority().substring(Constants.VIRTUAL_AUTHORITY_PREFIX.length()));
}
} }
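A small, self-contained illustration of the parsing performed by getRelationshipId() above. The literal "virtual::" is only an assumption standing in for Constants.VIRTUAL_AUTHORITY_PREFIX; check Constants for the real value.

    String prefix = "virtual::";                  // assumed value of Constants.VIRTUAL_AUTHORITY_PREFIX
    String authority = prefix + "42";             // authority string stored on a virtual metadata value
    int relationshipId = Integer.parseInt(authority.substring(prefix.length()));
    // relationshipId == 42, i.e. the id of the Relationship the value is derived from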


@@ -212,9 +212,8 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
*/ */
Item item = workspaceItem.getItem(); Item item = workspaceItem.getItem();
if (!authorizeService.isAdmin(context) if (!authorizeService.isAdmin(context)
&& ((context.getCurrentUser() == null) || (context && (item.getSubmitter() == null || (context.getCurrentUser() == null)
.getCurrentUser().getID() != item.getSubmitter() || (context.getCurrentUser().getID() != item.getSubmitter().getID()))) {
.getID()))) {
// Not an admit, not the submitter // Not an admit, not the submitter
throw new AuthorizeException("Must be an administrator or the " throw new AuthorizeException("Must be an administrator or the "
+ "original submitter to delete a workspace item"); + "original submitter to delete a workspace item");
@@ -265,7 +264,12 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
// Need to delete the workspaceitem row first since it refers // Need to delete the workspaceitem row first since it refers
// to item ID // to item ID
workspaceItem.getSupervisorGroups().clear(); try {
workspaceItem.getSupervisorGroups().clear();
} catch (Exception e) {
log.error("failed to clear supervisor group", e);
}
workspaceItemDAO.delete(context, workspaceItem); workspaceItemDAO.delete(context, workspaceItem);
} }


@@ -33,21 +33,62 @@ public class Choice {
*/ */
public String value = null; public String value = null;
/**
* A boolean indicating whether the choice entry can be selected (usually true).
* A hierarchical authority can flag some choices as not selectable to force the
* user to choose a more detailed term in the tree, such as a leaf or a deeper
* branch
*/
public boolean selectable = true;
public Map<String, String> extras = new HashMap<String, String>(); public Map<String, String> extras = new HashMap<String, String>();
public Choice() { public Choice() {
} }
/**
* Minimal constructor for this data object. It assumes an empty map of extra
* information and a selectable choice
*
* @param authority the authority key
* @param value the text value to store in the metadata
* @param label the value to display to the user
*/
public Choice(String authority, String value, String label) { public Choice(String authority, String value, String label) {
this.authority = authority; this.authority = authority;
this.value = value; this.value = value;
this.label = label; this.label = label;
} }
/**
* Constructor to quickly set up the data object for basic authorities. The choice is assumed to be selectable.
*
* @param authority the authority key
* @param value the text value to store in the metadata
* @param label the value to display to the user
* @param extras a key value map of extra information related to this choice
*/
public Choice(String authority, String label, String value, Map<String, String> extras) { public Choice(String authority, String label, String value, Map<String, String> extras) {
this.authority = authority; this.authority = authority;
this.label = label; this.label = label;
this.value = value; this.value = value;
this.extras = extras; this.extras = extras;
} }
/**
* Constructor for the common need of hierarchical authorities that want to
* explicitly set the selectable flag
*
* @param authority the authority key
* @param value the text value to store in the metadata
* @param label the value to display to the user
* @param selectable true if the choice can be selected, false if a more
* accurate choice should be preferred
*/
public Choice(String authority, String label, String value, boolean selectable) {
this.authority = authority;
this.label = label;
this.value = value;
this.selectable = selectable;
}
} }
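A brief sketch exercising the constructors above with illustrative values. Note the argument order: the three-argument constructor takes (authority, value, label), while the map and boolean variants take (authority, label, value, ...).

    Choice simple = new Choice("auth-1", "stored value", "Displayed label");

    Map<String, String> extras = new HashMap<>();
    extras.put("note", "extra information shown alongside the label");
    Choice withExtras = new Choice("auth-2", "Displayed label", "stored value", extras);

    // A hierarchical branch node that should not be picked directly by the user.
    Choice branch = new Choice("auth-3", "Displayed label", "stored value", false);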


@@ -7,7 +7,10 @@
*/ */
package org.dspace.content.authority; package org.dspace.content.authority;
import org.dspace.content.Collection; import java.util.HashMap;
import java.util.Map;
import org.dspace.core.NameAwarePlugin;
/** /**
* Plugin interface that supplies an authority control mechanism for * Plugin interface that supplies an authority control mechanism for
@@ -17,7 +20,7 @@ import org.dspace.content.Collection;
* @see ChoiceAuthorityServiceImpl * @see ChoiceAuthorityServiceImpl
* @see MetadataAuthorityServiceImpl * @see MetadataAuthorityServiceImpl
*/ */
public interface ChoiceAuthority { public interface ChoiceAuthority extends NameAwarePlugin {
/** /**
* Get all values from the authority that match the preferred value. * Get all values from the authority that match the preferred value.
* Note that the offering was entered by the user and may contain * Note that the offering was entered by the user and may contain
@@ -32,15 +35,13 @@ public interface ChoiceAuthority {
* defaultSelected index in the Choices instance to the choice, if any, * defaultSelected index in the Choices instance to the choice, if any,
* that matches the value. * that matches the value.
* *
* @param field being matched for
* @param text user's value to match * @param text user's value to match
* @param collection database ID of Collection for context (owner of Item)
* @param start choice at which to start, 0 is first. * @param start choice at which to start, 0 is first.
* @param limit maximum number of choices to return, 0 for no limit. * @param limit maximum number of choices to return, 0 for no limit.
* @param locale explicit localization key if available, or null * @param locale explicit localization key if available, or null
* @return a Choices object (never null). * @return a Choices object (never null).
*/ */
public Choices getMatches(String field, String text, Collection collection, int start, int limit, String locale); public Choices getMatches(String text, int start, int limit, String locale);
/** /**
* Get the single "best" match (if any) of a value in the authority * Get the single "best" match (if any) of a value in the authority
@@ -51,13 +52,11 @@ public interface ChoiceAuthority {
* This call is typically used in non-interactive metadata ingest * This call is typically used in non-interactive metadata ingest
* where there is no interactive agent to choose from among options. * where there is no interactive agent to choose from among options.
* *
* @param field being matched for
* @param text user's value to match * @param text user's value to match
* @param collection database ID of Collection for context (owner of Item)
* @param locale explicit localization key if available, or null * @param locale explicit localization key if available, or null
* @return a Choices object (never null) with 1 or 0 values. * @return a Choices object (never null) with 1 or 0 values.
*/ */
public Choices getBestMatch(String field, String text, Collection collection, String locale); public Choices getBestMatch(String text, String locale);
/** /**
* Get the canonical user-visible "label" (i.e. short descriptive text) * Get the canonical user-visible "label" (i.e. short descriptive text)
@@ -67,31 +66,97 @@ public interface ChoiceAuthority {
* This may get called many times while populating a Web page so it should * This may get called many times while populating a Web page so it should
* be implemented as efficiently as possible. * be implemented as efficiently as possible.
* *
* @param field being matched for
* @param key authority key known to this authority. * @param key authority key known to this authority.
* @param locale explicit localization key if available, or null * @param locale explicit localization key if available, or null
* @return descriptive label - should always return something, never null. * @return descriptive label - should always return something, never null.
*/ */
public String getLabel(String field, String key, String locale); public String getLabel(String key, String locale);
/**
* Get the canonical value to store for a key in the authority. Can be localized
* given the implicit or explicit locale specification.
*
* @param key authority key known to this authority.
* @param locale explicit localization key if available, or null
* @return value to store - should always return something, never null.
*/
default String getValue(String key, String locale) {
return getLabel(key, locale);
}
/**
* Get a map of additional information related to the specified key in the
* authority.
*
* @param key the key of the entry
* @param locale explicit localization key if available, or null
* @return a map of additional information related to the key
*/
default Map<String, String> getExtra(String key, String locale) {
return new HashMap<String, String>();
}
/**
* Return true for hierarchical authorities
*
* @return <code>true</code> if hierarchical, default <code>false</code>
*/
default boolean isHierarchical() { default boolean isHierarchical() {
return false; return false;
} }
/**
* Scrollable authorities allow scrolling through the entries without applying a
* filter/query to
* {@link #getMatches(String, int, int, String)}
*
* @return <code>true</code> if scrollable, default <code>false</code>
*/
default boolean isScrollable() { default boolean isScrollable() {
return false; return false;
} }
default boolean hasIdentifier() { /**
return true; * A hierarchical authority can provide a hint for the UI about how many levels
* to preload to improve the UX. It provides a valid default for hierarchical
* authorities
*
* @return <code>0</code> if hierarchical, null otherwise
*/
default Integer getPreloadLevel() {
return isHierarchical() ? 0 : null;
} }
default public Choice getChoice(String fieldKey, String authKey, String locale) { /**
* Build the preferred choice associated with the authKey. The default
* implementation delegates the creation to the {@link #getLabel(String, String)},
* {@link #getValue(String, String)} and {@link #getExtra(String, String)}
* methods but can be overridden directly for better efficiency or special
* scenarios
*
* @param authKey authority key known to this authority.
* @param locale explicit localization key if available, or null
* @return the preferred choice for this authKey and locale
*/
default public Choice getChoice(String authKey, String locale) {
Choice result = new Choice(); Choice result = new Choice();
result.authority = authKey; result.authority = authKey;
result.label = getLabel(fieldKey, authKey, locale); result.label = getLabel(authKey, locale);
result.value = getLabel(fieldKey, authKey, locale); result.value = getValue(authKey, locale);
result.extras.putAll(getExtra(authKey, locale));
return result; return result;
} }
/**
* Provide a recommendation to store the authority in the metadata value if
* available in the provided choice(s). Usually a ChoiceAuthority should
* recommend that, so the default is true and it only needs to be overridden in
* unusual scenarios
*
* @return <code>true</code> if the authority provided in any choice of this
* authority should be stored in the metadata value
*/
default public boolean storeAuthorityInMetadata() {
return true;
}
} }
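A hedged, consumer-side sketch of the field-agnostic interface above. The authority name "srsc" is only an example, choiceAuthorityService is assumed to be injected, and matches.values is assumed to expose the returned Choice array.

    ChoiceAuthority authority = choiceAuthorityService.getChoiceAuthorityByAuthorityName("srsc");
    Choices matches = authority.getMatches("history", 0, 10, "en");
    for (Choice c : matches.values) {
        System.out.println(c.authority + " -> " + c.label + (c.selectable ? "" : " (not selectable)"));
    }
    // The default getChoice() assembles label, value and extras for a single authority key in one call.
    Choice preferred = authority.getChoice(matches.values[0].authority, "en");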


@@ -7,10 +7,13 @@
*/ */
package org.dspace.content.authority; package org.dspace.content.authority;
import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Map.Entry;
import java.util.Set; import java.util.Set;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
@@ -19,6 +22,9 @@ import org.dspace.app.util.DCInput;
import org.dspace.app.util.DCInputSet; import org.dspace.app.util.DCInputSet;
import org.dspace.app.util.DCInputsReader; import org.dspace.app.util.DCInputsReader;
import org.dspace.app.util.DCInputsReaderException; import org.dspace.app.util.DCInputsReaderException;
import org.dspace.app.util.SubmissionConfig;
import org.dspace.app.util.SubmissionConfigReader;
import org.dspace.app.util.SubmissionConfigReaderException;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.MetadataValue; import org.dspace.content.MetadataValue;
import org.dspace.content.authority.service.ChoiceAuthorityService; import org.dspace.content.authority.service.ChoiceAuthorityService;
@@ -54,23 +60,37 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
// map of field key to authority plugin // map of field key to authority plugin
protected Map<String, ChoiceAuthority> controller = new HashMap<String, ChoiceAuthority>(); protected Map<String, ChoiceAuthority> controller = new HashMap<String, ChoiceAuthority>();
// map of field key, form definition to authority plugin
protected Map<String, Map<String, ChoiceAuthority>> controllerFormDefinitions =
new HashMap<String, Map<String, ChoiceAuthority>>();
// map of field key to presentation type // map of field key to presentation type
protected Map<String, String> presentation = new HashMap<String, String>(); protected Map<String, String> presentation = new HashMap<String, String>();
// map of field key to closed value // map of field key to closed value
protected Map<String, Boolean> closed = new HashMap<String, Boolean>(); protected Map<String, Boolean> closed = new HashMap<String, Boolean>();
// map of authority name to field key // flag to track the initialization status of the service
protected Map<String, String> authorities = new HashMap<String, String>(); private boolean initialized = false;
// map of authority name to field keys (the same authority can be configured over multiple metadata)
protected Map<String, List<String>> authorities = new HashMap<String, List<String>>();
// map of authority name to form definition and field keys
protected Map<String, Map<String, List<String>>> authoritiesFormDefinitions =
new HashMap<String, Map<String, List<String>>>();
// the item submission reader
private SubmissionConfigReader itemSubmissionConfigReader;
@Autowired(required = true) @Autowired(required = true)
protected ConfigurationService configurationService; protected ConfigurationService configurationService;
@Autowired(required = true) @Autowired(required = true)
protected PluginService pluginService; protected PluginService pluginService;
private final String CHOICES_PLUGIN_PREFIX = "choices.plugin."; final static String CHOICES_PLUGIN_PREFIX = "choices.plugin.";
private final String CHOICES_PRESENTATION_PREFIX = "choices.presentation."; final static String CHOICES_PRESENTATION_PREFIX = "choices.presentation.";
private final String CHOICES_CLOSED_PREFIX = "choices.closed."; final static String CHOICES_CLOSED_PREFIX = "choices.closed.";
protected ChoiceAuthorityServiceImpl() { protected ChoiceAuthorityServiceImpl() {
} }
@@ -96,10 +116,25 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
@Override @Override
public Set<String> getChoiceAuthoritiesNames() { public Set<String> getChoiceAuthoritiesNames() {
if (authorities.keySet().isEmpty()) { init();
Set<String> authoritiesNames = new HashSet<String>();
authoritiesNames.addAll(authorities.keySet());
authoritiesNames.addAll(authoritiesFormDefinitions.keySet());
return authoritiesNames;
}
private synchronized void init() {
if (!initialized) {
try {
itemSubmissionConfigReader = new SubmissionConfigReader();
} catch (SubmissionConfigReaderException e) {
// the system is in an illegal state as the submission definition is not valid
throw new IllegalStateException("Error reading the item submission configuration: " + e.getMessage(),
e);
}
loadChoiceAuthorityConfigurations(); loadChoiceAuthorityConfigurations();
initialized = true;
} }
return authorities.keySet();
} }
@Override @Override
@@ -112,59 +147,62 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
@Override @Override
public Choices getMatches(String fieldKey, String query, Collection collection, public Choices getMatches(String fieldKey, String query, Collection collection,
int start, int limit, String locale) { int start, int limit, String locale) {
ChoiceAuthority ma = getChoiceAuthorityMap().get(fieldKey); ChoiceAuthority ma = getAuthorityByFieldKeyCollection(fieldKey, collection);
if (ma == null) { if (ma == null) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"No choices plugin was configured for field \"" + fieldKey "No choices plugin was configured for field \"" + fieldKey
+ "\"."); + "\", collection=" + collection.getID().toString() + ".");
} }
return ma.getMatches(fieldKey, query, collection, start, limit, locale); return ma.getMatches(query, start, limit, locale);
} }
@Override @Override
public Choices getMatches(String fieldKey, String query, Collection collection, int start, int limit, String locale, public Choices getMatches(String fieldKey, String query, Collection collection, int start, int limit, String locale,
boolean externalInput) { boolean externalInput) {
ChoiceAuthority ma = getChoiceAuthorityMap().get(fieldKey); ChoiceAuthority ma = getAuthorityByFieldKeyCollection(fieldKey, collection);
if (ma == null) { if (ma == null) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"No choices plugin was configured for field \"" + fieldKey "No choices plugin was configured for field \"" + fieldKey
+ "\"."); + "\", collection=" + collection.getID().toString() + ".");
} }
if (externalInput && ma instanceof SolrAuthority) { if (externalInput && ma instanceof SolrAuthority) {
((SolrAuthority) ma).addExternalResultsInNextMatches(); ((SolrAuthority) ma).addExternalResultsInNextMatches();
} }
return ma.getMatches(fieldKey, query, collection, start, limit, locale); return ma.getMatches(query, start, limit, locale);
} }
@Override @Override
public Choices getBestMatch(String fieldKey, String query, Collection collection, public Choices getBestMatch(String fieldKey, String query, Collection collection,
String locale) { String locale) {
ChoiceAuthority ma = getChoiceAuthorityMap().get(fieldKey); ChoiceAuthority ma = getAuthorityByFieldKeyCollection(fieldKey, collection);
if (ma == null) { if (ma == null) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"No choices plugin was configured for field \"" + fieldKey "No choices plugin was configured for field \"" + fieldKey
+ "\"."); + "\", collection=" + collection.getID().toString() + ".");
} }
return ma.getBestMatch(fieldKey, query, collection, locale); return ma.getBestMatch(query, locale);
} }
@Override @Override
public String getLabel(MetadataValue metadataValue, String locale) { public String getLabel(MetadataValue metadataValue, Collection collection, String locale) {
return getLabel(metadataValue.getMetadataField().toString(), metadataValue.getAuthority(), locale); return getLabel(metadataValue.getMetadataField().toString(), collection, metadataValue.getAuthority(), locale);
} }
@Override @Override
public String getLabel(String fieldKey, String authKey, String locale) { public String getLabel(String fieldKey, Collection collection, String authKey, String locale) {
ChoiceAuthority ma = getChoiceAuthorityMap().get(fieldKey); ChoiceAuthority ma = getAuthorityByFieldKeyCollection(fieldKey, collection);
if (ma == null) { if (ma == null) {
throw new IllegalArgumentException("No choices plugin was configured for field \"" + fieldKey + "\"."); throw new IllegalArgumentException(
"No choices plugin was configured for field \"" + fieldKey
+ "\", collection=" + collection.getID().toString() + ".");
} }
return ma.getLabel(fieldKey, authKey, locale); return ma.getLabel(authKey, locale);
} }
@Override @Override
public boolean isChoicesConfigured(String fieldKey) { public boolean isChoicesConfigured(String fieldKey, Collection collection) {
return getChoiceAuthorityMap().containsKey(fieldKey); return getAuthorityByFieldKeyCollection(fieldKey, collection) != null;
} }
@Override @Override
@@ -178,8 +216,14 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
} }
@Override @Override
public List<String> getVariants(MetadataValue metadataValue) { public List<String> getVariants(MetadataValue metadataValue, Collection collection) {
ChoiceAuthority ma = getChoiceAuthorityMap().get(metadataValue.getMetadataField().toString()); String fieldKey = metadataValue.getMetadataField().toString();
ChoiceAuthority ma = getAuthorityByFieldKeyCollection(fieldKey, collection);
if (ma == null) {
throw new IllegalArgumentException(
"No choices plugin was configured for field \"" + fieldKey
+ "\", collection=" + collection.getID().toString() + ".");
}
if (ma instanceof AuthorityVariantsSupport) { if (ma instanceof AuthorityVariantsSupport) {
AuthorityVariantsSupport avs = (AuthorityVariantsSupport) ma; AuthorityVariantsSupport avs = (AuthorityVariantsSupport) ma;
return avs.getVariants(metadataValue.getAuthority(), metadataValue.getLanguage()); return avs.getVariants(metadataValue.getAuthority(), metadataValue.getLanguage());
@@ -189,42 +233,53 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
@Override @Override
public String getChoiceAuthorityName(String schema, String element, String qualifier) { public String getChoiceAuthorityName(String schema, String element, String qualifier, Collection collection) {
String makeFieldKey = makeFieldKey(schema, element, qualifier); init();
if (getChoiceAuthorityMap().containsKey(makeFieldKey)) { String fieldKey = makeFieldKey(schema, element, qualifier);
for (String key : this.authorities.keySet()) { // check if there is an authority configured for the metadata valid for all the collections
if (this.authorities.get(key).equals(makeFieldKey)) { if (controller.containsKey(fieldKey)) {
return key; for (Entry<String, List<String>> authority2md : authorities.entrySet()) {
if (authority2md.getValue().contains(fieldKey)) {
return authority2md.getKey();
}
}
} else if (collection != null && controllerFormDefinitions.containsKey(fieldKey)) {
// there is an authority configured for the metadata valid for some collections,
// check if it is the requested collection
Map<String, ChoiceAuthority> controllerFormDef = controllerFormDefinitions.get(fieldKey);
SubmissionConfig submissionConfig = itemSubmissionConfigReader
.getSubmissionConfigByCollection(collection.getHandle());
String submissionName = submissionConfig.getSubmissionName();
// check if the requested collection has a submission definition that use an authority for the metadata
if (controllerFormDef.containsKey(submissionName)) {
for (Entry<String, Map<String, List<String>>> authority2defs2md :
authoritiesFormDefinitions.entrySet()) {
List<String> mdByDefinition = authority2defs2md.getValue().get(submissionName);
if (mdByDefinition != null && mdByDefinition.contains(fieldKey)) {
return authority2defs2md.getKey();
}
} }
} }
} }
return configurationService.getProperty( return null;
CHOICES_PLUGIN_PREFIX + schema + "." + element + (qualifier != null ? "." + qualifier : ""));
} }
protected String makeFieldKey(String schema, String element, String qualifier) { protected String makeFieldKey(String schema, String element, String qualifier) {
return Utils.standardize(schema, element, qualifier, "_"); return Utils.standardize(schema, element, qualifier, "_");
} }
/**
* Return map of key to ChoiceAuthority plugin
*
* @return
*/
private Map<String, ChoiceAuthority> getChoiceAuthorityMap() {
// If empty, load from configuration
if (controller.isEmpty()) {
loadChoiceAuthorityConfigurations();
}
return controller;
}
@Override @Override
public void clearCache() { public void clearCache() {
controller.clear(); controller.clear();
authorities.clear(); authorities.clear();
presentation.clear();
closed.clear();
controllerFormDefinitions.clear();
authoritiesFormDefinitions.clear();
itemSubmissionConfigReader = null;
initialized = false;
} }
private void loadChoiceAuthorityConfigurations() { private void loadChoiceAuthorityConfigurations() {
// Get all configuration keys starting with a given prefix // Get all configuration keys starting with a given prefix
List<String> propKeys = configurationService.getPropertyKeys(CHOICES_PLUGIN_PREFIX); List<String> propKeys = configurationService.getPropertyKeys(CHOICES_PLUGIN_PREFIX);
@@ -249,71 +304,127 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
"Skipping invalid configuration for " + key + " because named plugin not found: " + authorityName); "Skipping invalid configuration for " + key + " because named plugin not found: " + authorityName);
continue; continue;
} }
if (!authorities.containsKey(authorityName)) {
controller.put(fkey, ma);
authorities.put(authorityName, fkey);
} else {
log.warn(
"Skipping invalid configuration for " + key + " because plugin is alredy in use: " +
authorityName + " used by " + authorities
.get(authorityName));
continue;
}
controller.put(fkey, ma);
List<String> fkeys;
if (authorities.containsKey(authorityName)) {
fkeys = authorities.get(authorityName);
} else {
fkeys = new ArrayList<String>();
}
fkeys.add(fkey);
authorities.put(authorityName, fkeys);
log.debug("Choice Control: For field=" + fkey + ", Plugin=" + ma); log.debug("Choice Control: For field=" + fkey + ", Plugin=" + ma);
} }
autoRegisterChoiceAuthorityFromInputReader(); autoRegisterChoiceAuthorityFromInputReader();
} }
/**
* This method will register all the authorities that are required due to the
* submission forms configuration. This includes authorities for value pairs and
* xml vocabularies
*/
private void autoRegisterChoiceAuthorityFromInputReader() { private void autoRegisterChoiceAuthorityFromInputReader() {
try { try {
List<SubmissionConfig> submissionConfigs = itemSubmissionConfigReader
.getAllSubmissionConfigs(Integer.MAX_VALUE, 0);
DCInputsReader dcInputsReader = new DCInputsReader(); DCInputsReader dcInputsReader = new DCInputsReader();
for (DCInputSet dcinputSet : dcInputsReader.getAllInputs(Integer.MAX_VALUE, 0)) {
DCInput[][] dcinputs = dcinputSet.getFields(); // loop over all the defined item submission configuration
for (DCInput[] dcrows : dcinputs) { for (SubmissionConfig subCfg : submissionConfigs) {
for (DCInput dcinput : dcrows) { String submissionName = subCfg.getSubmissionName();
if (StringUtils.isNotBlank(dcinput.getPairsType()) List<DCInputSet> inputsBySubmissionName = dcInputsReader.getInputsBySubmissionName(submissionName);
|| StringUtils.isNotBlank(dcinput.getVocabulary())) { // loop over the submission forms configuration eventually associated with the submission panel
String authorityName = dcinput.getPairsType(); for (DCInputSet dcinputSet : inputsBySubmissionName) {
if (StringUtils.isBlank(authorityName)) { DCInput[][] dcinputs = dcinputSet.getFields();
for (DCInput[] dcrows : dcinputs) {
for (DCInput dcinput : dcrows) {
// for each input in the form check if it is associated with a real value pairs
// or an xml vocabulary
String authorityName = null;
if (StringUtils.isNotBlank(dcinput.getPairsType())
&& !StringUtils.equals(dcinput.getInputType(), "qualdrop_value")) {
authorityName = dcinput.getPairsType();
} else if (StringUtils.isNotBlank(dcinput.getVocabulary())) {
authorityName = dcinput.getVocabulary(); authorityName = dcinput.getVocabulary();
} }
if (!StringUtils.equals(dcinput.getInputType(), "qualdrop_value")) {
// do we have an authority?
if (StringUtils.isNotBlank(authorityName)) {
String fieldKey = makeFieldKey(dcinput.getSchema(), dcinput.getElement(), String fieldKey = makeFieldKey(dcinput.getSchema(), dcinput.getElement(),
dcinput.getQualifier()); dcinput.getQualifier());
ChoiceAuthority ca = controller.get(authorityName); ChoiceAuthority ca = controller.get(authorityName);
if (ca == null) { if (ca == null) {
InputFormSelfRegisterWrapperAuthority ifa = new ca = (ChoiceAuthority) pluginService
InputFormSelfRegisterWrapperAuthority();
if (controller.containsKey(fieldKey)) {
ifa = (InputFormSelfRegisterWrapperAuthority) controller.get(fieldKey);
}
ChoiceAuthority ma = (ChoiceAuthority) pluginService
.getNamedPlugin(ChoiceAuthority.class, authorityName); .getNamedPlugin(ChoiceAuthority.class, authorityName);
if (ma == null) { if (ca == null) {
log.warn("Skipping invalid configuration for " + fieldKey throw new IllegalStateException("Invalid configuration for " + fieldKey
+ " because named plugin not found: " + authorityName); + " in submission definition " + submissionName
continue; + ", form definition " + dcinputSet.getFormName()
+ " no named plugin found: " + authorityName);
} }
ifa.getDelegates().put(dcinputSet.getFormName(), ma);
controller.put(fieldKey, ifa);
}
if (!authorities.containsKey(authorityName)) {
authorities.put(authorityName, fieldKey);
} }
addAuthorityToFormCacheMap(submissionName, fieldKey, ca);
addFormDetailsToAuthorityCacheMap(submissionName, authorityName, fieldKey);
} }
} }
} }
} }
} }
} catch (DCInputsReaderException e) { } catch (DCInputsReaderException e) {
throw new IllegalStateException(e.getMessage(), e); // the system is in an illegal state as the submission definition is not valid
throw new IllegalStateException("Error reading the item submission configuration: " + e.getMessage(),
e);
} }
} }
/**
* Add the form/field to the cache map keeping track of which forms/fields are
* associated with the specific authority name
*
* @param submissionName the form definition name
* @param authorityName the name of the authority plugin
* @param fieldKey the field key that uses the authority
*/
private void addFormDetailsToAuthorityCacheMap(String submissionName, String authorityName, String fieldKey) {
Map<String, List<String>> submissionDefinitionNames2fieldKeys;
if (authoritiesFormDefinitions.containsKey(authorityName)) {
submissionDefinitionNames2fieldKeys = authoritiesFormDefinitions.get(authorityName);
} else {
submissionDefinitionNames2fieldKeys = new HashMap<String, List<String>>();
}
List<String> fields;
if (submissionDefinitionNames2fieldKeys.containsKey(submissionName)) {
fields = submissionDefinitionNames2fieldKeys.get(submissionName);
} else {
fields = new ArrayList<String>();
}
fields.add(fieldKey);
submissionDefinitionNames2fieldKeys.put(submissionName, fields);
authoritiesFormDefinitions.put(authorityName, submissionDefinitionNames2fieldKeys);
}
/**
* Add the authority plugin to the cache map keeping track of which authority is
* used by a specific form/field
*
* @param submissionName the submission definition name
* @param fieldKey the field key that requires the authority
* @param ca the authority plugin
*/
private void addAuthorityToFormCacheMap(String submissionName, String fieldKey, ChoiceAuthority ca) {
Map<String, ChoiceAuthority> definition2authority;
if (controllerFormDefinitions.containsKey(fieldKey)) {
definition2authority = controllerFormDefinitions.get(fieldKey);
} else {
definition2authority = new HashMap<String, ChoiceAuthority>();
}
definition2authority.put(submissionName, ca);
controllerFormDefinitions.put(fieldKey, definition2authority);
}
/** /**
* Return map of key to presentation * Return map of key to presentation
* *
@@ -370,26 +481,6 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
return closed; return closed;
} }
@Override
public String getChoiceMetadatabyAuthorityName(String name) {
if (authorities.isEmpty()) {
loadChoiceAuthorityConfigurations();
}
if (authorities.containsKey(name)) {
return authorities.get(name);
}
return null;
}
@Override
public Choice getChoice(String fieldKey, String authKey, String locale) {
ChoiceAuthority ma = getChoiceAuthorityMap().get(fieldKey);
if (ma == null) {
throw new IllegalArgumentException("No choices plugin was configured for field \"" + fieldKey + "\".");
}
return ma.getChoice(fieldKey, authKey, locale);
}
@Override @Override
public ChoiceAuthority getChoiceAuthorityByAuthorityName(String authorityName) { public ChoiceAuthority getChoiceAuthorityByAuthorityName(String authorityName) {
ChoiceAuthority ma = (ChoiceAuthority) ChoiceAuthority ma = (ChoiceAuthority)
@@ -401,4 +492,68 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
} }
return ma; return ma;
} }
private ChoiceAuthority getAuthorityByFieldKeyCollection(String fieldKey, Collection collection) {
init();
ChoiceAuthority ma = controller.get(fieldKey);
if (ma == null && collection != null) {
SubmissionConfigReader configReader;
try {
configReader = new SubmissionConfigReader();
SubmissionConfig submissionName = configReader.getSubmissionConfigByCollection(collection.getHandle());
ma = controllerFormDefinitions.get(fieldKey).get(submissionName.getSubmissionName());
} catch (SubmissionConfigReaderException e) {
// the system is in an illegal state as the submission definition is not valid
throw new IllegalStateException("Error reading the item submission configuration: " + e.getMessage(),
e);
}
}
return ma;
}
@Override
public boolean storeAuthority(String fieldKey, Collection collection) {
// currently only named authority can eventually provide real authority
return controller.containsKey(fieldKey);
}
/**
* Wrapper that calls getChoicesByParent method of the plugin.
*
* @param authorityName authority name
* @param parentId parent Id
* @param start choice at which to start, 0 is first.
* @param limit maximum number of choices to return, 0 for no limit.
* @param locale explicit localization key if available, or null
* @return a Choices object (never null).
* @see org.dspace.content.authority.ChoiceAuthority#getChoicesByParent(java.lang.String, java.lang.String,
* int, int, java.lang.String)
*/
@Override
public Choices getChoicesByParent(String authorityName, String parentId, int start, int limit, String locale) {
HierarchicalAuthority ma = (HierarchicalAuthority) getChoiceAuthorityByAuthorityName(authorityName);
return ma.getChoicesByParent(authorityName, parentId, start, limit, locale);
}
/**
* Wrapper that calls getTopChoices method of the plugin.
*
* @param authorityName authority name
* @param start choice at which to start, 0 is first.
* @param limit maximum number of choices to return, 0 for no limit.
* @param locale explicit localization key if available, or null
* @return a Choices object (never null).
* @see org.dspace.content.authority.ChoiceAuthority#getTopChoices(java.lang.String, int, int, java.lang.String)
*/
@Override
public Choices getTopChoices(String authorityName, int start, int limit, String locale) {
HierarchicalAuthority ma = (HierarchicalAuthority) getChoiceAuthorityByAuthorityName(authorityName);
return ma.getTopChoices(authorityName, start, limit, locale);
}
@Override
public Choice getParentChoice(String authorityName, String vocabularyId, String locale) {
HierarchicalAuthority ma = (HierarchicalAuthority) getChoiceAuthorityByAuthorityName(authorityName);
return ma.getParentChoice(authorityName, vocabularyId, locale);
}
} }
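A short sketch of browsing a hierarchical vocabulary through the wrapper methods added above. The vocabulary name "srsc" and the values field on Choices are assumptions; the navigation pattern itself follows the signatures defined here.

    Choices top = choiceAuthorityService.getTopChoices("srsc", 0, 20, "en");
    for (Choice node : top.values) {
        Choices children = choiceAuthorityService.getChoicesByParent("srsc", node.authority, 0, 20, "en");
        // render the node and its children; getParentChoice(...) can supply breadcrumb information
    }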


@@ -9,14 +9,20 @@ package org.dspace.content.authority;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.app.util.DCInputsReader; import org.dspace.app.util.DCInputsReader;
import org.dspace.app.util.DCInputsReaderException; import org.dspace.app.util.DCInputsReaderException;
import org.dspace.content.Collection; import org.dspace.core.I18nUtil;
import org.dspace.core.SelfNamedPlugin; import org.dspace.core.SelfNamedPlugin;
/** /**
@@ -44,16 +50,38 @@ import org.dspace.core.SelfNamedPlugin;
public class DCInputAuthority extends SelfNamedPlugin implements ChoiceAuthority { public class DCInputAuthority extends SelfNamedPlugin implements ChoiceAuthority {
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(DCInputAuthority.class); private static Logger log = org.apache.logging.log4j.LogManager.getLogger(DCInputAuthority.class);
private String values[] = null; /**
private String labels[] = null; * The map of the values available for a specific language. Examples of keys are
* "en", "it", "uk"
*/
private Map<String, String[]> values = null;
private static DCInputsReader dci = null; /**
* The map of the labels available for a specific language. Examples of keys are
* "en", "it", "uk"
*/
private Map<String, String[]> labels = null;
/**
* The map of the input form readers to use for each supported Java locale
*/
private static Map<Locale, DCInputsReader> dcis = null;
private static String pluginNames[] = null; private static String pluginNames[] = null;
public DCInputAuthority() { public DCInputAuthority() {
super(); super();
} }
@Override
public boolean storeAuthorityInMetadata() {
// For backward compatibility value pairs don't store authority in
// the metadatavalue
return false;
}
public static void reset() {
pluginNames = null;
}
public static String[] getPluginNames() { public static String[] getPluginNames() {
if (pluginNames == null) { if (pluginNames == null) {
initPluginNames(); initPluginNames();
@@ -63,20 +91,28 @@ public class DCInputAuthority extends SelfNamedPlugin implements ChoiceAuthority
} }
private static synchronized void initPluginNames() { private static synchronized void initPluginNames() {
Locale[] locales = I18nUtil.getSupportedLocales();
Set<String> names = new HashSet<String>();
if (pluginNames == null) { if (pluginNames == null) {
try { try {
if (dci == null) { dcis = new HashMap<Locale, DCInputsReader>();
dci = new DCInputsReader(); for (Locale locale : locales) {
dcis.put(locale, new DCInputsReader(I18nUtil.getInputFormsFileName(locale)));
}
for (Locale l : locales) {
Iterator pi = dcis.get(l).getPairsNameIterator();
while (pi.hasNext()) {
names.add((String) pi.next());
}
}
DCInputsReader dcirDefault = new DCInputsReader();
Iterator pi = dcirDefault.getPairsNameIterator();
while (pi.hasNext()) {
names.add((String) pi.next());
} }
} catch (DCInputsReaderException e) { } catch (DCInputsReaderException e) {
log.error("Failed reading DCInputs initialization: ", e); log.error("Failed reading DCInputs initialization: ", e);
} }
List<String> names = new ArrayList<String>();
Iterator pi = dci.getPairsNameIterator();
while (pi.hasNext()) {
names.add((String) pi.next());
}
pluginNames = names.toArray(new String[names.size()]); pluginNames = names.toArray(new String[names.size()]);
log.debug("Got plugin names = " + Arrays.deepToString(pluginNames)); log.debug("Got plugin names = " + Arrays.deepToString(pluginNames));
} }
@@ -85,45 +121,65 @@ public class DCInputAuthority extends SelfNamedPlugin implements ChoiceAuthority
// once-only load of values and labels // once-only load of values and labels
private void init() { private void init() {
if (values == null) { if (values == null) {
values = new HashMap<String, String[]>();
labels = new HashMap<String, String[]>();
String pname = this.getPluginInstanceName(); String pname = this.getPluginInstanceName();
List<String> pairs = dci.getPairs(pname); for (Locale l : dcis.keySet()) {
if (pairs != null) { DCInputsReader dci = dcis.get(l);
values = new String[pairs.size() / 2]; List<String> pairs = dci.getPairs(pname);
labels = new String[pairs.size() / 2]; if (pairs != null) {
for (int i = 0; i < pairs.size(); i += 2) { String[] valuesLocale = new String[pairs.size() / 2];
labels[i / 2] = pairs.get(i); String[]labelsLocale = new String[pairs.size() / 2];
values[i / 2] = pairs.get(i + 1); for (int i = 0; i < pairs.size(); i += 2) {
labelsLocale[i / 2] = pairs.get(i);
valuesLocale[i / 2] = pairs.get(i + 1);
}
values.put(l.getLanguage(), valuesLocale);
labels.put(l.getLanguage(), labelsLocale);
log.debug("Found pairs for name=" + pname + ",locale=" + l);
} else {
log.error("Failed to find any pairs for name=" + pname, new IllegalStateException());
} }
log.debug("Found pairs for name=" + pname);
} else {
log.error("Failed to find any pairs for name=" + pname, new IllegalStateException());
} }
} }
} }
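For illustration only (the pair values are invented): DCInputsReader.getPairs returns a flat list alternating display label and stored value, which init() splits into the per-language label and value arrays.

    // hypothetical value-pair list for the "en" reader: [label, value, label, value, ...]
    List<String> pairs = Arrays.asList("Article", "article", "Book", "book");
    // after init(): labels.get("en") -> {"Article", "Book"}
    //               values.get("en") -> {"article", "book"}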
@Override @Override
public Choices getMatches(String field, String query, Collection collection, int start, int limit, String locale) { public Choices getMatches(String query, int start, int limit, String locale) {
init(); init();
Locale currentLocale = I18nUtil.getSupportedLocale(locale);
String[] valuesLocale = values.get(currentLocale.getLanguage());
String[] labelsLocale = labels.get(currentLocale.getLanguage());
int dflt = -1; int dflt = -1;
Choice v[] = new Choice[values.length]; int found = 0;
for (int i = 0; i < values.length; ++i) { List<Choice> v = new ArrayList<Choice>();
v[i] = new Choice(values[i], values[i], labels[i]); for (int i = 0; i < valuesLocale.length; ++i) {
if (values[i].equalsIgnoreCase(query)) { if (query == null || StringUtils.containsIgnoreCase(valuesLocale[i], query)) {
dflt = i; if (found >= start && v.size() < limit) {
v.add(new Choice(null, valuesLocale[i], labelsLocale[i]));
if (valuesLocale[i].equalsIgnoreCase(query)) {
dflt = i;
}
}
found++;
} }
} }
return new Choices(v, 0, v.length, Choices.CF_AMBIGUOUS, false, dflt); Choice[] vArray = new Choice[v.size()];
return new Choices(v.toArray(vArray), start, found, Choices.CF_AMBIGUOUS, false, dflt);
} }
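A short usage sketch of the paged lookup above, with invented values: a null query returns every pair, a non-null query is matched case-insensitively as a substring of the stored value, and the returned Choices reports the requested start offset plus the total number of matches.

    Choices page = authority.getMatches("bo", 0, 10, "en");
    // page.values holds at most 10 choices; page.total is the overall number of matching pairs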
@Override @Override
public Choices getBestMatch(String field, String text, Collection collection, String locale) { public Choices getBestMatch(String text, String locale) {
init(); init();
for (int i = 0; i < values.length; ++i) { Locale currentLocale = I18nUtil.getSupportedLocale(locale);
if (text.equalsIgnoreCase(values[i])) { String[] valuesLocale = values.get(currentLocale.getLanguage());
String[] labelsLocale = labels.get(currentLocale.getLanguage());
for (int i = 0; i < valuesLocale.length; ++i) {
if (text.equalsIgnoreCase(valuesLocale[i])) {
Choice v[] = new Choice[1]; Choice v[] = new Choice[1];
v[0] = new Choice(String.valueOf(i), values[i], labels[i]); v[0] = new Choice(String.valueOf(i), valuesLocale[i], labelsLocale[i]);
return new Choices(v, 0, v.length, Choices.CF_UNCERTAIN, false, 0); return new Choices(v, 0, v.length, Choices.CF_UNCERTAIN, false, 0);
} }
} }
@@ -131,19 +187,31 @@ public class DCInputAuthority extends SelfNamedPlugin implements ChoiceAuthority
} }
@Override @Override
public String getLabel(String field, String key, String locale) { public String getLabel(String key, String locale) {
init(); init();
// Get default if locale is empty
if (StringUtils.isBlank(locale)) {
locale = I18nUtil.getDefaultLocale().getLanguage();
}
String[] labelsLocale = labels.get(locale);
int pos = -1; int pos = -1;
for (int i = 0; i < values.length; i++) { for (int i = 0; i < labelsLocale.length; i++) {
if (values[i].equals(key)) { if (labelsLocale[i].equals(key)) {
pos = i; pos = i;
break; break;
} }
} }
if (pos != -1) { if (pos != -1) {
return labels[pos]; return labelsLocale[pos];
} else { } else {
return "UNKNOWN KEY " + key; return "UNKNOWN KEY " + key;
} }
} }
@Override
public boolean isScrollable() {
return true;
}
} }

View File

@@ -10,7 +10,9 @@ package org.dspace.content.authority;
import java.io.File; import java.io.File;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map;
import javax.xml.xpath.XPath; import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants; import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException; import javax.xml.xpath.XPathExpressionException;
@@ -19,7 +21,6 @@ import javax.xml.xpath.XPathFactory;
import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.core.SelfNamedPlugin; import org.dspace.core.SelfNamedPlugin;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
@@ -54,25 +55,35 @@ import org.xml.sax.InputSource;
* @author Michael B. Klein * @author Michael B. Klein
*/ */
public class DSpaceControlledVocabulary extends SelfNamedPlugin implements ChoiceAuthority { public class DSpaceControlledVocabulary extends SelfNamedPlugin implements HierarchicalAuthority {
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(DSpaceControlledVocabulary.class); private static Logger log = org.apache.logging.log4j.LogManager.getLogger(DSpaceControlledVocabulary.class);
protected static String xpathTemplate = "//node[contains(translate(@label,'ABCDEFGHIJKLMNOPQRSTUVWXYZ'," + protected static String xpathTemplate = "//node[contains(translate(@label,'ABCDEFGHIJKLMNOPQRSTUVWXYZ'," +
"'abcdefghijklmnopqrstuvwxyz'),'%s')]"; "'abcdefghijklmnopqrstuvwxyz'),'%s')]";
protected static String idTemplate = "//node[@id = '%s']"; protected static String idTemplate = "//node[@id = '%s']";
protected static String idParentTemplate = "//node[@id = '%s']/parent::isComposedBy"; protected static String labelTemplate = "//node[@label = '%s']";
protected static String idParentTemplate = "//node[@id = '%s']/parent::isComposedBy/parent::node";
protected static String rootTemplate = "/node";
protected static String pluginNames[] = null; protected static String pluginNames[] = null;
protected String vocabularyName = null; protected String vocabularyName = null;
protected InputSource vocabulary = null; protected InputSource vocabulary = null;
protected Boolean suggestHierarchy = true; protected Boolean suggestHierarchy = false;
protected Boolean storeHierarchy = true; protected Boolean storeHierarchy = true;
protected String hierarchyDelimiter = "::"; protected String hierarchyDelimiter = "::";
protected Integer preloadLevel = 1;
public DSpaceControlledVocabulary() { public DSpaceControlledVocabulary() {
super(); super();
} }
@Override
public boolean storeAuthorityInMetadata() {
// For backward compatibility controlled vocabularies don't store the node id in
// the metadatavalue
return false;
}
public static String[] getPluginNames() { public static String[] getPluginNames() {
if (pluginNames == null) { if (pluginNames == null) {
initPluginNames(); initPluginNames();
@@ -112,6 +123,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Choic
String configurationPrefix = "vocabulary.plugin." + vocabularyName; String configurationPrefix = "vocabulary.plugin." + vocabularyName;
storeHierarchy = config.getBooleanProperty(configurationPrefix + ".hierarchy.store", storeHierarchy); storeHierarchy = config.getBooleanProperty(configurationPrefix + ".hierarchy.store", storeHierarchy);
suggestHierarchy = config.getBooleanProperty(configurationPrefix + ".hierarchy.suggest", suggestHierarchy); suggestHierarchy = config.getBooleanProperty(configurationPrefix + ".hierarchy.suggest", suggestHierarchy);
preloadLevel = config.getIntProperty(configurationPrefix + ".hierarchy.preloadLevel", preloadLevel);
String configuredDelimiter = config.getProperty(configurationPrefix + ".delimiter"); String configuredDelimiter = config.getProperty(configurationPrefix + ".delimiter");
if (configuredDelimiter != null) { if (configuredDelimiter != null) {
hierarchyDelimiter = configuredDelimiter.replaceAll("(^\"|\"$)", ""); hierarchyDelimiter = configuredDelimiter.replaceAll("(^\"|\"$)", "");
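The per-vocabulary behaviour is therefore driven by configuration keys built from the vocabulary name. A hedged configuration sketch, where the vocabulary name "srsc" is only an example (the property suffixes come from the code above):

    vocabulary.plugin.srsc.hierarchy.store = true
    vocabulary.plugin.srsc.hierarchy.suggest = false
    vocabulary.plugin.srsc.hierarchy.preloadLevel = 2
    vocabulary.plugin.srsc.delimiter = "::"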
@@ -142,7 +154,7 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Choic
} }
@Override @Override
public Choices getMatches(String field, String text, Collection collection, int start, int limit, String locale) { public Choices getMatches(String text, int start, int limit, String locale) {
init(); init();
log.debug("Getting matches for '" + text + "'"); log.debug("Getting matches for '" + text + "'");
String xpathExpression = ""; String xpathExpression = "";
@@ -151,59 +163,60 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Choic
xpathExpression += String.format(xpathTemplate, textHierarchy[i].replaceAll("'", "&apos;").toLowerCase()); xpathExpression += String.format(xpathTemplate, textHierarchy[i].replaceAll("'", "&apos;").toLowerCase());
} }
XPath xpath = XPathFactory.newInstance().newXPath(); XPath xpath = XPathFactory.newInstance().newXPath();
Choice[] choices; int total = 0;
List<Choice> choices = new ArrayList<Choice>();
try { try {
NodeList results = (NodeList) xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODESET); NodeList results = (NodeList) xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODESET);
String[] authorities = new String[results.getLength()]; total = results.getLength();
String[] values = new String[results.getLength()]; choices = getChoicesFromNodeList(results, start, limit);
String[] labels = new String[results.getLength()];
String[] parent = new String[results.getLength()];
String[] notes = new String[results.getLength()];
for (int i = 0; i < results.getLength(); i++) {
Node node = results.item(i);
readNode(authorities, values, labels, parent, notes, i, node);
}
int resultCount = labels.length - start;
// limit = 0 means no limit
if ((limit > 0) && (resultCount > limit)) {
resultCount = limit;
}
choices = new Choice[resultCount];
if (resultCount > 0) {
for (int i = 0; i < resultCount; i++) {
choices[i] = new Choice(authorities[start + i], values[start + i], labels[start + i]);
if (StringUtils.isNotBlank(parent[i])) {
choices[i].extras.put("parent", parent[i]);
}
if (StringUtils.isNotBlank(notes[i])) {
choices[i].extras.put("note", notes[i]);
}
}
}
} catch (XPathExpressionException e) { } catch (XPathExpressionException e) {
choices = new Choice[0]; log.warn(e.getMessage(), e);
return new Choices(true);
} }
return new Choices(choices, 0, choices.length, Choices.CF_AMBIGUOUS, false); return new Choices(choices.toArray(new Choice[choices.size()]), start, total, Choices.CF_AMBIGUOUS,
total > start + limit);
} }
@Override @Override
public Choices getBestMatch(String field, String text, Collection collection, String locale) { public Choices getBestMatch(String text, String locale) {
init(); init();
log.debug("Getting best match for '" + text + "'"); log.debug("Getting best matches for '" + text + "'");
return getMatches(field, text, collection, 0, 2, locale); String xpathExpression = "";
} String[] textHierarchy = text.split(hierarchyDelimiter, -1);
for (int i = 0; i < textHierarchy.length; i++) {
@Override xpathExpression += String.format(labelTemplate, textHierarchy[i].replaceAll("'", "&apos;"));
public String getLabel(String field, String key, String locale) { }
init();
String xpathExpression = String.format(idTemplate, key);
XPath xpath = XPathFactory.newInstance().newXPath(); XPath xpath = XPathFactory.newInstance().newXPath();
List<Choice> choices = new ArrayList<Choice>();
try { try {
Node node = (Node) xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODE); NodeList results = (NodeList) xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODESET);
return node.getAttributes().getNamedItem("label").getNodeValue(); choices = getChoicesFromNodeList(results, 0, 1);
} catch (XPathExpressionException e) { } catch (XPathExpressionException e) {
return (""); log.warn(e.getMessage(), e);
return new Choices(true);
} }
return new Choices(choices.toArray(new Choice[choices.size()]), 0, choices.size(), Choices.CF_AMBIGUOUS, false);
}
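Because getBestMatch splits the incoming text on the configured hierarchy delimiter and matches every segment by its label, a fully qualified stored value can be resolved back to its node. A small sketch with invented terms:

    // with hierarchyDelimiter = "::" each segment is matched through labelTemplate
    Choices best = vocabulary.getBestMatch("SOCIAL SCIENCES::Economics", null);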
@Override
public String getLabel(String key, String locale) {
return getNodeLabel(key, this.suggestHierarchy);
}
@Override
public String getValue(String key, String locale) {
return getNodeLabel(key, this.storeHierarchy);
}
@Override
public Choice getChoice(String authKey, String locale) {
Node node;
try {
node = getNode(authKey);
} catch (XPathExpressionException e) {
return null;
}
return createChoiceFromNode(node);
} }
@Override @Override
@@ -212,81 +225,227 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Choic
} }
@Override @Override
public Choice getChoice(String fieldKey, String authKey, String locale) { public Choices getTopChoices(String authorityName, int start, int limit, String locale) {
init(); init();
log.debug("Getting matches for '" + authKey + "'"); String xpathExpression = rootTemplate;
String xpathExpression = String.format(idTemplate, authKey); return getChoicesByXpath(xpathExpression, start, limit);
XPath xpath = XPathFactory.newInstance().newXPath();
try {
Node node = (Node) xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODE);
if (node != null) {
String[] authorities = new String[1];
String[] values = new String[1];
String[] labels = new String[1];
String[] parent = new String[1];
String[] note = new String[1];
readNode(authorities, values, labels, parent, note, 0, node);
if (values.length > 0) {
Choice choice = new Choice(authorities[0], values[0], labels[0]);
if (StringUtils.isNotBlank(parent[0])) {
choice.extras.put("parent", parent[0]);
}
if (StringUtils.isNotBlank(note[0])) {
choice.extras.put("note", note[0]);
}
return choice;
}
}
} catch (XPathExpressionException e) {
log.warn(e.getMessage(), e);
}
return null;
} }
private void readNode(String[] authorities, String[] values, String[] labels, String[] parent, String[] notes, @Override
int i, Node node) { public Choices getChoicesByParent(String authorityName, String parentId, int start, int limit, String locale) {
init();
String xpathExpression = String.format(idTemplate, parentId);
return getChoicesByXpath(xpathExpression, start, limit);
}
@Override
public Choice getParentChoice(String authorityName, String childId, String locale) {
init();
try {
String xpathExpression = String.format(idParentTemplate, childId);
Choice choice = createChoiceFromNode(getNodeFromXPath(xpathExpression));
return choice;
} catch (XPathExpressionException e) {
log.error(e.getMessage(), e);
return null;
}
}
@Override
public Integer getPreloadLevel() {
return preloadLevel;
}
private boolean isRootElement(Node node) {
if (node != null && node.getOwnerDocument().getDocumentElement().equals(node)) {
return true;
}
return false;
}
private Node getNode(String key) throws XPathExpressionException {
init();
String xpathExpression = String.format(idTemplate, key);
Node node = getNodeFromXPath(xpathExpression);
return node;
}
private Node getNodeFromXPath(String xpathExpression) throws XPathExpressionException {
XPath xpath = XPathFactory.newInstance().newXPath();
Node node = (Node) xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODE);
return node;
}
private List<Choice> getChoicesFromNodeList(NodeList results, int start, int limit) {
List<Choice> choices = new ArrayList<Choice>();
for (int i = 0; i < results.getLength(); i++) {
if (i < start) {
continue;
}
if (choices.size() == limit) {
break;
}
Node node = results.item(i);
Choice choice = new Choice(getAuthority(node), getLabel(node), getValue(node),
isSelectable(node));
choice.extras = addOtherInformation(getParent(node), getNote(node), getChildren(node), getAuthority(node));
choices.add(choice);
}
return choices;
}
private Map<String, String> addOtherInformation(String parentCurr, String noteCurr,
List<String> childrenCurr, String authorityCurr) {
Map<String, String> extras = new HashMap<String, String>();
if (StringUtils.isNotBlank(parentCurr)) {
extras.put("parent", parentCurr);
}
if (StringUtils.isNotBlank(noteCurr)) {
extras.put("note", noteCurr);
}
if (childrenCurr.size() > 0) {
extras.put("hasChildren", "true");
} else {
extras.put("hasChildren", "false");
}
extras.put("id", authorityCurr);
return extras;
}
private String getNodeLabel(String key, boolean useHierarchy) {
try {
Node node = getNode(key);
if (useHierarchy) {
return this.buildString(node);
} else {
return node.getAttributes().getNamedItem("label").getNodeValue();
}
} catch (XPathExpressionException e) {
return ("");
}
}
private String getLabel(Node node) {
String hierarchy = this.buildString(node); String hierarchy = this.buildString(node);
if (this.suggestHierarchy) { if (this.suggestHierarchy) {
labels[i] = hierarchy; return hierarchy;
} else { } else {
labels[i] = node.getAttributes().getNamedItem("label").getNodeValue(); return node.getAttributes().getNamedItem("label").getNodeValue();
}
if (this.storeHierarchy) {
values[i] = hierarchy;
} else {
values[i] = node.getAttributes().getNamedItem("label").getNodeValue();
} }
}
private String getValue(Node node) {
String hierarchy = this.buildString(node);
if (this.storeHierarchy) {
return hierarchy;
} else {
return node.getAttributes().getNamedItem("label").getNodeValue();
}
}
private String getNote(Node node) {
NodeList childNodes = node.getChildNodes(); NodeList childNodes = node.getChildNodes();
for (int ci = 0; ci < childNodes.getLength(); ci++) { for (int ci = 0; ci < childNodes.getLength(); ci++) {
Node firstChild = childNodes.item(ci); Node firstChild = childNodes.item(ci);
if (firstChild != null && "hasNote".equals(firstChild.getNodeName())) { if (firstChild != null && "hasNote".equals(firstChild.getNodeName())) {
String nodeValue = firstChild.getTextContent(); String nodeValue = firstChild.getTextContent();
if (StringUtils.isNotBlank(nodeValue)) { if (StringUtils.isNotBlank(nodeValue)) {
notes[i] = nodeValue; return nodeValue;
} }
} }
} }
Node idAttr = node.getAttributes().getNamedItem("id"); return null;
if (null != idAttr) { // 'id' is optional }
authorities[i] = idAttr.getNodeValue();
if (isHierarchical()) { private List<String> getChildren(Node node) {
Node parentN = node.getParentNode(); List<String> children = new ArrayList<String>();
if (parentN != null) { NodeList childNodes = node.getChildNodes();
parentN = parentN.getParentNode(); for (int ci = 0; ci < childNodes.getLength(); ci++) {
if (parentN != null) { Node firstChild = childNodes.item(ci);
Node parentIdAttr = parentN.getAttributes().getNamedItem("id"); if (firstChild != null && "isComposedBy".equals(firstChild.getNodeName())) {
if (null != parentIdAttr) { for (int cii = 0; cii < firstChild.getChildNodes().getLength(); cii++) {
parent[i] = parentIdAttr.getNodeValue(); Node childN = firstChild.getChildNodes().item(cii);
if (childN != null && "node".equals(childN.getNodeName())) {
Node childIdAttr = childN.getAttributes().getNamedItem("id");
if (null != childIdAttr) {
children.add(childIdAttr.getNodeValue());
} }
} }
} }
break;
} }
} else { }
authorities[i] = null; return children;
parent[i] = null; }
private boolean isSelectable(Node node) {
Node selectableAttr = node.getAttributes().getNamedItem("selectable");
if (null != selectableAttr) {
return Boolean.valueOf(selectableAttr.getNodeValue());
} else { // Default is true
return true;
} }
} }
private String getParent(Node node) {
Node parentN = node.getParentNode();
if (parentN != null) {
parentN = parentN.getParentNode();
if (parentN != null && !isRootElement(parentN)) {
return buildString(parentN);
}
}
return null;
}
private String getAuthority(Node node) {
Node idAttr = node.getAttributes().getNamedItem("id");
if (null != idAttr) { // 'id' is optional
return idAttr.getNodeValue();
} else {
return null;
}
}
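These helpers assume the controlled-vocabulary XML layout used by the XPath templates above: each node carries id, label and an optional selectable attribute, child nodes are wrapped in an isComposedBy element, and an optional hasNote child holds the note. A minimal sketch with invented content:

    <node id="root0" label="Research Subjects">
      <isComposedBy>
        <node id="sub1" label="Economics" selectable="true">
          <hasNote>An example scope note</hasNote>
        </node>
      </isComposedBy>
    </node>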
private Choices getChoicesByXpath(String xpathExpression, int start, int limit) {
List<Choice> choices = new ArrayList<Choice>();
XPath xpath = XPathFactory.newInstance().newXPath();
try {
Node parentNode = (Node) xpath.evaluate(xpathExpression, vocabulary, XPathConstants.NODE);
int count = 0;
if (parentNode != null) {
NodeList childNodes = (NodeList) xpath.evaluate(".//isComposedBy", parentNode, XPathConstants.NODE);
if (null != childNodes) {
for (int i = 0; i < childNodes.getLength(); i++) {
Node childNode = childNodes.item(i);
if (childNode != null && "node".equals(childNode.getNodeName())) {
if (count < start || choices.size() >= limit) {
count++;
continue;
}
count++;
choices.add(createChoiceFromNode(childNode));
}
}
}
return new Choices(choices.toArray(new Choice[choices.size()]), start, count,
Choices.CF_AMBIGUOUS, false);
}
} catch (XPathExpressionException e) {
log.warn(e.getMessage(), e);
return new Choices(true);
}
return new Choices(false);
}
private Choice createChoiceFromNode(Node node) {
if (node != null && !isRootElement(node)) {
Choice choice = new Choice(getAuthority(node), getLabel(node), getValue(node),
isSelectable(node));
choice.extras = addOtherInformation(getParent(node), getNote(node), getChildren(node), getAuthority(node));
return choice;
}
return null;
}
} }

View File

@@ -0,0 +1,85 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.authority;
/**
 * Plugin interface that supplies a hierarchical authority control mechanism
 * for one metadata field.
*
* @author Larry Stone
* @see ChoiceAuthority
*/
public interface HierarchicalAuthority extends ChoiceAuthority {
/**
 * Returns the entries at the top level of the hierarchical authority,
 * i.e. the choices that have no parent entry.
 *
 * Paging is supported through the start and limit parameters; implementations
 * should return a Choices object describing the requested window of results.
*
* @param authorityName authority name
* @param start choice at which to start, 0 is first.
* @param limit maximum number of choices to return, 0 for no limit.
* @param locale explicit localization key if available, or null
* @return a Choices object (never null).
*/
public Choices getTopChoices(String authorityName, int start, int limit, String locale);
/**
 * Returns the entries placed directly below the given parent entry in the
 * hierarchical authority.
 *
 * Paging is supported through the start and limit parameters; implementations
 * should return a Choices object describing the requested window of results.
*
* @param authorityName authority name
 * @param parentId the id of the parent entry
* @param start choice at which to start, 0 is first.
* @param limit maximum number of choices to return, 0 for no limit.
* @param locale explicit localization key if available, or null
* @return a Choices object (never null).
*/
public Choices getChoicesByParent(String authorityName, String parentId, int start, int limit, String locale);
/**
* It returns the parent choice in the hierarchy if any
*
* @param authorityName authority name
 * @param vocabularyId the id of the entry whose parent is requested
* @param locale explicit localization key if available, or null
* @return a Choice object
*/
public Choice getParentChoice(String authorityName, String vocabularyId, String locale);
/**
 * Provides a hint for the UI to preload some levels to improve the UX. It
 * usually means that these preloaded levels will be shown expanded by default.
*/
public Integer getPreloadLevel();
@Override
default boolean isHierarchical() {
return true;
}
}
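Callers typically detect hierarchy support through the default isHierarchical() above and then downcast, which is exactly what the service wrappers do. A minimal sketch; the authority name is an example:

    ChoiceAuthority plugin = choiceAuthorityService.getChoiceAuthorityByAuthorityName("srsc");
    if (plugin.isHierarchical()) {
        HierarchicalAuthority tree = (HierarchicalAuthority) plugin;
        Integer levels = tree.getPreloadLevel();              // how many levels the UI may expand by default
        Choices top = tree.getTopChoices("srsc", 0, 20, null); // first page of root entries
    }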

View File

@@ -1,166 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.authority;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.app.util.DCInputsReader;
import org.dspace.app.util.DCInputsReaderException;
import org.dspace.content.Collection;
/**
* This authority is registered automatically by the ChoiceAuthorityService for
* all the metadata that use a value-pair or a vocabulary in the submission-form.xml
*
* It keeps a map of form-name vs ChoiceAuthority to delegate the execution of
* the method to the specific ChoiceAuthority configured for the collection when
* the same metadata have different vocabulary or value-pair on a collection
* basis
*
* @author Andrea Bollini (andrea.bollini at 4science.it)
*/
public class InputFormSelfRegisterWrapperAuthority implements ChoiceAuthority {
private static Logger log =
org.apache.logging.log4j.LogManager.getLogger(InputFormSelfRegisterWrapperAuthority.class);
private Map<String, ChoiceAuthority> delegates = new HashMap<String, ChoiceAuthority>();
private static DCInputsReader dci = null;
private void init() {
try {
if (dci == null) {
dci = new DCInputsReader();
}
} catch (DCInputsReaderException e) {
log.error("Failed reading DCInputs initialization: ", e);
}
}
@Override
public Choices getMatches(String field, String query, Collection collection, int start, int limit, String locale) {
String formName;
try {
init();
if (collection == null) {
Set<Choice> choices = new HashSet<Choice>();
//workaround search in all authority configured
for (ChoiceAuthority ca : delegates.values()) {
Choices tmp = ca.getMatches(field, query, null, start, limit, locale);
if (tmp.total > 0) {
Set<Choice> mySet = new HashSet<Choice>(Arrays.asList(tmp.values));
choices.addAll(mySet);
}
}
if (!choices.isEmpty()) {
Choice[] results = new Choice[choices.size()];
choices.toArray(results);
return new Choices(results, 0, choices.size(), Choices.CF_AMBIGUOUS, false);
}
} else {
formName = dci.getInputFormNameByCollectionAndField(collection, field);
return delegates.get(formName).getMatches(field, query, collection, start, limit, locale);
}
} catch (DCInputsReaderException e) {
log.error(e.getMessage(), e);
}
return new Choices(Choices.CF_NOTFOUND);
}
@Override
public Choices getBestMatch(String field, String text, Collection collection, String locale) {
String formName;
try {
init();
if (collection == null) {
Set<Choice> choices = new HashSet<Choice>();
//workaround search in all authority configured
for (ChoiceAuthority ca : delegates.values()) {
Choices tmp = ca.getBestMatch(field, text, null, locale);
if (tmp.total > 0) {
Set<Choice> mySet = new HashSet<Choice>(Arrays.asList(tmp.values));
choices.addAll(mySet);
}
}
if (!choices.isEmpty()) {
Choice[] results = new Choice[choices.size() - 1];
choices.toArray(results);
return new Choices(results, 0, choices.size(), Choices.CF_UNCERTAIN, false);
}
} else {
formName = dci.getInputFormNameByCollectionAndField(collection, field);
return delegates.get(formName).getBestMatch(field, text, collection, locale);
}
} catch (DCInputsReaderException e) {
log.error(e.getMessage(), e);
}
return new Choices(Choices.CF_NOTFOUND);
}
@Override
public String getLabel(String field, String key, String locale) {
// TODO we need to manage REALLY the authority
// WRONG BEHAVIOUR: now in each delegates can exists the same key with
// different value
for (ChoiceAuthority delegate : delegates.values()) {
String label = delegate.getLabel(field, key, locale);
if (StringUtils.isNotBlank(label)) {
return label;
}
}
return "UNKNOWN KEY " + key;
}
@Override
public boolean isHierarchical() {
// TODO we need to manage REALLY the authority
// WRONG BEHAVIOUR: now in each delegates can exists the same key with
// different value
for (ChoiceAuthority delegate : delegates.values()) {
return delegate.isHierarchical();
}
return false;
}
@Override
public boolean isScrollable() {
// TODO we need to manage REALLY the authority
// WRONG BEHAVIOUR: now in each delegates can exists the same key with
// different value
for (ChoiceAuthority delegate : delegates.values()) {
return delegate.isScrollable();
}
return false;
}
@Override
public boolean hasIdentifier() {
// TODO we need to manage REALLY the authority
// WRONG BEHAVIOUR: now in each delegates can exists the same key with
// different value
for (ChoiceAuthority delegate : delegates.values()) {
return delegate.hasIdentifier();
}
return false;
}
public Map<String, ChoiceAuthority> getDelegates() {
return delegates;
}
public void setDelegates(Map<String, ChoiceAuthority> delegates) {
this.delegates = delegates;
}
}

View File

@@ -14,12 +14,7 @@ import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.app.util.DCInput;
import org.dspace.app.util.DCInputSet;
import org.dspace.app.util.DCInputsReader;
import org.dspace.app.util.DCInputsReaderException;
import org.dspace.content.MetadataField; import org.dspace.content.MetadataField;
import org.dspace.content.authority.service.MetadataAuthorityService; import org.dspace.content.authority.service.MetadataAuthorityService;
import org.dspace.content.service.MetadataFieldService; import org.dspace.content.service.MetadataFieldService;
@@ -144,8 +139,6 @@ public class MetadataAuthorityServiceImpl implements MetadataAuthorityService {
if (dmc >= Choices.CF_UNSET) { if (dmc >= Choices.CF_UNSET) {
defaultMinConfidence = dmc; defaultMinConfidence = dmc;
} }
autoRegisterAuthorityFromInputReader();
} }
} }
@@ -205,7 +198,6 @@ public class MetadataAuthorityServiceImpl implements MetadataAuthorityService {
} }
} }
/** /**
* Give the minimal level of confidence required to consider valid an authority value * Give the minimal level of confidence required to consider valid an authority value
* for the given metadata. * for the given metadata.
@@ -229,35 +221,4 @@ public class MetadataAuthorityServiceImpl implements MetadataAuthorityService {
} }
return copy; return copy;
} }
private void autoRegisterAuthorityFromInputReader() {
try {
DCInputsReader dcInputsReader = new DCInputsReader();
for (DCInputSet dcinputSet : dcInputsReader.getAllInputs(Integer.MAX_VALUE, 0)) {
DCInput[][] dcinputs = dcinputSet.getFields();
for (DCInput[] dcrows : dcinputs) {
for (DCInput dcinput : dcrows) {
if (StringUtils.isNotBlank(dcinput.getPairsType())
|| StringUtils.isNotBlank(dcinput.getVocabulary())) {
String authorityName = dcinput.getPairsType();
if (StringUtils.isBlank(authorityName)) {
authorityName = dcinput.getVocabulary();
}
if (!StringUtils.equals(dcinput.getInputType(), "qualdrop_value")) {
String fieldKey = makeFieldKey(dcinput.getSchema(), dcinput.getElement(),
dcinput.getQualifier());
boolean req = ConfigurationManager
.getBooleanProperty("authority.required." + fieldKey, false);
controlled.put(fieldKey, true);
isAuthorityRequired.put(fieldKey, req);
}
}
}
}
}
} catch (DCInputsReaderException e) {
throw new IllegalStateException(e.getMessage(), e);
}
}
} }

View File

@@ -7,13 +7,13 @@
*/ */
package org.dspace.content.authority; package org.dspace.content.authority;
import org.dspace.content.Collection;
/** /**
* This is a *very* stupid test fixture for authority control, and also * This is a *very* stupid test fixture for authority control, and also
* serves as a trivial example of an authority plugin implementation. * serves as a trivial example of an authority plugin implementation.
*/ */
public class SampleAuthority implements ChoiceAuthority { public class SampleAuthority implements ChoiceAuthority {
private String pluginInstanceName;
protected static String values[] = { protected static String values[] = {
"sun", "sun",
"mon", "mon",
@@ -35,7 +35,7 @@ public class SampleAuthority implements ChoiceAuthority {
}; };
@Override @Override
public Choices getMatches(String field, String query, Collection collection, int start, int limit, String locale) { public Choices getMatches(String query, int start, int limit, String locale) {
int dflt = -1; int dflt = -1;
Choice v[] = new Choice[values.length]; Choice v[] = new Choice[values.length];
for (int i = 0; i < values.length; ++i) { for (int i = 0; i < values.length; ++i) {
@@ -48,7 +48,7 @@ public class SampleAuthority implements ChoiceAuthority {
} }
@Override @Override
public Choices getBestMatch(String field, String text, Collection collection, String locale) { public Choices getBestMatch(String text, String locale) {
for (int i = 0; i < values.length; ++i) { for (int i = 0; i < values.length; ++i) {
if (text.equalsIgnoreCase(values[i])) { if (text.equalsIgnoreCase(values[i])) {
Choice v[] = new Choice[1]; Choice v[] = new Choice[1];
@@ -60,7 +60,17 @@ public class SampleAuthority implements ChoiceAuthority {
} }
@Override @Override
public String getLabel(String field, String key, String locale) { public String getLabel(String key, String locale) {
return labels[Integer.parseInt(key)]; return labels[Integer.parseInt(key)];
} }
@Override
public String getPluginInstanceName() {
return pluginInstanceName;
}
@Override
public void setPluginInstanceName(String name) {
this.pluginInstanceName = name;
}
} }

View File

@@ -11,6 +11,7 @@ import java.util.ArrayList;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Map.Entry;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
@@ -24,8 +25,9 @@ import org.dspace.authority.AuthorityValue;
import org.dspace.authority.SolrAuthorityInterface; import org.dspace.authority.SolrAuthorityInterface;
import org.dspace.authority.factory.AuthorityServiceFactory; import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityValueService; import org.dspace.authority.service.AuthorityValueService;
import org.dspace.content.Collection;
import org.dspace.core.ConfigurationManager; import org.dspace.core.ConfigurationManager;
import org.dspace.core.NameAwarePlugin;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
/** /**
@@ -35,7 +37,14 @@ import org.dspace.services.factory.DSpaceServicesFactory;
* @author Mark Diggory (markd at atmire dot com) * @author Mark Diggory (markd at atmire dot com)
*/ */
public class SolrAuthority implements ChoiceAuthority { public class SolrAuthority implements ChoiceAuthority {
/** the name assigned to the specific instance by the PluginService, @see {@link NameAwarePlugin} **/
private String authorityName;
/**
* the metadata managed by the plugin instance, derived from its authority name
* in the form schema_element_qualifier
*/
private String field;
protected SolrAuthorityInterface source = protected SolrAuthorityInterface source =
DSpaceServicesFactory.getInstance().getServiceManager() DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName("AuthoritySource", SolrAuthorityInterface.class); .getServiceByName("AuthoritySource", SolrAuthorityInterface.class);
@@ -45,8 +54,9 @@ public class SolrAuthority implements ChoiceAuthority {
protected boolean externalResults = false; protected boolean externalResults = false;
protected final AuthorityValueService authorityValueService = AuthorityServiceFactory.getInstance() protected final AuthorityValueService authorityValueService = AuthorityServiceFactory.getInstance()
.getAuthorityValueService(); .getAuthorityValueService();
protected final ConfigurationService configurationService = DSpaceServicesFactory.getInstance()
public Choices getMatches(String field, String text, Collection collection, int start, int limit, String locale, .getConfigurationService();
public Choices getMatches(String text, int start, int limit, String locale,
boolean bestMatch) { boolean bestMatch) {
if (limit == 0) { if (limit == 0) {
limit = 10; limit = 10;
@@ -193,13 +203,13 @@ public class SolrAuthority implements ChoiceAuthority {
} }
@Override @Override
public Choices getMatches(String field, String text, Collection collection, int start, int limit, String locale) { public Choices getMatches(String text, int start, int limit, String locale) {
return getMatches(field, text, collection, start, limit, locale, true); return getMatches(text, start, limit, locale, true);
} }
@Override @Override
public Choices getBestMatch(String field, String text, Collection collection, String locale) { public Choices getBestMatch(String text, String locale) {
Choices matches = getMatches(field, text, collection, 0, 1, locale, false); Choices matches = getMatches(text, 0, 1, locale, false);
if (matches.values.length != 0 && !matches.values[0].value.equalsIgnoreCase(text)) { if (matches.values.length != 0 && !matches.values[0].value.equalsIgnoreCase(text)) {
matches = new Choices(false); matches = new Choices(false);
} }
@@ -207,7 +217,7 @@ public class SolrAuthority implements ChoiceAuthority {
} }
@Override @Override
public String getLabel(String field, String key, String locale) { public String getLabel(String key, String locale) {
try { try {
if (log.isDebugEnabled()) { if (log.isDebugEnabled()) {
log.debug("requesting label for key " + key + " using locale " + locale); log.debug("requesting label for key " + key + " using locale " + locale);
@@ -276,4 +286,23 @@ public class SolrAuthority implements ChoiceAuthority {
public void addExternalResultsInNextMatches() { public void addExternalResultsInNextMatches() {
this.externalResults = true; this.externalResults = true;
} }
@Override
public void setPluginInstanceName(String name) {
authorityName = name;
for (Entry conf : configurationService.getProperties().entrySet()) {
if (StringUtils.startsWith((String) conf.getKey(), ChoiceAuthorityServiceImpl.CHOICES_PLUGIN_PREFIX)
&& StringUtils.equals((String) conf.getValue(), authorityName)) {
field = ((String) conf.getKey()).substring(ChoiceAuthorityServiceImpl.CHOICES_PLUGIN_PREFIX.length())
.replace(".", "_");
// exit the loop immediately as we have found it
break;
}
}
}
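The field key is recovered by scanning the configuration for a choices plugin entry whose value equals the instance name. Assuming CHOICES_PLUGIN_PREFIX resolves to the usual "choices.plugin." prefix, a configuration sketch (metadata field and instance name are examples):

    choices.plugin.dc.contributor.author = SolrAuthorAuthority
    # setPluginInstanceName("SolrAuthorAuthority") would then derive field = "dc_contributor_author"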
@Override
public String getPluginInstanceName() {
return authorityName;
}
} }

View File

@@ -11,7 +11,6 @@ import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.dspace.content.Collection;
/** /**
* This is a *very* stupid test fixture for authority control with AuthorityVariantsSupport. * This is a *very* stupid test fixture for authority control with AuthorityVariantsSupport.
@@ -19,6 +18,7 @@ import org.dspace.content.Collection;
* @author Andrea Bollini (CILEA) * @author Andrea Bollini (CILEA)
*/ */
public class TestAuthority implements ChoiceAuthority, AuthorityVariantsSupport { public class TestAuthority implements ChoiceAuthority, AuthorityVariantsSupport {
private String pluginInstanceName;
@Override @Override
public List<String> getVariants(String key, String locale) { public List<String> getVariants(String key, String locale) {
@@ -33,8 +33,7 @@ public class TestAuthority implements ChoiceAuthority, AuthorityVariantsSupport
} }
@Override @Override
public Choices getMatches(String field, String text, Collection collection, public Choices getMatches(String text, int start, int limit, String locale) {
int start, int limit, String locale) {
Choices choices = new Choices(false); Choices choices = new Choices(false);
if (StringUtils.isNotBlank(text)) { if (StringUtils.isNotBlank(text)) {
@@ -52,8 +51,7 @@ public class TestAuthority implements ChoiceAuthority, AuthorityVariantsSupport
} }
@Override @Override
public Choices getBestMatch(String field, String text, Collection collection, public Choices getBestMatch(String text, String locale) {
String locale) {
Choices choices = new Choices(false); Choices choices = new Choices(false);
if (StringUtils.isNotBlank(text)) { if (StringUtils.isNotBlank(text)) {
@@ -70,10 +68,20 @@ public class TestAuthority implements ChoiceAuthority, AuthorityVariantsSupport
} }
@Override @Override
public String getLabel(String field, String key, String locale) { public String getLabel(String key, String locale) {
if (StringUtils.isNotBlank(key)) { if (StringUtils.isNotBlank(key)) {
return key.replaceAll("authority", "label"); return key.replaceAll("authority", "label");
} }
return "Unknown"; return "Unknown";
} }
@Override
public String getPluginInstanceName() {
return pluginInstanceName;
}
@Override
public void setPluginInstanceName(String name) {
this.pluginInstanceName = name;
}
} }

View File

@@ -48,10 +48,10 @@ public interface ChoiceAuthorityService {
* @param element element of metadata field * @param element element of metadata field
* @param qualifier qualifier of metadata field * @param qualifier qualifier of metadata field
* @return the name of the choice authority associated with the specified * @return the name of the choice authority associated with the specified
* metadata. Throw IllegalArgumentException if the supplied metadat * metadata. Throw IllegalArgumentException if the supplied metadata
* is not associated with an authority choice * is not associated with an authority choice
*/ */
public String getChoiceAuthorityName(String schema, String element, String qualifier); public String getChoiceAuthorityName(String schema, String element, String qualifier, Collection collection);
/** /**
* Wrapper that calls getMatches method of the plugin corresponding to * Wrapper that calls getMatches method of the plugin corresponding to
@@ -112,30 +112,33 @@ public interface ChoiceAuthorityService {
* the metadata field defined by schema,element,qualifier. * the metadata field defined by schema,element,qualifier.
* *
* @param metadataValue metadata value * @param metadataValue metadata value
* @param collection Collection owner of Item
* @param locale explicit localization key if available * @param locale explicit localization key if available
* @return label * @return label
*/ */
public String getLabel(MetadataValue metadataValue, String locale); public String getLabel(MetadataValue metadataValue, Collection collection, String locale);
/** /**
* Wrapper that calls getLabel method of the plugin corresponding to * Wrapper that calls getLabel method of the plugin corresponding to
* the metadata field defined by single field key. * the metadata field defined by single field key.
* *
* @param fieldKey single string identifying metadata field * @param fieldKey single string identifying metadata field
* @param collection Collection owner of Item
* @param locale explicit localization key if available * @param locale explicit localization key if available
* @param authKey authority key * @param authKey authority key
* @return label * @return label
*/ */
public String getLabel(String fieldKey, String authKey, String locale); public String getLabel(String fieldKey, Collection collection, String authKey, String locale);
/** /**
* Predicate, is there a Choices configuration of any kind for the * Predicate, is there a Choices configuration of any kind for the
* given metadata field? * given metadata field?
* *
* @param fieldKey single string identifying metadata field * @param fieldKey single string identifying metadata field
* @param collection Collection owner of Item
* @return true if choices are configured for this field. * @return true if choices are configured for this field.
*/ */
public boolean isChoicesConfigured(String fieldKey); public boolean isChoicesConfigured(String fieldKey, Collection collection);
/** /**
* Get the presentation keyword (should be "lookup", "select" or "suggest", but this * Get the presentation keyword (should be "lookup", "select" or "suggest", but this
@@ -160,12 +163,14 @@ public interface ChoiceAuthorityService {
* @param metadataValue metadata value * @param metadataValue metadata value
* @return List of variants * @return List of variants
*/ */
public List<String> getVariants(MetadataValue metadataValue); public List<String> getVariants(MetadataValue metadataValue, Collection collection);
public String getChoiceMetadatabyAuthorityName(String name);
public Choice getChoice(String fieldKey, String authKey, String locale);
/**
* Return the ChoiceAuthority instance identified by the specified name
*
* @param authorityName the ChoiceAuthority instance name
* @return the ChoiceAuthority identified by the specified name
*/
public ChoiceAuthority getChoiceAuthorityByAuthorityName(String authorityName); public ChoiceAuthority getChoiceAuthorityByAuthorityName(String authorityName);
/** /**
@@ -173,4 +178,49 @@ public interface ChoiceAuthorityService {
*/ */
public void clearCache(); public void clearCache();
/**
* Should we store the authority key (if any) for such field key and collection?
*
* @param fieldKey single string identifying metadata field
* @param collection Collection owner of Item or where the item is submitted to
* @return true if the configuration allows to store the authority value
*/
public boolean storeAuthority(String fieldKey, Collection collection);
/**
* Wrapper that calls getChoicesByParent method of the plugin.
*
* @param authorityName authority name
* @param parentId parent Id
* @param start choice at which to start, 0 is first.
* @param limit maximum number of choices to return, 0 for no limit.
* @param locale explicit localization key if available, or null
* @return a Choices object (never null).
* @see org.dspace.content.authority.ChoiceAuthority#getChoicesByParent(java.lang.String, java.lang.String,
* int, int, java.lang.String)
*/
public Choices getChoicesByParent(String authorityName, String parentId, int start, int limit, String locale);
/**
* Wrapper that calls getTopChoices method of the plugin.
*
* @param authorityName authority name
* @param start choice at which to start, 0 is first.
* @param limit maximum number of choices to return, 0 for no limit.
* @param locale explicit localization key if available, or null
* @return a Choices object (never null).
* @see org.dspace.content.authority.ChoiceAuthority#getTopChoices(java.lang.String, int, int, java.lang.String)
*/
public Choices getTopChoices(String authorityName, int start, int limit, String locale);
/**
 * Return the direct parent of an entry identified by its id in a hierarchical
* authority.
*
* @param authorityName authority name
* @param vocabularyId child id
* @param locale explicit localization key if available, or null
* @return the parent Choice object if any
*/
public Choice getParentChoice(String authorityName, String vocabularyId, String locale);
} }

View File

@@ -47,6 +47,19 @@ public interface ItemDAO extends DSpaceObjectLegacySupportDAO<Item> {
public Iterator<Item> findBySubmitter(Context context, EPerson eperson) throws SQLException; public Iterator<Item> findBySubmitter(Context context, EPerson eperson) throws SQLException;
/**
* Find all the items by a given submitter. The order is
* indeterminate. All items are included.
*
* @param context DSpace context object
* @param eperson the submitter
 * @param retrieveAllItems when true all items are returned, otherwise only archived ones
* @return an iterator over the items submitted by eperson
* @throws SQLException if database error
*/
public Iterator<Item> findBySubmitter(Context context, EPerson eperson, boolean retrieveAllItems)
throws SQLException;
public Iterator<Item> findBySubmitter(Context context, EPerson eperson, MetadataField metadataField, int limit) public Iterator<Item> findBySubmitter(Context context, EPerson eperson, MetadataField metadataField, int limit)
throws SQLException; throws SQLException;

View File

@@ -13,6 +13,7 @@ import java.util.List;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.GenericDAO; import org.dspace.core.GenericDAO;
import org.dspace.scripts.Process; import org.dspace.scripts.Process;
import org.dspace.scripts.ProcessQueryParameterContainer;
/** /**
* This is the Data Access Object for the {@link Process} object * This is the Data Access Object for the {@link Process} object
@@ -54,4 +55,30 @@ public interface ProcessDAO extends GenericDAO<Process> {
*/ */
int countRows(Context context) throws SQLException; int countRows(Context context) throws SQLException;
/**
* Returns a list of all Processes in the database which match the given field requirements. If the
* requirements are not null, they will be combined with an AND operation.
* @param context The relevant DSpace context
* @param processQueryParameterContainer The {@link ProcessQueryParameterContainer} containing all the values
* that the returned {@link Process} objects must adhere to
* @param limit The limit for the amount of Processes returned
* @param offset The offset for the Processes to be returned
 * @return The list of all Processes which match the given requirements
* @throws SQLException If something goes wrong
*/
List<Process> search(Context context, ProcessQueryParameterContainer processQueryParameterContainer, int limit,
int offset) throws SQLException;
/**
* Count all the processes which match the requirements. The requirements are evaluated like the search
* method.
* @param context The relevant DSpace context
* @param processQueryParameterContainer The {@link ProcessQueryParameterContainer} containing all the values
* that the returned {@link Process} objects must adhere to
* @return The number of results matching the query
* @throws SQLException If something goes wrong
*/
int countTotalWithParameters(Context context, ProcessQueryParameterContainer processQueryParameterContainer)
throws SQLException;
} }

View File

@@ -108,6 +108,17 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
return iterate(query); return iterate(query);
} }
@Override
public Iterator<Item> findBySubmitter(Context context, EPerson eperson, boolean retrieveAllItems)
throws SQLException {
if (!retrieveAllItems) {
return findBySubmitter(context, eperson);
}
Query query = createQuery(context, "FROM Item WHERE submitter= :submitter");
query.setParameter("submitter", eperson);
return iterate(query);
}
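A brief usage sketch of the new overload, assuming an injected ItemDAO, an open Context and an EPerson:

    Iterator<Item> allItems = itemDAO.findBySubmitter(context, eperson, true);      // every item by this submitter
    Iterator<Item> defaultItems = itemDAO.findBySubmitter(context, eperson, false); // delegates to the existing two-argument query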
@Override @Override
public Iterator<Item> findBySubmitter(Context context, EPerson eperson, MetadataField metadataField, int limit) public Iterator<Item> findBySubmitter(Context context, EPerson eperson, MetadataField metadataField, int limit)
throws SQLException { throws SQLException {

View File

@@ -8,15 +8,20 @@
package org.dspace.content.dao.impl; package org.dspace.content.dao.impl;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.Map;
import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root; import javax.persistence.criteria.Root;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.dao.ProcessDAO; import org.dspace.content.dao.ProcessDAO;
import org.dspace.core.AbstractHibernateDAO; import org.dspace.core.AbstractHibernateDAO;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.scripts.Process; import org.dspace.scripts.Process;
import org.dspace.scripts.ProcessQueryParameterContainer;
import org.dspace.scripts.Process_; import org.dspace.scripts.Process_;
/** /**
@@ -56,6 +61,7 @@ public class ProcessDAOImpl extends AbstractHibernateDAO<Process> implements Pro
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class); CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class);
Root<Process> processRoot = criteriaQuery.from(Process.class); Root<Process> processRoot = criteriaQuery.from(Process.class);
criteriaQuery.select(processRoot); criteriaQuery.select(processRoot);
criteriaQuery.orderBy(criteriaBuilder.desc(processRoot.get(Process_.processId)));
return list(context, criteriaQuery, false, Process.class, limit, offset); return list(context, criteriaQuery, false, Process.class, limit, offset);
} }
@@ -71,6 +77,76 @@ public class ProcessDAOImpl extends AbstractHibernateDAO<Process> implements Pro
return count(context, criteriaQuery, criteriaBuilder, processRoot); return count(context, criteriaQuery, criteriaBuilder, processRoot);
} }
@Override
public List<Process> search(Context context, ProcessQueryParameterContainer processQueryParameterContainer,
int limit, int offset) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class);
Root<Process> processRoot = criteriaQuery.from(Process.class);
criteriaQuery.select(processRoot);
handleProcessQueryParameters(processQueryParameterContainer, criteriaBuilder, criteriaQuery, processRoot);
return list(context, criteriaQuery, false, Process.class, limit, offset);
}
/**
* This method will ensure that the parameters contained in the {@link ProcessQueryParameterContainer} are applied
* to the query as conditions on the ProcessRoot and that the requested sort order is applied
* @param processQueryParameterContainer The object containing the conditions that need to be met
* @param criteriaBuilder The criteriaBuilder to be used
* @param criteriaQuery The criteriaQuery to be used
* @param processRoot The processRoot to be used
*/
private void handleProcessQueryParameters(ProcessQueryParameterContainer processQueryParameterContainer,
CriteriaBuilder criteriaBuilder, CriteriaQuery criteriaQuery,
Root<Process> processRoot) {
addProcessQueryParameters(processQueryParameterContainer, criteriaBuilder, criteriaQuery, processRoot);
if (StringUtils.equalsIgnoreCase(processQueryParameterContainer.getSortOrder(), "asc")) {
criteriaQuery
.orderBy(criteriaBuilder.asc(processRoot.get(processQueryParameterContainer.getSortProperty())));
} else if (StringUtils.equalsIgnoreCase(processQueryParameterContainer.getSortOrder(), "desc")) {
criteriaQuery
.orderBy(criteriaBuilder.desc(processRoot.get(processQueryParameterContainer.getSortProperty())));
}
}
/**
* This method will apply the variables in the {@link ProcessQueryParameterContainer} as criteria for the
* {@link Process} objects to the given CriteriaQuery.
* They'll need to adhere to these variables in order to be eligible for return
* @param processQueryParameterContainer The object containing the variables for the {@link Process}
* to adhere to
* @param criteriaBuilder The current CriteriaBuilder
* @param criteriaQuery The current CriteriaQuery
* @param processRoot The processRoot
*/
private void addProcessQueryParameters(ProcessQueryParameterContainer processQueryParameterContainer,
CriteriaBuilder criteriaBuilder, CriteriaQuery criteriaQuery,
Root<Process> processRoot) {
List<Predicate> andPredicates = new LinkedList<>();
for (Map.Entry<String, Object> entry : processQueryParameterContainer.getQueryParameterMap().entrySet()) {
andPredicates.add(criteriaBuilder.equal(processRoot.get(entry.getKey()), entry.getValue()));
}
criteriaQuery.where(criteriaBuilder.and(andPredicates.toArray(new Predicate[]{})));
}
@Override
public int countTotalWithParameters(Context context, ProcessQueryParameterContainer processQueryParameterContainer)
throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class);
Root<Process> processRoot = criteriaQuery.from(Process.class);
criteriaQuery.select(processRoot);
addProcessQueryParameters(processQueryParameterContainer, criteriaBuilder, criteriaQuery, processRoot);
return count(context, criteriaQuery, criteriaBuilder, processRoot);
}
} }
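The predicate-building pattern used by addProcessQueryParameters generalizes to any equality-filter map; a minimal standalone sketch of the same idea (entity-agnostic, names hypothetical):

import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;

public final class CriteriaFilterSketch {

    /** Adds one equality predicate per map entry and ANDs them together on the given query. */
    public static <T> void applyEqualityFilters(CriteriaBuilder cb, CriteriaQuery<T> query,
                                                Root<T> root, Map<String, Object> filters) {
        List<Predicate> predicates = new LinkedList<>();
        for (Map.Entry<String, Object> entry : filters.entrySet()) {
            predicates.add(cb.equal(root.get(entry.getKey()), entry.getValue()));
        }
        // An empty predicate array results in a conjunction that is always true, i.e. no filtering.
        query.where(cb.and(predicates.toArray(new Predicate[0])));
    }
}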

View File

@@ -200,10 +200,11 @@ public interface DSpaceObjectService<T extends DSpaceObject> {
* and the ISO3166 country code. <code>null</code> means the * and the ISO3166 country code. <code>null</code> means the
* value has no language (for example, a date). * value has no language (for example, a date).
* @param values the values to add. * @param values the values to add.
* @return the list of MetadataValues added to the object
* @throws SQLException if database error * @throws SQLException if database error
*/ */
public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang, public List<MetadataValue> addMetadata(Context context, T dso, String schema, String element, String qualifier,
List<String> values) throws SQLException; String lang, List<String> values) throws SQLException;
/** /**
* Add metadata fields. These are appended to existing values. * Add metadata fields. These are appended to existing values.
@@ -223,10 +224,11 @@ public interface DSpaceObjectService<T extends DSpaceObject> {
* @param values the values to add. * @param values the values to add.
* @param authorities the external authority key for this value (or null) * @param authorities the external authority key for this value (or null)
* @param confidences the authority confidence (default 0) * @param confidences the authority confidence (default 0)
* @return the list of MetadataValues added to the object
* @throws SQLException if database error * @throws SQLException if database error
*/ */
public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang, public List<MetadataValue> addMetadata(Context context, T dso, String schema, String element, String qualifier,
List<String> values, List<String> authorities, List<Integer> confidences) String lang, List<String> values, List<String> authorities, List<Integer> confidences)
throws SQLException; throws SQLException;
/** /**
@@ -243,32 +245,64 @@ public interface DSpaceObjectService<T extends DSpaceObject> {
* @param values the values to add. * @param values the values to add.
* @param authorities the external authority key for this value (or null) * @param authorities the external authority key for this value (or null)
* @param confidences the authority confidence (default 0) * @param confidences the authority confidence (default 0)
* @return the list of MetadataValues added to the object
* @throws SQLException if database error * @throws SQLException if database error
*/ */
public void addMetadata(Context context, T dso, MetadataField metadataField, String lang, List<String> values, public List<MetadataValue> addMetadata(Context context, T dso, MetadataField metadataField, String lang,
List<String> authorities, List<Integer> confidences) throws SQLException; List<String> values, List<String> authorities, List<Integer> confidences) throws SQLException;
/** /**
* Shortcut for {@link #addMetadata(Context, DSpaceObject, MetadataField, String, List, List, List)} when a single * Shortcut for {@link #addMetadata(Context, DSpaceObject, MetadataField, String, List, List, List)} when a single
* value need to be added * value need to be added
* *
* @param context * @param context DSpace context
* @param dso * @param dso DSpaceObject
* @param metadataField * @param metadataField the metadata field to which the value is to be set
* @param language * @param language the ISO639 language code, optionally followed by an underscore
* @param value * and the ISO3166 country code. <code>null</code> means the
* @param authority * value has no language (for example, a date).
* @param confidence * @param value the value to add.
* @param authority the external authority key for this value (or null)
* @param confidence the authority confidence (default 0)
* @return the MetadataValue added to the object
* @throws SQLException * @throws SQLException
*/ */
public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value, public MetadataValue addMetadata(Context context, T dso, MetadataField metadataField, String language,
String authority, int confidence) throws SQLException; String value, String authority, int confidence) throws SQLException;
public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value) /**
* Add a single metadata field value. This is appended to existing values.
* Use <code>clearMetadata</code> to remove values.
*
* @param context DSpace context
* @param dso DSpaceObject
* @param metadataField the metadata field to which the value is to be set
* @param language the ISO639 language code, optionally followed by an underscore
* and the ISO3166 country code. <code>null</code> means the
* value has no language (for example, a date).
* @param value the value to add.
* @return the MetadataValue added to the object
* @throws SQLException if database error
*/
public MetadataValue addMetadata(Context context, T dso, MetadataField metadataField, String language, String value)
throws SQLException; throws SQLException;
public void addMetadata(Context context, T dso, MetadataField metadataField, String language, List<String> values) /**
throws SQLException; * Add metadata field values. These are appended to existing values.
* Use <code>clearMetadata</code> to remove values.
*
* @param context DSpace context
* @param dso DSpaceObject
* @param metadataField the metadata field to which the value is to be set
* @param language the ISO639 language code, optionally followed by an underscore
* and the ISO3166 country code. <code>null</code> means the
* value has no language (for example, a date).
* @param values the values to add.
* @return the list of MetadataValues added to the object
* @throws SQLException if database error
*/
public List<MetadataValue> addMetadata(Context context, T dso, MetadataField metadataField, String language,
List<String> values) throws SQLException;
/** /**
* Add a single metadata field. This is appended to existing * Add a single metadata field. This is appended to existing
@@ -285,10 +319,11 @@ public interface DSpaceObjectService<T extends DSpaceObject> {
* and the ISO3166 country code. <code>null</code> means the * and the ISO3166 country code. <code>null</code> means the
* value has no language (for example, a date). * value has no language (for example, a date).
* @param value the value to add. * @param value the value to add.
* @return the MetadataValue added to the object
* @throws SQLException if database error * @throws SQLException if database error
*/ */
public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang, public MetadataValue addMetadata(Context context, T dso, String schema, String element, String qualifier,
String value) throws SQLException; String lang, String value) throws SQLException;
/** /**
* Add a single metadata field. This is appended to existing * Add a single metadata field. This is appended to existing
@@ -307,10 +342,11 @@ public interface DSpaceObjectService<T extends DSpaceObject> {
* @param value the value to add. * @param value the value to add.
* @param authority the external authority key for this value (or null) * @param authority the external authority key for this value (or null)
* @param confidence the authority confidence (default 0) * @param confidence the authority confidence (default 0)
* @return the MetadataValue added to the object
* @throws SQLException if database error * @throws SQLException if database error
*/ */
public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang, public MetadataValue addMetadata(Context context, T dso, String schema, String element, String qualifier,
String value, String authority, int confidence) throws SQLException; String lang, String value, String authority, int confidence) throws SQLException;
/** /**
* Clear metadata values. As with <code>getDC</code> above, * Clear metadata values. As with <code>getDC</code> above,
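A hedged sketch of what the new return values enable: the caller can post-process the freshly created value (for instance adjust its confidence) without re-querying the metadata. The dc.subject field and the confidence value are illustrative only:

import java.sql.SQLException;

import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;

public class AddMetadataSketch {

    /** Adds a dc.subject value and tweaks the returned MetadataValue before handing it back. */
    public static MetadataValue addSubject(Context context, ItemService itemService, Item item, String subject)
        throws SQLException {
        MetadataValue value = itemService.addMetadata(context, item, "dc", "subject", null, null, subject);
        value.setConfidence(-1); // illustrative: mark the new value as having no authority confidence
        return value;
    }
}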

View File

@@ -113,6 +113,21 @@ public interface ItemService
public Iterator<Item> findBySubmitter(Context context, EPerson eperson) public Iterator<Item> findBySubmitter(Context context, EPerson eperson)
throws SQLException; throws SQLException;
/**
* Find all the items by a given submitter. The order is
* indeterminate. All items are included.
*
* @param context DSpace context object
* @param eperson the submitter
* @param retrieveAllItems flag to determine if all items should be returned or only archived items.
* If true, all items (regardless of status) are returned.
* If false, only archived items will be returned.
* @return an iterator over the items submitted by eperson
* @throws SQLException if database error
*/
public Iterator<Item> findBySubmitter(Context context, EPerson eperson, boolean retrieveAllItems)
throws SQLException;
/** /**
* Retrieve the list of items submitted by eperson, ordered by recently submitted, optionally limitable * Retrieve the list of items submitted by eperson, ordered by recently submitted, optionally limitable
* *
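A minimal usage sketch of the new retrieveAllItems flag (assuming an already-resolved EPerson):

import java.sql.SQLException;
import java.util.Iterator;

import org.dspace.content.Item;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;

public class SubmitterItemsSketch {

    /** Counts every item submitted by the given eperson, regardless of archived status. */
    public static int countAllSubmissions(Context context, ItemService itemService, EPerson eperson)
        throws SQLException {
        Iterator<Item> items = itemService.findBySubmitter(context, eperson, true);
        int count = 0;
        while (items.hasNext()) {
            items.next();
            count++;
        }
        return count;
    }
}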

View File

@@ -190,7 +190,15 @@ public class Context implements AutoCloseable {
setMode(this.mode); setMode(this.mode);
} }
public static boolean updateDatabase() { /**
* Update the DSpace database, ensuring that any necessary migrations are run prior to initializing
* Hibernate.
* <P>
* This is synchronized as it only needs to be run successfully *once* (for the first Context initialized).
*
* @return true/false, based on whether database was successfully updated
*/
public static synchronized boolean updateDatabase() {
//If the database has not been updated yet, update it and remember that. //If the database has not been updated yet, update it and remember that.
if (databaseUpdated.compareAndSet(false, true)) { if (databaseUpdated.compareAndSet(false, true)) {
@@ -200,7 +208,7 @@ public class Context implements AutoCloseable {
try { try {
DatabaseUtils.updateDatabase(); DatabaseUtils.updateDatabase();
} catch (SQLException sqle) { } catch (SQLException sqle) {
log.fatal("Cannot initialize database via Flyway!", sqle); log.fatal("Cannot update or initialize database via Flyway!", sqle);
databaseUpdated.set(false); databaseUpdated.set(false);
} }
} }
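The run-once guard above combines method-level synchronization with an AtomicBoolean so a failed attempt can be retried later; a standalone sketch of the same pattern (class and method names hypothetical):

import java.util.concurrent.atomic.AtomicBoolean;

public class RunOnceGuardSketch {

    private static final AtomicBoolean initialized = new AtomicBoolean(false);

    /** Runs the expensive initialization at most once, resetting the flag on failure so it can be retried. */
    public static synchronized boolean ensureInitialized() {
        if (initialized.compareAndSet(false, true)) {
            try {
                expensiveInitialization();
            } catch (RuntimeException e) {
                initialized.set(false);
                return false;
            }
        }
        return initialized.get();
    }

    private static void expensiveInitialization() {
        // placeholder for work such as running database migrations
    }
}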
@@ -641,9 +649,9 @@ public class Context implements AutoCloseable {
/** /**
* Temporary change the user bound to the context, empty the special groups that * Temporary change the user bound to the context, empty the special groups that
* are retained to allow subsequent restore * are retained to allow subsequent restore
* *
* @param newUser the EPerson to bind to the context * @param newUser the EPerson to bind to the context
* *
* @throws IllegalStateException if the switch was already performed without being * @throws IllegalStateException if the switch was already performed without being
* restored * restored
*/ */
@@ -661,7 +669,7 @@ public class Context implements AutoCloseable {
/** /**
* Restore the user bound to the context and his special groups * Restore the user bound to the context and his special groups
* *
* @throws IllegalStateException if no switch was performed before * @throws IllegalStateException if no switch was performed before
*/ */
public void restoreContextUser() { public void restoreContextUser() {

View File

@@ -191,6 +191,23 @@ public class I18nUtil {
return supportedLocale; return supportedLocale;
} }
/**
* Gets the appropriate supported Locale for a given locale string. If
* no appropriate supported locale is found, the DEFAULTLOCALE is used
*
* @param locale String to find the corresponding Locale
* @return supportedLocale
* Locale for session according to locales supported by this DSpace instance as set in dspace.cfg
*/
public static Locale getSupportedLocale(String locale) {
Locale currentLocale = null;
if (locale != null) {
currentLocale = I18nUtil.getSupportedLocale(new Locale(locale));
} else {
currentLocale = I18nUtil.getDefaultLocale();
}
return currentLocale;
}
/** /**
* Get the appropriate localized version of submission-forms.xml according to language settings * Get the appropriate localized version of submission-forms.xml according to language settings
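A minimal sketch of the new String overload in use (the caller and parameter name are hypothetical):

import java.util.Locale;

import org.dspace.core.I18nUtil;

public class LocaleResolutionSketch {

    /** Resolves a user-supplied language tag to a locale supported by this DSpace instance. */
    public static Locale resolve(String requestedLanguage) {
        // null falls back to the default locale; unsupported values fall back to a supported locale
        return I18nUtil.getSupportedLocale(requestedLanguage);
    }
}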

View File

@@ -345,8 +345,8 @@ public class LegacyPluginServiceImpl implements PluginService {
" for interface=" + iname + " for interface=" + iname +
" pluginName=" + name); " pluginName=" + name);
Object result = pluginClass.newInstance(); Object result = pluginClass.newInstance();
if (result instanceof SelfNamedPlugin) { if (result instanceof NameAwarePlugin) {
((SelfNamedPlugin) result).setPluginInstanceName(name); ((NameAwarePlugin) result).setPluginInstanceName(name);
} }
return result; return result;
} }

View File

@@ -0,0 +1,42 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.core;
/**
* This is the interface that should be implemented by all named plugins that
* need to be aware of the name under which they were instantiated
*
* @author Andrea Bollini (andrea.bollini at 4science.it)
* @version $Revision$
* @see org.dspace.core.service.PluginService
*/
public interface NameAwarePlugin {
/**
* Get the instance's particular name.
* Returns the name by which the class was chosen when
* this instance was created. Only works for instances created
* by <code>PluginService</code>, or if someone remembers to call <code>setPluginName.</code>
* <p>
* Useful when the implementation class wants to be configured differently
* when it is invoked under different names.
*
* @return name or null if not available.
*/
public String getPluginInstanceName();
/**
* Set the name under which this plugin was instantiated.
* Not to be invoked by application code, it is
* called automatically by <code>PluginService.getNamedPlugin()</code>
* when the plugin is instantiated.
*
* @param name -- name used to select this class.
*/
public void setPluginInstanceName(String name);
}
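A hedged sketch of a plugin implementing NameAwarePlugin so it can configure itself per instance name (the configuration-key convention is illustrative):

import org.dspace.core.NameAwarePlugin;

public class ExampleNamedPlugin implements NameAwarePlugin {

    private String instanceName;

    @Override
    public String getPluginInstanceName() {
        return instanceName;
    }

    @Override
    public void setPluginInstanceName(String name) {
        this.instanceName = name;
    }

    /** Example of behaviour that varies with the name this instance was created under. */
    public String configurationPrefix() {
        return "plugin.named." + instanceName + ".";
    }
}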

View File

@@ -28,7 +28,7 @@ package org.dspace.core;
* @version $Revision$ * @version $Revision$
* @see org.dspace.core.service.PluginService * @see org.dspace.core.service.PluginService
*/ */
public abstract class SelfNamedPlugin { public abstract class SelfNamedPlugin implements NameAwarePlugin {
// the specific alias used to find the class that created this instance. // the specific alias used to find the class that created this instance.
private String myName = null; private String myName = null;
@@ -52,30 +52,13 @@ public abstract class SelfNamedPlugin {
return null; return null;
} }
/** @Override
* Get an instance's particular name.
* Returns the name by which the class was chosen when
* this instance was created. Only works for instances created
* by <code>PluginService</code>, or if someone remembers to call <code>setPluginName.</code>
* <p>
* Useful when the implementation class wants to be configured differently
* when it is invoked under different names.
*
* @return name or null if not available.
*/
public String getPluginInstanceName() { public String getPluginInstanceName() {
return myName; return myName;
} }
/** @Override
* Set the name under which this plugin was instantiated. public void setPluginInstanceName(String name) {
* Not to be invoked by application code, it is
* called automatically by <code>PluginService.getNamedPlugin()</code>
* when the plugin is instantiated.
*
* @param name -- name used to select this class.
*/
protected void setPluginInstanceName(String name) {
myName = name; myName = name;
} }
} }

View File

@@ -0,0 +1,371 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.Writer;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.UUID;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.output.NullOutputStream;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Context;
import org.dspace.core.factory.CoreServiceFactory;
import org.dspace.curate.factory.CurateServiceFactory;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.utils.DSpace;
/**
* CurationCli provides command-line access to Curation tools and processes.
*
* @author richardrodgers
*/
public class Curation extends DSpaceRunnable<CurationScriptConfiguration> {
protected EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
protected Context context;
private CurationClientOptions curationClientOptions;
private String task;
private String taskFile;
private String id;
private String queue;
private String scope;
private String reporter;
private Map<String, String> parameters;
private boolean verbose;
@Override
public void internalRun() throws Exception {
if (curationClientOptions == CurationClientOptions.HELP) {
printHelp();
return;
}
Curator curator = initCurator();
// load curation tasks
if (curationClientOptions == CurationClientOptions.TASK) {
long start = System.currentTimeMillis();
handleCurationTask(curator);
this.endScript(start);
}
// process task queue
if (curationClientOptions == CurationClientOptions.QUEUE) {
// process the task queue
TaskQueue taskQueue = (TaskQueue) CoreServiceFactory.getInstance().getPluginService()
.getSinglePlugin(TaskQueue.class);
if (taskQueue == null) {
super.handler.logError("No implementation configured for queue");
throw new UnsupportedOperationException("No queue service available");
}
long timeRun = this.runQueue(taskQueue, curator);
this.endScript(timeRun);
}
}
/**
* Does the curation task (-t) or the task in the given file (-T).
* Checks:
* - if required option -i is missing.
* - if option -t has a valid task option
*/
private void handleCurationTask(Curator curator) throws IOException, SQLException {
String taskName;
if (commandLine.hasOption('t')) {
if (verbose) {
handler.logInfo("Adding task: " + this.task);
}
curator.addTask(this.task);
if (verbose && !curator.hasTask(this.task)) {
handler.logInfo("Task: " + this.task + " not resolved");
}
} else if (commandLine.hasOption('T')) {
// load taskFile
BufferedReader reader = null;
try {
reader = new BufferedReader(new FileReader(this.taskFile));
while ((taskName = reader.readLine()) != null) {
if (verbose) {
super.handler.logInfo("Adding task: " + taskName);
}
curator.addTask(taskName);
}
} finally {
if (reader != null) {
reader.close();
}
}
}
// run tasks against object
if (verbose) {
super.handler.logInfo("Starting curation");
super.handler.logInfo("Curating id: " + this.id);
}
if ("all".equals(this.id)) {
// run on whole Site
curator.curate(context,
ContentServiceFactory.getInstance().getSiteService().findSite(context).getHandle());
} else {
curator.curate(context, this.id);
}
}
/**
* Runs task queue (-q set)
*
* @param queue The task queue
* @param curator The curator
* @return Time when queue started
*/
private long runQueue(TaskQueue queue, Curator curator) throws SQLException, AuthorizeException, IOException {
// use current time as our reader 'ticket'
long ticket = System.currentTimeMillis();
Iterator<TaskQueueEntry> entryIter = queue.dequeue(this.queue, ticket).iterator();
while (entryIter.hasNext()) {
TaskQueueEntry entry = entryIter.next();
if (verbose) {
super.handler.logInfo("Curating id: " + entry.getObjectId());
}
curator.clear();
// does entry relate to a DSO or workflow object?
if (entry.getObjectId().indexOf('/') > 0) {
for (String taskName : entry.getTaskNames()) {
curator.addTask(taskName);
}
curator.curate(context, entry.getObjectId());
} else {
// make eperson who queued task the effective user
EPerson agent = ePersonService.findByEmail(context, entry.getEpersonId());
if (agent != null) {
context.setCurrentUser(agent);
}
CurateServiceFactory.getInstance().getWorkflowCuratorService()
.curate(curator, context, entry.getObjectId());
}
}
queue.release(this.queue, ticket, true);
return ticket;
}
/**
* End of curation script; logs script time if -v verbose is set
*
* @param timeRun Time script was started
* @throws SQLException If the DSpace context can't complete
*/
private void endScript(long timeRun) throws SQLException {
context.complete();
if (verbose) {
long elapsed = System.currentTimeMillis() - timeRun;
this.handler.logInfo("Ending curation. Elapsed time: " + elapsed);
}
}
/**
* Initialize the curator with command line variables
*
* @return Initialised curator
* @throws FileNotFoundException If file of command line variable -r reporter is not found
*/
private Curator initCurator() throws FileNotFoundException {
Curator curator = new Curator();
OutputStream reporterStream;
if (null == this.reporter) {
reporterStream = new NullOutputStream();
} else if ("-".equals(this.reporter)) {
reporterStream = System.out;
} else {
reporterStream = new PrintStream(this.reporter);
}
Writer reportWriter = new OutputStreamWriter(reporterStream);
curator.setReporter(reportWriter);
if (this.scope != null) {
Curator.TxScope txScope = Curator.TxScope.valueOf(this.scope.toUpperCase());
curator.setTransactionScope(txScope);
}
curator.addParameters(parameters);
// we are operating in batch mode, if anyone cares.
curator.setInvoked(Curator.Invoked.BATCH);
return curator;
}
@Override
public void printHelp() {
super.printHelp();
super.handler.logInfo("\nwhole repo: CurationCli -t estimate -i all");
super.handler.logInfo("single item: CurationCli -t generate -i itemId");
super.handler.logInfo("task queue: CurationCli -q monthly");
}
@Override
public CurationScriptConfiguration getScriptConfiguration() {
return new DSpace().getServiceManager().getServiceByName("curate", CurationScriptConfiguration.class);
}
@Override
public void setup() throws ParseException {
assignCurrentUserInContext();
this.curationClientOptions = CurationClientOptions.getClientOption(commandLine);
if (this.curationClientOptions != null) {
this.initGeneralLineOptionsAndCheckIfValid();
if (curationClientOptions == CurationClientOptions.TASK) {
this.initTaskLineOptionsAndCheckIfValid();
} else if (curationClientOptions == CurationClientOptions.QUEUE) {
this.queue = this.commandLine.getOptionValue('q');
}
} else {
throw new IllegalArgumentException("[--help || --task|--taskfile <> -identifier <> || -queue <> ] must be" +
" specified");
}
}
/**
* This method will assign the currentUser to the {@link Context} variable which is also created in this method.
* The instance of the method in this class will fetch the EPersonIdentifier from this class, this identifier
* was given to this class upon instantiation, it'll then be used to find the {@link EPerson} associated with it
* and this {@link EPerson} will be set as the currentUser of the created {@link Context}
* @throws ParseException If something went wrong with the retrieval of the EPerson Identifier
*/
protected void assignCurrentUserInContext() throws ParseException {
UUID currentUserUuid = this.getEpersonIdentifier();
try {
this.context = new Context(Context.Mode.BATCH_EDIT);
EPerson eperson = ePersonService.find(context, currentUserUuid);
if (eperson == null) {
super.handler.logError("EPerson not found: " + currentUserUuid);
throw new IllegalArgumentException("Unable to find a user with uuid: " + currentUserUuid);
}
this.context.setCurrentUser(eperson);
} catch (SQLException e) {
handler.handleException("Something went wrong trying to fetch eperson for uuid: " + currentUserUuid, e);
}
}
/**
* Fills in some optional command line options.
* Checks if there are missing required options or invalid values for options.
*/
private void initGeneralLineOptionsAndCheckIfValid() {
// report file
if (this.commandLine.hasOption('r')) {
this.reporter = this.commandLine.getOptionValue('r');
}
// parameters
this.parameters = new HashMap<>();
if (this.commandLine.hasOption('p')) {
for (String parameter : this.commandLine.getOptionValues('p')) {
String[] parts = parameter.split("=", 2);
String name = parts[0].trim();
String value;
if (parts.length > 1) {
value = parts[1].trim();
} else {
value = "true";
}
this.parameters.put(name, value);
}
}
// verbose
verbose = false;
if (commandLine.hasOption('v')) {
verbose = true;
}
// scope
if (this.commandLine.getOptionValue('s') != null) {
this.scope = this.commandLine.getOptionValue('s');
if (this.scope != null && Curator.TxScope.valueOf(this.scope.toUpperCase()) == null) {
this.handler.logError("Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " +
"'open' recognized");
throw new IllegalArgumentException(
"Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " +
"'open' recognized");
}
}
}
/**
* Fills in required command line options for the task or taskFile option.
* Checks if the required -i option is missing and whether -i is either 'all' or a valid dso handle.
* Checks if -t task has a valid task option.
* Checks if -T taskfile is a valid file.
*/
private void initTaskLineOptionsAndCheckIfValid() {
// task or taskFile
if (this.commandLine.hasOption('t')) {
this.task = this.commandLine.getOptionValue('t');
if (!CurationClientOptions.getTaskOptions().contains(this.task)) {
super.handler
.logError("-t task must be one of: " + CurationClientOptions.getTaskOptions());
throw new IllegalArgumentException(
"-t task must be one of: " + CurationClientOptions.getTaskOptions());
}
} else if (this.commandLine.hasOption('T')) {
this.taskFile = this.commandLine.getOptionValue('T');
if (!(new File(this.taskFile).isFile())) {
super.handler
.logError("-T taskFile must be valid file: " + this.taskFile);
throw new IllegalArgumentException("-T taskFile must be valid file: " + this.taskFile);
}
}
if (this.commandLine.hasOption('i')) {
this.id = this.commandLine.getOptionValue('i').toLowerCase();
if (!this.id.equalsIgnoreCase("all")) {
HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
DSpaceObject dso;
try {
dso = handleService.resolveToObject(this.context, id);
} catch (SQLException e) {
super.handler.logError("SQLException trying to resolve handle " + id + " to a valid dso");
throw new IllegalArgumentException(
"SQLException trying to resolve handle " + id + " to a valid dso");
}
if (dso == null) {
super.handler.logError("Id must be specified: a valid dso handle or 'all'; " + this.id + " could " +
"not be resolved to valid dso handle");
throw new IllegalArgumentException(
"Id must be specified: a valid dso handle or 'all'; " + this.id + " could " +
"not be resolved to valid dso handle");
}
}
} else {
super.handler.logError("Id must be specified: a handle, 'all', or no -i and a -q task queue (-h for " +
"help)");
throw new IllegalArgumentException(
"Id must be specified: a handle, 'all', or no -i and a -q task queue (-h for " +
"help)");
}
}
}

View File

@@ -7,229 +7,27 @@
*/ */
package org.dspace.curate; package org.dspace.curate;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.Writer;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import org.apache.commons.io.output.NullOutputStream; import org.apache.commons.cli.ParseException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.factory.CoreServiceFactory;
import org.dspace.curate.factory.CurateServiceFactory;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.utils.DSpace;
/** /**
* CurationCli provides command-line access to Curation tools and processes. * This is the CLI version of the {@link Curation} script.
* * This will only be called when the curate script is called from a commandline instance.
* @author richardrodgers
*/ */
public class CurationCli extends DSpaceRunnable<CurationScriptConfiguration> { public class CurationCli extends Curation {
private EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
private Context context;
private CurationClientOptions curationClientOptions;
private String task;
private String taskFile;
private String id;
private String queue;
private String scope;
private String reporter;
private Map<String, String> parameters;
private boolean verbose;
@Override
public void internalRun() throws Exception {
if (curationClientOptions == CurationClientOptions.HELP) {
printHelp();
return;
}
Curator curator = initCurator();
// load curation tasks
if (curationClientOptions == CurationClientOptions.TASK) {
long start = System.currentTimeMillis();
handleCurationTask(curator);
this.endScript(start);
}
// process task queue
if (curationClientOptions == CurationClientOptions.QUEUE) {
// process the task queue
TaskQueue taskQueue = (TaskQueue) CoreServiceFactory.getInstance().getPluginService()
.getSinglePlugin(TaskQueue.class);
if (taskQueue == null) {
super.handler.logError("No implementation configured for queue");
throw new UnsupportedOperationException("No queue service available");
}
long timeRun = this.runQueue(taskQueue, curator);
this.endScript(timeRun);
}
}
/** /**
* Does the curation task (-t) or the task in the given file (-T). * This is the overridden instance of the {@link Curation#assignCurrentUserInContext()} method in the parent class
* Checks: * {@link Curation}.
* - if required option -i is missing. * This is done so that the CLI version of the Script is able to retrieve its currentUser from the -e flag given
* - if option -t has a valid task option * with the parameters of the Script.
* @throws ParseException If the e flag was not given to the parameters when calling the script
*/ */
private void handleCurationTask(Curator curator) throws IOException, SQLException {
String taskName;
if (commandLine.hasOption('t')) {
if (verbose) {
handler.logInfo("Adding task: " + this.task);
}
curator.addTask(this.task);
if (verbose && !curator.hasTask(this.task)) {
handler.logInfo("Task: " + this.task + " not resolved");
}
} else if (commandLine.hasOption('T')) {
// load taskFile
BufferedReader reader = null;
try {
reader = new BufferedReader(new FileReader(this.taskFile));
while ((taskName = reader.readLine()) != null) {
if (verbose) {
super.handler.logInfo("Adding task: " + taskName);
}
curator.addTask(taskName);
}
} finally {
if (reader != null) {
reader.close();
}
}
}
// run tasks against object
if (verbose) {
super.handler.logInfo("Starting curation");
super.handler.logInfo("Curating id: " + this.id);
}
if ("all".equals(this.id)) {
// run on whole Site
curator.curate(context,
ContentServiceFactory.getInstance().getSiteService().findSite(context).getHandle());
} else {
curator.curate(context, this.id);
}
}
/**
* Runs task queue (-q set)
*
* @param queue The task queue
* @param curator The curator
* @return Time when queue started
*/
private long runQueue(TaskQueue queue, Curator curator) throws SQLException, AuthorizeException, IOException {
// use current time as our reader 'ticket'
long ticket = System.currentTimeMillis();
Iterator<TaskQueueEntry> entryIter = queue.dequeue(this.queue, ticket).iterator();
while (entryIter.hasNext()) {
TaskQueueEntry entry = entryIter.next();
if (verbose) {
super.handler.logInfo("Curating id: " + entry.getObjectId());
}
curator.clear();
// does entry relate to a DSO or workflow object?
if (entry.getObjectId().indexOf('/') > 0) {
for (String taskName : entry.getTaskNames()) {
curator.addTask(taskName);
}
curator.curate(context, entry.getObjectId());
} else {
// make eperson who queued task the effective user
EPerson agent = ePersonService.findByEmail(context, entry.getEpersonId());
if (agent != null) {
context.setCurrentUser(agent);
}
CurateServiceFactory.getInstance().getWorkflowCuratorService()
.curate(curator, context, entry.getObjectId());
}
}
queue.release(this.queue, ticket, true);
return ticket;
}
/**
* End of curation script; logs script time if -v verbose is set
*
* @param timeRun Time script was started
* @throws SQLException If DSpace contextx can't complete
*/
private void endScript(long timeRun) throws SQLException {
context.complete();
if (verbose) {
long elapsed = System.currentTimeMillis() - timeRun;
this.handler.logInfo("Ending curation. Elapsed time: " + elapsed);
}
}
/**
* Initialize the curator with command line variables
*
* @return Initialised curator
* @throws FileNotFoundException If file of command line variable -r reporter is not found
*/
private Curator initCurator() throws FileNotFoundException {
Curator curator = new Curator();
OutputStream reporterStream;
if (null == this.reporter) {
reporterStream = new NullOutputStream();
} else if ("-".equals(this.reporter)) {
reporterStream = System.out;
} else {
reporterStream = new PrintStream(this.reporter);
}
Writer reportWriter = new OutputStreamWriter(reporterStream);
curator.setReporter(reportWriter);
if (this.scope != null) {
Curator.TxScope txScope = Curator.TxScope.valueOf(this.scope.toUpperCase());
curator.setTransactionScope(txScope);
}
curator.addParameters(parameters);
// we are operating in batch mode, if anyone cares.
curator.setInvoked(Curator.Invoked.BATCH);
return curator;
}
@Override @Override
public void printHelp() { protected void assignCurrentUserInContext() throws ParseException {
super.printHelp();
super.handler.logInfo("\nwhole repo: CurationCli -t estimate -i all");
super.handler.logInfo("single item: CurationCli -t generate -i itemId");
super.handler.logInfo("task queue: CurationCli -q monthly");
}
@Override
public CurationScriptConfiguration getScriptConfiguration() {
return new DSpace().getServiceManager().getServiceByName("curate", CurationScriptConfiguration.class);
}
@Override
public void setup() {
if (this.commandLine.hasOption('e')) { if (this.commandLine.hasOption('e')) {
String ePersonEmail = this.commandLine.getOptionValue('e'); String ePersonEmail = this.commandLine.getOptionValue('e');
this.context = new Context(Context.Mode.BATCH_EDIT); this.context = new Context(Context.Mode.BATCH_EDIT);
@@ -244,119 +42,7 @@ public class CurationCli extends DSpaceRunnable<CurationScriptConfiguration> {
throw new IllegalArgumentException("SQLException trying to find user with email: " + ePersonEmail); throw new IllegalArgumentException("SQLException trying to find user with email: " + ePersonEmail);
} }
} else { } else {
throw new IllegalArgumentException("Needs an -e to set eperson (admin)"); throw new ParseException("Required parameter -e missing!");
}
this.curationClientOptions = CurationClientOptions.getClientOption(commandLine);
if (this.curationClientOptions != null) {
this.initGeneralLineOptionsAndCheckIfValid();
if (curationClientOptions == CurationClientOptions.TASK) {
this.initTaskLineOptionsAndCheckIfValid();
} else if (curationClientOptions == CurationClientOptions.QUEUE) {
this.queue = this.commandLine.getOptionValue('q');
}
} else {
throw new IllegalArgumentException("[--help || --task|--taskfile <> -identifier <> || -queue <> ] must be" +
" specified");
}
}
/**
* Fills in some optional command line options.
* Checks if there are missing required options or invalid values for options.
*/
private void initGeneralLineOptionsAndCheckIfValid() {
// report file
if (this.commandLine.hasOption('r')) {
this.reporter = this.commandLine.getOptionValue('r');
}
// parameters
this.parameters = new HashMap<>();
if (this.commandLine.hasOption('p')) {
for (String parameter : this.commandLine.getOptionValues('p')) {
String[] parts = parameter.split("=", 2);
String name = parts[0].trim();
String value;
if (parts.length > 1) {
value = parts[1].trim();
} else {
value = "true";
}
this.parameters.put(name, value);
}
}
// verbose
verbose = false;
if (commandLine.hasOption('v')) {
verbose = true;
}
// scope
if (this.commandLine.getOptionValue('s') != null) {
this.scope = this.commandLine.getOptionValue('s');
if (this.scope != null && Curator.TxScope.valueOf(this.scope.toUpperCase()) == null) {
this.handler.logError("Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " +
"'open' recognized");
throw new IllegalArgumentException(
"Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " +
"'open' recognized");
}
}
}
/**
* Fills in required command line options for the task or taskFile option.
* Checks if there are is a missing required -i option and if -i is either 'all' or a valid dso handle.
* Checks if -t task has a valid task option.
* Checks if -T taskfile is a valid file.
*/
private void initTaskLineOptionsAndCheckIfValid() {
// task or taskFile
if (this.commandLine.hasOption('t')) {
this.task = this.commandLine.getOptionValue('t');
if (!CurationClientOptions.getTaskOptions().contains(this.task)) {
super.handler
.logError("-t task must be one of: " + CurationClientOptions.getTaskOptions());
throw new IllegalArgumentException(
"-t task must be one of: " + CurationClientOptions.getTaskOptions());
}
} else if (this.commandLine.hasOption('T')) {
this.taskFile = this.commandLine.getOptionValue('T');
if (!(new File(this.taskFile).isFile())) {
super.handler
.logError("-T taskFile must be valid file: " + this.taskFile);
throw new IllegalArgumentException("-T taskFile must be valid file: " + this.taskFile);
}
}
if (this.commandLine.hasOption('i')) {
this.id = this.commandLine.getOptionValue('i').toLowerCase();
if (!this.id.equalsIgnoreCase("all")) {
HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
DSpaceObject dso;
try {
dso = handleService.resolveToObject(this.context, id);
} catch (SQLException e) {
super.handler.logError("SQLException trying to resolve handle " + id + " to a valid dso");
throw new IllegalArgumentException(
"SQLException trying to resolve handle " + id + " to a valid dso");
}
if (dso == null) {
super.handler.logError("Id must be specified: a valid dso handle or 'all'; " + this.id + " could " +
"not be resolved to valid dso handle");
throw new IllegalArgumentException(
"Id must be specified: a valid dso handle or 'all'; " + this.id + " could " +
"not be resolved to valid dso handle");
}
}
} else {
super.handler.logError("Id must be specified: a handle, 'all', or no -i and a -q task queue (-h for " +
"help)");
throw new IllegalArgumentException(
"Id must be specified: a handle, 'all', or no -i and a -q task queue (-h for " +
"help)");
} }
} }
} }

View File

@@ -0,0 +1,26 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import org.apache.commons.cli.Options;
/**
* This is the CLI version of the {@link CurationScriptConfiguration} class that handles the configuration for the
* {@link CurationCli} script
*/
public class CurationCliScriptConfiguration extends CurationScriptConfiguration<Curation> {
@Override
public Options getOptions() {
options = super.getOptions();
options.addOption("e", "eperson", true, "email address of curating eperson");
options.getOption("e").setType(String.class);
options.getOption("e").setRequired(true);
return options;
}
}

View File

@@ -45,6 +45,11 @@ public enum CurationClientOptions {
return null; return null;
} }
/**
* This method will create all the possible Options for the {@link Curation} script.
* This will be used by {@link CurationScriptConfiguration}
* @return The options for the {@link Curation} script
*/
protected static Options constructOptions() { protected static Options constructOptions() {
Options options = new Options(); Options options = new Options();
@@ -54,7 +59,6 @@ public enum CurationClientOptions {
"Id (handle) of object to perform task on, or 'all' to perform on whole repository"); "Id (handle) of object to perform task on, or 'all' to perform on whole repository");
options.addOption("p", "parameter", true, "a task parameter 'NAME=VALUE'"); options.addOption("p", "parameter", true, "a task parameter 'NAME=VALUE'");
options.addOption("q", "queue", true, "name of task queue to process"); options.addOption("q", "queue", true, "name of task queue to process");
options.addOption("e", "eperson", true, "email address of curating eperson");
options.addOption("r", "reporter", true, options.addOption("r", "reporter", true,
"relative or absolute path to the desired report file. Use '-' to report to console. If absent, no " + "relative or absolute path to the desired report file. Use '-' to report to console. If absent, no " +
"reporting"); "reporting");

View File

@@ -16,11 +16,11 @@ import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
/** /**
* The {@link ScriptConfiguration} for the {@link CurationCli} script * The {@link ScriptConfiguration} for the {@link Curation} script
* *
* @author Maria Verdonck (Atmire) on 23/06/2020 * @author Maria Verdonck (Atmire) on 23/06/2020
*/ */
public class CurationScriptConfiguration<T extends CurationCli> extends ScriptConfiguration<T> { public class CurationScriptConfiguration<T extends Curation> extends ScriptConfiguration<T> {
@Autowired @Autowired
private AuthorizeService authorizeService; private AuthorizeService authorizeService;

View File

@@ -8,6 +8,7 @@
package org.dspace.discovery; package org.dspace.discovery;
import java.util.HashSet; import java.util.HashSet;
import java.util.Optional;
import java.util.Set; import java.util.Set;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
@@ -15,6 +16,7 @@ import org.dspace.content.Bundle;
import org.dspace.content.DSpaceObject; import org.dspace.content.DSpaceObject;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.discovery.indexobject.factory.IndexFactory;
import org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory; import org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory;
import org.dspace.event.Consumer; import org.dspace.event.Consumer;
import org.dspace.event.Event; import org.dspace.event.Event;
@@ -67,7 +69,7 @@ public class IndexEventConsumer implements Consumer {
int st = event.getSubjectType(); int st = event.getSubjectType();
if (!(st == Constants.ITEM || st == Constants.BUNDLE if (!(st == Constants.ITEM || st == Constants.BUNDLE
|| st == Constants.COLLECTION || st == Constants.COMMUNITY)) { || st == Constants.COLLECTION || st == Constants.COMMUNITY || st == Constants.SITE)) {
log log
.warn("IndexConsumer should not have been given this kind of Subject in an event, skipping: " .warn("IndexConsumer should not have been given this kind of Subject in an event, skipping: "
+ event.toString()); + event.toString());
@@ -104,10 +106,28 @@ public class IndexEventConsumer implements Consumer {
case Event.MODIFY: case Event.MODIFY:
case Event.MODIFY_METADATA: case Event.MODIFY_METADATA:
if (subject == null) { if (subject == null) {
log.warn(event.getEventTypeAsString() + " event, could not get object for " if (st == Constants.SITE) {
// Update the indexable objects of type in event.detail of objects with ids in event.identifiers
for (String id : event.getIdentifiers()) {
IndexFactory indexableObjectService = IndexObjectFactoryFactory.getInstance().
getIndexFactoryByType(event.getDetail());
Optional<IndexableObject> indexableObject = Optional.empty();
indexableObject = indexableObjectService.findIndexableObject(ctx, id);
if (indexableObject.isPresent()) {
log.debug("consume() adding event to update queue: " + event.toString());
objectsToUpdate
.addAll(indexObjectServiceFactory
.getIndexableObjects(ctx, indexableObject.get().getIndexedObject()));
} else {
log.warn("Cannot resolve " + id);
}
}
} else {
log.warn(event.getEventTypeAsString() + " event, could not get object for "
+ event.getSubjectTypeAsString() + " id=" + event.getSubjectTypeAsString() + " id="
+ event.getSubjectID() + event.getSubjectID()
+ ", perhaps it has been deleted."); + ", perhaps it has been deleted.");
}
} else { } else {
log.debug("consume() adding event to update queue: " + event.toString()); log.debug("consume() adding event to update queue: " + event.toString());
objectsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, subject)); objectsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, subject));

View File

@@ -17,6 +17,7 @@ import org.apache.logging.log4j.Logger;
import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputDocument;
import org.dspace.browse.BrowseException; import org.dspace.browse.BrowseException;
import org.dspace.browse.BrowseIndex; import org.dspace.browse.BrowseIndex;
import org.dspace.content.Collection;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.MetadataValue; import org.dspace.content.MetadataValue;
import org.dspace.content.authority.service.ChoiceAuthorityService; import org.dspace.content.authority.service.ChoiceAuthorityService;
@@ -63,7 +64,7 @@ public class SolrServiceMetadataBrowseIndexingPlugin implements SolrServiceIndex
return; return;
} }
Item item = ((IndexableItem) indexableObject).getIndexedObject(); Item item = ((IndexableItem) indexableObject).getIndexedObject();
Collection collection = item.getOwningCollection();
// Get the currently configured browse indexes // Get the currently configured browse indexes
BrowseIndex[] bis; BrowseIndex[] bis;
try { try {
@@ -175,7 +176,7 @@ public class SolrServiceMetadataBrowseIndexingPlugin implements SolrServiceIndex
true); true);
if (!ignorePrefered) { if (!ignorePrefered) {
preferedLabel = choiceAuthorityService preferedLabel = choiceAuthorityService
.getLabel(values.get(x), values.get(x).getLanguage()); .getLabel(values.get(x), collection, values.get(x).getLanguage());
} }
List<String> variants = null; List<String> variants = null;
@@ -195,7 +196,7 @@ public class SolrServiceMetadataBrowseIndexingPlugin implements SolrServiceIndex
if (!ignoreVariants) { if (!ignoreVariants) {
variants = choiceAuthorityService variants = choiceAuthorityService
.getVariants( .getVariants(
values.get(x)); values.get(x), collection);
} }
if (StringUtils if (StringUtils

View File

@@ -0,0 +1,43 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.discovery.indexobject;
import java.io.Serializable;
import org.dspace.core.ReloadableEntity;
import org.dspace.discovery.IndexableObject;
/**
* This class exists in order to provide a default implementation for the equals and hashCode methods.
* Since IndexableObjects can be created multiple times for the same underlying object, we needed more fine-tuned
* equals and hashCode methods. We simply check that the underlying objects are equal and generate the hashCode
* of the underlying object. This way, calling equals or hashCode on an IndexableObject always yields a proper
* result, because it depends on the underlying object.
* @param <T> Refers to the underlying entity that is linked to this object
* @param <PK> The type of ID that this entity uses
*/
public abstract class AbstractIndexableObject<T extends ReloadableEntity<PK>, PK extends Serializable>
implements IndexableObject<T,PK> {
@Override
public boolean equals(Object obj) {
//Two IndexableObjects of the same DSpaceObject are considered equal
if (!(obj instanceof AbstractIndexableObject)) {
return false;
}
// Cast to the abstract base type so that non-DSpaceObject indexables compare correctly as well
AbstractIndexableObject<?, ?> other = (AbstractIndexableObject<?, ?>) obj;
return other.getIndexedObject().equals(getIndexedObject());
}
@Override
public int hashCode() {
//Two IndexableObjects of the same DSpaceObject are considered equal
return getIndexedObject().hashCode();
}
}

View File

@@ -12,6 +12,7 @@ import java.sql.SQLException;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
import org.apache.commons.collections4.ListUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.SolrServerException;
@@ -56,7 +57,7 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
doc.addField(SearchUtils.RESOURCE_ID_FIELD, indexableObject.getID().toString()); doc.addField(SearchUtils.RESOURCE_ID_FIELD, indexableObject.getID().toString());
//Do any additional indexing, depends on the plugins //Do any additional indexing, depends on the plugins
for (SolrServiceIndexPlugin solrServiceIndexPlugin : solrServiceIndexPlugins) { for (SolrServiceIndexPlugin solrServiceIndexPlugin : ListUtils.emptyIfNull(solrServiceIndexPlugins)) {
solrServiceIndexPlugin.additionalIndex(context, indexableObject, doc); solrServiceIndexPlugin.additionalIndex(context, indexableObject, doc);
} }
@@ -190,4 +191,4 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
public void deleteAll() throws IOException, SolrServerException { public void deleteAll() throws IOException, SolrServerException {
solrSearchCore.getSolr().deleteByQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":" + getType()); solrSearchCore.getSolr().deleteByQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":" + getType());
} }
} }

View File

@@ -7,7 +7,6 @@
*/ */
package org.dspace.discovery.indexobject; package org.dspace.discovery.indexobject;
import org.dspace.discovery.IndexableObject;
import org.dspace.xmlworkflow.storedcomponents.ClaimedTask; import org.dspace.xmlworkflow.storedcomponents.ClaimedTask;
/** /**
@@ -15,7 +14,7 @@ import org.dspace.xmlworkflow.storedcomponents.ClaimedTask;
* *
* @author Kevin Van de Velde (kevin at atmire dot com) * @author Kevin Van de Velde (kevin at atmire dot com)
*/ */
public class IndexableClaimedTask implements IndexableObject<ClaimedTask, Integer> { public class IndexableClaimedTask extends AbstractIndexableObject<ClaimedTask, Integer> {
private ClaimedTask claimedTask; private ClaimedTask claimedTask;
public static final String TYPE = ClaimedTask.class.getSimpleName(); public static final String TYPE = ClaimedTask.class.getSimpleName();

View File

@@ -10,7 +10,6 @@ package org.dspace.discovery.indexobject;
import java.util.UUID; import java.util.UUID;
import org.dspace.content.DSpaceObject; import org.dspace.content.DSpaceObject;
import org.dspace.discovery.IndexableObject;
/** /**
* DSpaceObject implementation for the IndexableObject, contains methods used by all DSpaceObject methods * DSpaceObject implementation for the IndexableObject, contains methods used by all DSpaceObject methods
@@ -18,7 +17,7 @@ import org.dspace.discovery.IndexableObject;
* *
* @author Kevin Van de Velde (kevin at atmire dot com) * @author Kevin Van de Velde (kevin at atmire dot com)
*/ */
public abstract class IndexableDSpaceObject<T extends DSpaceObject> implements IndexableObject<T, UUID> { public abstract class IndexableDSpaceObject<T extends DSpaceObject> extends AbstractIndexableObject<T, UUID> {
private T dso; private T dso;
@@ -40,4 +39,6 @@ public abstract class IndexableDSpaceObject<T extends DSpaceObject> implements I
public UUID getID() { public UUID getID() {
return dso.getID(); return dso.getID();
} }
}
}

View File

@@ -8,14 +8,13 @@
package org.dspace.discovery.indexobject; package org.dspace.discovery.indexobject;
import org.dspace.content.InProgressSubmission; import org.dspace.content.InProgressSubmission;
import org.dspace.discovery.IndexableObject;
/** /**
* InProgressSubmission implementation for the IndexableObject * InProgressSubmission implementation for the IndexableObject
* @author Kevin Van de Velde (kevin at atmire dot com) * @author Kevin Van de Velde (kevin at atmire dot com)
*/ */
public abstract class IndexableInProgressSubmission<T extends InProgressSubmission>
-        implements IndexableObject<T, Integer> {
+        extends AbstractIndexableObject<T, Integer> {
protected T inProgressSubmission; protected T inProgressSubmission;


@@ -0,0 +1,51 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.discovery.indexobject;
import org.dspace.content.MetadataField;
import org.dspace.discovery.IndexableObject;
/**
* {@link MetadataField} implementation for the {@link IndexableObject}
*
* @author Maria Verdonck (Atmire) on 14/07/2020
*/
public class IndexableMetadataField extends AbstractIndexableObject<MetadataField, Integer> {
private MetadataField metadataField;
public static final String TYPE = MetadataField.class.getSimpleName();
public IndexableMetadataField(MetadataField metadataField) {
this.metadataField = metadataField;
}
@Override
public String getType() {
return TYPE;
}
@Override
public Integer getID() {
return this.metadataField.getID();
}
@Override
public MetadataField getIndexedObject() {
return this.metadataField;
}
@Override
public void setIndexedObject(MetadataField metadataField) {
this.metadataField = metadataField;
}
@Override
public String getTypeText() {
return TYPE.toUpperCase();
}
}


@@ -7,14 +7,13 @@
*/ */
package org.dspace.discovery.indexobject; package org.dspace.discovery.indexobject;
import org.dspace.discovery.IndexableObject;
import org.dspace.xmlworkflow.storedcomponents.PoolTask; import org.dspace.xmlworkflow.storedcomponents.PoolTask;
/** /**
* PoolTask implementation for the IndexableObject * PoolTask implementation for the IndexableObject
* @author Kevin Van de Velde (kevin at atmire dot com) * @author Kevin Van de Velde (kevin at atmire dot com)
*/ */
-public class IndexablePoolTask implements IndexableObject<PoolTask, Integer> {
+public class IndexablePoolTask extends AbstractIndexableObject<PoolTask, Integer> {
public static final String TYPE = PoolTask.class.getSimpleName(); public static final String TYPE = PoolTask.class.getSimpleName();


@@ -173,6 +173,8 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
public void addDiscoveryFields(SolrInputDocument doc, Context context, Item item, public void addDiscoveryFields(SolrInputDocument doc, Context context, Item item,
List<DiscoveryConfiguration> discoveryConfigurations) List<DiscoveryConfiguration> discoveryConfigurations)
throws SQLException, IOException { throws SQLException, IOException {
// Use the item service to retrieve the owning collection, also for in-progress submissions.
Collection collection = (Collection) itemService.getParentObject(context, item);
//Keep a list of our sort values which we added, sort values can only be added once //Keep a list of our sort values which we added, sort values can only be added once
List<String> sortFieldsAdded = new ArrayList<>(); List<String> sortFieldsAdded = new ArrayList<>();
Map<String, List<DiscoverySearchFilter>> searchFilters = null; Map<String, List<DiscoverySearchFilter>> searchFilters = null;
@@ -359,7 +361,7 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
if (!ignorePrefered) { if (!ignorePrefered) {
preferedLabel = choiceAuthorityService
-        .getLabel(meta, meta.getLanguage());
+        .getLabel(meta, collection, meta.getLanguage());
} }
boolean ignoreVariants = boolean ignoreVariants =
@@ -375,7 +377,7 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
true); true);
if (!ignoreVariants) { if (!ignoreVariants) {
variants = choiceAuthorityService
-        .getVariants(meta);
+        .getVariants(meta, collection);
} }
} }
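
The two authority lookups above now receive the owning collection, so collection-specific authority configuration is honoured even while the item is still an in-progress submission. An illustrative fragment of the resulting call pattern, using only the services already referenced in this hunk (not runnable on its own):

    // Resolve the owning collection once; ItemService.getParentObject also resolves it
    // for items that still belong to a workspace or workflow submission.
    Collection collection = (Collection) itemService.getParentObject(context, item);

    // Collection-aware authority lookups, mirroring the change above.
    String preferedLabel = choiceAuthorityService.getLabel(meta, collection, meta.getLanguage());
    List<String> variants = choiceAuthorityService.getVariants(meta, collection);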


@@ -0,0 +1,109 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.discovery.indexobject;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import org.apache.commons.lang3.StringUtils;
import org.apache.solr.common.SolrInputDocument;
import org.dspace.content.MetadataField;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.core.Context;
import org.dspace.discovery.indexobject.factory.MetadataFieldIndexFactory;
import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.GroupService;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Factory implementation for indexing/retrieving {@link org.dspace.content.MetadataField} items in the search core
*
* @author Maria Verdonck (Atmire) on 14/07/2020
*/
public class MetadataFieldIndexFactoryImpl extends IndexFactoryImpl<IndexableMetadataField, MetadataField>
implements MetadataFieldIndexFactory {
public static final String SCHEMA_FIELD_NAME = "schema";
public static final String ELEMENT_FIELD_NAME = "element";
public static final String QUALIFIER_FIELD_NAME = "qualifier";
public static final String FIELD_NAME_VARIATIONS = "fieldName";
protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService();
@Override
public SolrInputDocument buildDocument(Context context, IndexableMetadataField indexableObject) throws SQLException,
IOException {
// Add the ID's, types and call the SolrServiceIndexPlugins
final SolrInputDocument doc = super.buildDocument(context, indexableObject);
final MetadataField metadataField = indexableObject.getIndexedObject();
// add schema, element, qualifier and full fieldName
addFacetIndex(doc, SCHEMA_FIELD_NAME, metadataField.getMetadataSchema().getName(),
metadataField.getMetadataSchema().getName());
addFacetIndex(doc, ELEMENT_FIELD_NAME, metadataField.getElement(), metadataField.getElement());
String fieldName = metadataField.toString().replace('_', '.');
addFacetIndex(doc, FIELD_NAME_VARIATIONS, fieldName, fieldName);
if (StringUtils.isNotBlank(metadataField.getQualifier())) {
addFacetIndex(doc, QUALIFIER_FIELD_NAME, metadataField.getQualifier(), metadataField.getQualifier());
addFacetIndex(doc, FIELD_NAME_VARIATIONS, fieldName,
metadataField.getElement() + "." + metadataField.getQualifier());
addFacetIndex(doc, FIELD_NAME_VARIATIONS, metadataField.getQualifier(), metadataField.getQualifier());
} else {
addFacetIndex(doc, FIELD_NAME_VARIATIONS, metadataField.getElement(), metadataField.getElement());
}
addNamedResourceTypeIndex(doc, indexableObject.getTypeText());
Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS);
// add read permission on doc for anonymous group
doc.addField("read", "g" + anonymousGroup.getID());
return doc;
}
@Autowired
private MetadataFieldService metadataFieldService;
@Override
public Iterator<IndexableMetadataField> findAll(Context context) throws SQLException {
final Iterator<MetadataField> metadataFields = metadataFieldService.findAll(context).iterator();
return new Iterator<>() {
@Override
public boolean hasNext() {
return metadataFields.hasNext();
}
@Override
public IndexableMetadataField next() {
return new IndexableMetadataField(metadataFields.next());
}
};
}
@Override
public String getType() {
return IndexableMetadataField.TYPE;
}
@Override
public Optional<IndexableMetadataField> findIndexableObject(Context context, String id) throws SQLException {
final MetadataField metadataField = metadataFieldService.find(context, Integer.parseInt(id));
return metadataField == null ? Optional.empty() : Optional.of(new IndexableMetadataField(metadataField));
}
@Override
public boolean supports(Object object) {
return object instanceof MetadataField;
}
@Override
public List getIndexableObjects(Context context, MetadataField object) {
return Arrays.asList(new IndexableMetadataField(object));
}
}
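
To make the fieldName variations concrete: for a qualified field such as dc.contributor.author, buildDocument above adds the full field name, the element.qualifier form and the bare qualifier as facet values; for an unqualified field such as dc.title it adds the full name and the bare element. A standalone sketch of that string logic (the example fields are illustrative, not taken from a real registry):

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.commons.lang3.StringUtils;

    public class FieldNameVariationsDemo {

        static List<String> variations(String schema, String element, String qualifier) {
            // Mirrors metadataField.toString().replace('_', '.') for the full field name.
            String fieldName = StringUtils.isNotBlank(qualifier)
                    ? schema + "." + element + "." + qualifier
                    : schema + "." + element;
            List<String> values = new ArrayList<>();
            values.add(fieldName);
            if (StringUtils.isNotBlank(qualifier)) {
                values.add(element + "." + qualifier);
                values.add(qualifier);
            } else {
                values.add(element);
            }
            return values;
        }

        public static void main(String[] args) {
            System.out.println(variations("dc", "contributor", "author")); // [dc.contributor.author, contributor.author, author]
            System.out.println(variations("dc", "title", null));           // [dc.title, title]
        }
    }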


@@ -0,0 +1,19 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.discovery.indexobject.factory;
import org.dspace.content.MetadataField;
import org.dspace.discovery.indexobject.IndexableMetadataField;
/**
* Factory interface for indexing/retrieving {@link org.dspace.content.MetadataField} items in the search core
*
* @author Maria Verdonck (Atmire) on 14/07/2020
*/
public interface MetadataFieldIndexFactory extends IndexFactory<IndexableMetadataField, MetadataField> {
}


@@ -141,7 +141,7 @@ public class EPerson extends DSpaceObject implements DSpaceObjectLegacySupport {
return false; return false;
} }
final EPerson other = (EPerson) obj; final EPerson other = (EPerson) obj;
-if (this.getID() != other.getID()) {
+if (!this.getID().equals(other.getID())) {
return false; return false;
} }
if (!StringUtils.equals(this.getEmail(), other.getEmail())) { if (!StringUtils.equals(this.getEmail(), other.getEmail())) {
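
The switch from != to equals() matters because EPerson IDs are java.util.UUID objects: reference comparison can report two logically equal IDs as different once they are materialised as separate instances. A standalone illustration (the UUID value is arbitrary):

    import java.util.UUID;

    public class UuidEqualityDemo {
        public static void main(String[] args) {
            UUID first = UUID.fromString("c5bca167-9564-4c39-9bcd-1a05ef52c951");
            UUID second = UUID.fromString("c5bca167-9564-4c39-9bcd-1a05ef52c951");

            System.out.println(first != second);        // true  - two distinct instances
            System.out.println(!first.equals(second));  // false - same logical identifier
        }
    }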


@@ -7,8 +7,11 @@
*/ */
package org.dspace.eperson; package org.dspace.eperson;
import java.io.BufferedReader;
import java.io.IOException; import java.io.IOException;
import java.io.InputStreamReader;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.List;
import java.util.Locale; import java.util.Locale;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
@@ -196,7 +199,6 @@ public class EPersonCLITool {
try { try {
ePersonService.update(context, eperson); ePersonService.update(context, eperson);
context.complete();
System.out.printf("Created EPerson %s\n", eperson.getID().toString()); System.out.printf("Created EPerson %s\n", eperson.getID().toString());
} catch (SQLException ex) { } catch (SQLException ex) {
context.abort(); context.abort();
@@ -259,16 +261,26 @@ public class EPersonCLITool {
        }
        try {
-           ePersonService.delete(context, eperson);
-           context.complete();
-           System.out.printf("Deleted EPerson %s\n", eperson.getID().toString());
-       } catch (SQLException ex) {
-           System.err.println(ex.getMessage());
-           return 1;
-       } catch (AuthorizeException ex) {
-           System.err.println(ex.getMessage());
-           return 1;
-       } catch (IOException ex) {
+           List<String> tableList = ePersonService.getDeleteConstraints(context, eperson);
+           if (!tableList.isEmpty()) {
+               System.out.printf("The EPerson with ID: %s is referenced by the following database tables:%n",
+                       eperson.getID().toString());
+               tableList.forEach((s) -> {
+                   System.out.println(s);
+               });
+           }
+           System.out.printf("Are you sure you want to delete this EPerson with ID: %s? (y or n): ",
+                   eperson.getID().toString());
+           BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
+           System.out.flush();
+           String s = input.readLine();
+           if (s != null && s.trim().toLowerCase().startsWith("y")) {
+               ePersonService.delete(context, eperson);
+               System.out.printf("%nDeleted EPerson with ID: %s", eperson.getID().toString());
+           } else {
+               System.out.printf("%nAbort Deletion of EPerson with ID: %s %n", eperson.getID().toString());
+           }
+       } catch (SQLException | AuthorizeException | IOException ex) {
            System.err.println(ex.getMessage());
            return 1;
        }
@@ -373,7 +385,6 @@ public class EPersonCLITool {
if (modified) { if (modified) {
try { try {
ePersonService.update(context, eperson); ePersonService.update(context, eperson);
context.complete();
System.out.printf("Modified EPerson %s\n", eperson.getID().toString()); System.out.printf("Modified EPerson %s\n", eperson.getID().toString());
} catch (SQLException ex) { } catch (SQLException ex) {
context.abort(); context.abort();


@@ -9,6 +9,8 @@ package org.dspace.eperson;
import java.util.List; import java.util.List;
import org.apache.commons.lang3.ArrayUtils;
/** /**
* Exception indicating that an EPerson may not be deleted due to the presence * Exception indicating that an EPerson may not be deleted due to the presence
* of the EPerson's ID in certain tables * of the EPerson's ID in certain tables
@@ -33,7 +35,10 @@ public class EPersonDeletionException extends Exception {
* deleted if it exists in these tables. * deleted if it exists in these tables.
*/ */
public EPersonDeletionException(List<String> tableList) { public EPersonDeletionException(List<String> tableList) {
-       super();
+       // This may not be the most beautiful way to print the table names as part of the error message,
+       // but it has to be a one-liner, as the super() call must be the first statement in the constructor.
+       super("Cannot delete EPerson as it is referenced by the following database tables: "
+               + ArrayUtils.toString(tableList.toArray()));
myTableList = tableList; myTableList = tableList;
} }
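
For what the resulting message looks like: ArrayUtils.toString renders the list in braces, so an exception built from the tables item and resourcepolicy would carry the message shown in this standalone sketch (the table names are examples):

    import java.util.Arrays;
    import java.util.List;

    import org.apache.commons.lang3.ArrayUtils;

    public class DeletionMessageDemo {
        public static void main(String[] args) {
            List<String> tableList = Arrays.asList("item", "resourcepolicy");
            String message = "Cannot delete EPerson as it is referenced by the following database tables: "
                    + ArrayUtils.toString(tableList.toArray());
            // Prints: Cannot delete EPerson as it is referenced by the following database tables: {item,resourcepolicy}
            System.out.println(message);
        }
    }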


@@ -7,10 +7,13 @@
*/ */
package org.dspace.eperson; package org.dspace.eperson;
import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections;
import java.util.Date; import java.util.Date;
import java.util.HashSet;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
@@ -21,26 +24,56 @@ import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService; import org.dspace.authorize.service.AuthorizeService;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.DSpaceObjectServiceImpl; import org.dspace.content.DSpaceObjectServiceImpl;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.MetadataField; import org.dspace.content.MetadataField;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
import org.dspace.content.service.WorkspaceItemService;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.LogManager; import org.dspace.core.LogManager;
import org.dspace.core.Utils; import org.dspace.core.Utils;
import org.dspace.eperson.dao.EPersonDAO; import org.dspace.eperson.dao.EPersonDAO;
import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.EPersonService;
import org.dspace.eperson.service.GroupService;
import org.dspace.eperson.service.SubscribeService; import org.dspace.eperson.service.SubscribeService;
import org.dspace.event.Event; import org.dspace.event.Event;
import org.dspace.versioning.Version;
import org.dspace.versioning.VersionHistory;
import org.dspace.versioning.dao.VersionDAO;
import org.dspace.versioning.factory.VersionServiceFactory;
import org.dspace.versioning.service.VersionHistoryService;
import org.dspace.versioning.service.VersioningService;
import org.dspace.workflow.WorkflowService; import org.dspace.workflow.WorkflowService;
import org.dspace.workflow.factory.WorkflowServiceFactory; import org.dspace.workflow.factory.WorkflowServiceFactory;
import org.dspace.workflowbasic.BasicWorkflowItem;
import org.dspace.workflowbasic.BasicWorkflowServiceImpl;
import org.dspace.workflowbasic.factory.BasicWorkflowServiceFactory;
import org.dspace.workflowbasic.service.BasicWorkflowItemService;
import org.dspace.workflowbasic.service.BasicWorkflowService;
import org.dspace.workflowbasic.service.TaskListItemService;
import org.dspace.xmlworkflow.WorkflowConfigurationException;
import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory;
import org.dspace.xmlworkflow.service.WorkflowRequirementsService;
import org.dspace.xmlworkflow.service.XmlWorkflowService;
import org.dspace.xmlworkflow.storedcomponents.ClaimedTask;
import org.dspace.xmlworkflow.storedcomponents.CollectionRole;
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
import org.dspace.xmlworkflow.storedcomponents.service.ClaimedTaskService;
import org.dspace.xmlworkflow.storedcomponents.service.CollectionRoleService;
import org.dspace.xmlworkflow.storedcomponents.service.PoolTaskService;
import org.dspace.xmlworkflow.storedcomponents.service.WorkflowItemRoleService;
import org.dspace.xmlworkflow.storedcomponents.service.XmlWorkflowItemService;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
/** /**
-* Service implementation for the EPerson object.
-* This class is responsible for all business logic calls for the EPerson object and is autowired by spring.
+* Service implementation for the EPerson object. This class is responsible for
+* all business logic calls for the EPerson object and is autowired by spring.
* This class should never be accessed directly. * This class should never be accessed directly.
* *
* @author kevinvandevelde at atmire.com * @author kevinvandevelde at atmire.com
@@ -60,7 +93,17 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
@Autowired(required = true) @Autowired(required = true)
protected ItemService itemService; protected ItemService itemService;
@Autowired(required = true) @Autowired(required = true)
protected WorkflowItemRoleService workflowItemRoleService;
@Autowired(required = true)
CollectionRoleService collectionRoleService;
@Autowired(required = true)
protected GroupService groupService;
@Autowired(required = true)
protected SubscribeService subscribeService; protected SubscribeService subscribeService;
@Autowired(required = true)
protected VersionDAO versionDAO;
@Autowired(required = true)
protected ClaimedTaskService claimedTaskService;
protected EPersonServiceImpl() { protected EPersonServiceImpl() {
super(); super();
@@ -129,7 +172,7 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
query = null; query = null;
} }
return ePersonDAO.search(context, query, Arrays.asList(firstNameField, lastNameField), return ePersonDAO.search(context, query, Arrays.asList(firstNameField, lastNameField),
Arrays.asList(firstNameField, lastNameField), offset, limit); Arrays.asList(firstNameField, lastNameField), offset, limit);
} }
} }
@@ -179,45 +222,202 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
// authorized? // authorized?
if (!authorizeService.isAdmin(context)) { if (!authorizeService.isAdmin(context)) {
throw new AuthorizeException( throw new AuthorizeException(
"You must be an admin to create an EPerson"); "You must be an admin to create an EPerson");
} }
// Create a table row // Create a table row
EPerson e = ePersonDAO.create(context, new EPerson()); EPerson e = ePersonDAO.create(context, new EPerson());
log.info(LogManager.getHeader(context, "create_eperson", "eperson_id=" log.info(LogManager.getHeader(context, "create_eperson", "eperson_id="
+ e.getID())); + e.getID()));
context.addEvent(new Event(Event.CREATE, Constants.EPERSON, e.getID(), context.addEvent(new Event(Event.CREATE, Constants.EPERSON, e.getID(),
null, getIdentifiers(context, e))); null, getIdentifiers(context, e)));
return e; return e;
} }
@Override @Override
public void delete(Context context, EPerson ePerson) throws SQLException, AuthorizeException { public void delete(Context context, EPerson ePerson) throws SQLException, AuthorizeException {
try {
delete(context, ePerson, true);
} catch (AuthorizeException ex) {
log.error("This AuthorizeException: " + ex + " occured while deleting Eperson with the ID: " +
ePerson.getID());
throw new AuthorizeException(ex);
} catch (IOException ex) {
log.error("This IOException: " + ex + " occured while deleting Eperson with the ID: " + ePerson.getID());
throw new AuthorizeException(ex);
} catch (EPersonDeletionException e) {
throw new IllegalStateException(e);
}
}
/**
* Deletes an EPerson. The argument cascade defines whether all references
* on an EPerson should be deleted as well (by either deleting the
* referencing object - e.g. WorkspaceItem, ResourcePolicy - or by setting
* the foreign key null - e.g. archived Items). If cascade is set to false
* and the EPerson is referenced somewhere, this leads to an
* EPersonDeletionException. EPersons may be referenced by Items, ResourcePolicies
* and workflow tasks.
*
* @param context DSpace context
* @param ePerson The EPerson to delete.
* @param cascade Whether to delete references on the EPerson (cascade =
* true) or to abort the deletion (cascade = false) if the EPerson is
* referenced within DSpace.
*
* @throws SQLException
* @throws AuthorizeException
* @throws IOException
*/
public void delete(Context context, EPerson ePerson, boolean cascade)
throws SQLException, AuthorizeException, IOException, EPersonDeletionException {
// authorized? // authorized?
if (!authorizeService.isAdmin(context)) { if (!authorizeService.isAdmin(context)) {
throw new AuthorizeException( throw new AuthorizeException(
"You must be an admin to delete an EPerson"); "You must be an admin to delete an EPerson");
}
Set<Group> workFlowGroups = getAllWorkFlowGroups(context, ePerson);
for (Group group: workFlowGroups) {
List<EPerson> ePeople = groupService.allMembers(context, group);
if (ePeople.size() == 1 && ePeople.contains(ePerson)) {
throw new IllegalStateException(
"Refused to delete user " + ePerson.getID() + " because it the only member of the workflow group"
+ group.getID() + ". Delete the tasks and group first if you want to remove this user.");
}
} }
// check for presence of eperson in tables that // check for presence of eperson in tables that
// have constraints on eperson_id // have constraints on eperson_id
List<String> constraintList = getDeleteConstraints(context, ePerson);
-// if eperson exists in tables that have constraints
-// on eperson, throw an exception
if (constraintList.size() > 0) {
-    throw new AuthorizeException(new EPersonDeletionException(constraintList));
-}
+    // Check if the constraints we found should be deleted
+    if (cascade) {
boolean isBasicFramework = WorkflowServiceFactory.getInstance().getWorkflowService()
instanceof BasicWorkflowService;
boolean isXmlFramework = WorkflowServiceFactory.getInstance().getWorkflowService()
instanceof XmlWorkflowService;
Iterator<String> constraintsIterator = constraintList.iterator();
while (constraintsIterator.hasNext()) {
String tableName = constraintsIterator.next();
if (StringUtils.equals(tableName, "item") || StringUtils.equals(tableName, "workspaceitem")) {
Iterator<Item> itemIterator = itemService.findBySubmitter(context, ePerson, true);
VersionHistoryService versionHistoryService = VersionServiceFactory.getInstance()
.getVersionHistoryService();
VersioningService versioningService = VersionServiceFactory.getInstance().getVersionService();
while (itemIterator.hasNext()) {
Item item = itemIterator.next();
VersionHistory versionHistory = versionHistoryService.findByItem(context, item);
if (null != versionHistory) {
for (Version version : versioningService.getVersionsByHistory(context,
versionHistory)) {
version.setePerson(null);
versionDAO.save(context, version);
}
}
WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance()
.getWorkspaceItemService();
WorkspaceItem wsi = workspaceItemService.findByItem(context, item);
if (null != wsi) {
workspaceItemService.deleteAll(context, wsi);
} else {
// we can do that as dc.provenance still contains
// information about who submitted and who
// archived an item.
item.setSubmitter(null);
itemService.update(context, item);
}
}
} else if (StringUtils.equals(tableName, "cwf_claimtask") && isXmlFramework) {
// Unclaim all XmlWorkflow tasks
XmlWorkflowItemService xmlWorkflowItemService = XmlWorkflowServiceFactory
.getInstance().getXmlWorkflowItemService();
ClaimedTaskService claimedTaskService = XmlWorkflowServiceFactory
.getInstance().getClaimedTaskService();
XmlWorkflowService xmlWorkflowService = XmlWorkflowServiceFactory
.getInstance().getXmlWorkflowService();
WorkflowRequirementsService workflowRequirementsService = XmlWorkflowServiceFactory
.getInstance().getWorkflowRequirementsService();
List<XmlWorkflowItem> xmlWorkflowItems = xmlWorkflowItemService
.findBySubmitter(context, ePerson);
List<ClaimedTask> claimedTasks = claimedTaskService.findByEperson(context, ePerson);
for (ClaimedTask task : claimedTasks) {
xmlWorkflowService.deleteClaimedTask(context, task.getWorkflowItem(), task);
try {
workflowRequirementsService.removeClaimedUser(context, task.getWorkflowItem(),
ePerson, task.getStepID());
} catch (WorkflowConfigurationException ex) {
log.error("This WorkflowConfigurationException: " + ex +
" occured while deleting Eperson with the ID: " + ePerson.getID());
throw new AuthorizeException(new EPersonDeletionException(Collections
.singletonList(tableName)));
}
}
} else if (StringUtils.equals(tableName, "workflowitem") && isBasicFramework) {
// Remove basicWorkflow workflowitem and unclaim them
BasicWorkflowItemService basicWorkflowItemService = BasicWorkflowServiceFactory.getInstance()
.getBasicWorkflowItemService();
BasicWorkflowService basicWorkflowService = BasicWorkflowServiceFactory.getInstance()
.getBasicWorkflowService();
TaskListItemService taskListItemService = BasicWorkflowServiceFactory.getInstance()
.getTaskListItemService();
List<BasicWorkflowItem> workflowItems = basicWorkflowItemService.findByOwner(context, ePerson);
for (BasicWorkflowItem workflowItem : workflowItems) {
int state = workflowItem.getState();
// unclaim tasks that are in the pool.
if (state == BasicWorkflowServiceImpl.WFSTATE_STEP1
|| state == BasicWorkflowServiceImpl.WFSTATE_STEP2
|| state == BasicWorkflowServiceImpl.WFSTATE_STEP3) {
log.info(LogManager.getHeader(context, "unclaim_workflow",
"workflow_id=" + workflowItem.getID() + ", claiming EPerson is deleted"));
basicWorkflowService.unclaim(context, workflowItem, context.getCurrentUser());
// remove the EPerson from the list of persons that can (re-)claim the task
// while we are doing it below, we must do this here as well as the previously
// unclaimed tasks was put back into pool and we do not know the order the tables
// are checked.
taskListItemService.deleteByWorkflowItemAndEPerson(context, workflowItem, ePerson);
}
}
} else if (StringUtils.equals(tableName, "resourcepolicy")) {
// we delete the EPerson, it won't need any rights anymore.
authorizeService.removeAllEPersonPolicies(context, ePerson);
} else if (StringUtils.equals(tableName, "tasklistitem") && isBasicFramework) {
// remove EPerson from the list of EPersons that may claim some specific workflow tasks.
TaskListItemService taskListItemService = BasicWorkflowServiceFactory.getInstance()
.getTaskListItemService();
taskListItemService.deleteByEPerson(context, ePerson);
} else if (StringUtils.equals(tableName, "cwf_pooltask") && isXmlFramework) {
PoolTaskService poolTaskService = XmlWorkflowServiceFactory.getInstance().getPoolTaskService();
poolTaskService.deleteByEperson(context, ePerson);
} else if (StringUtils.equals(tableName, "cwf_workflowitemrole") && isXmlFramework) {
WorkflowItemRoleService workflowItemRoleService = XmlWorkflowServiceFactory.getInstance()
.getWorkflowItemRoleService();
workflowItemRoleService.deleteByEPerson(context, ePerson);
} else {
log.warn("EPerson is referenced in table '" + tableName
+ "'. Deletion of EPerson " + ePerson.getID() + " may fail "
+ "if the database does not handle this "
+ "reference.");
}
}
} else {
throw new EPersonDeletionException(constraintList);
}
}
context.addEvent(new Event(Event.DELETE, Constants.EPERSON, ePerson.getID(), ePerson.getEmail(), context.addEvent(new Event(Event.DELETE, Constants.EPERSON, ePerson.getID(), ePerson.getEmail(),
getIdentifiers(context, ePerson))); getIdentifiers(context, ePerson)));
// XXX FIXME: This sidesteps the object model code so it won't // XXX FIXME: This sidesteps the object model code so it won't
// generate REMOVE events on the affected Groups. // generate REMOVE events on the affected Groups.
// Remove any group memberships first // Remove any group memberships first
// Remove any group memberships first // Remove any group memberships first
Iterator<Group> groups = ePerson.getGroups().iterator(); Iterator<Group> groups = ePerson.getGroups().iterator();
@@ -234,7 +434,20 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
ePersonDAO.delete(context, ePerson); ePersonDAO.delete(context, ePerson);
log.info(LogManager.getHeader(context, "delete_eperson", log.info(LogManager.getHeader(context, "delete_eperson",
"eperson_id=" + ePerson.getID())); "eperson_id=" + ePerson.getID()));
}
private Set<Group> getAllWorkFlowGroups(Context context, EPerson ePerson) throws SQLException {
Set<Group> workFlowGroups = new HashSet<>();
Set<Group> groups = groupService.allMemberGroupsSet(context, ePerson);
for (Group group: groups) {
List<CollectionRole> collectionRoles = collectionRoleService.findByGroup(context, group);
if (!collectionRoles.isEmpty()) {
workFlowGroups.add(group);
}
}
return workFlowGroups;
} }
@Override @Override
@@ -268,8 +481,8 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
PasswordHash hash = null; PasswordHash hash = null;
try { try {
hash = new PasswordHash(ePerson.getDigestAlgorithm(), hash = new PasswordHash(ePerson.getDigestAlgorithm(),
ePerson.getSalt(), ePerson.getSalt(),
ePerson.getPassword()); ePerson.getPassword());
} catch (DecoderException ex) { } catch (DecoderException ex) {
log.error("Problem decoding stored salt or hash: " + ex.getMessage()); log.error("Problem decoding stored salt or hash: " + ex.getMessage());
} }
@@ -281,9 +494,9 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
PasswordHash myHash; PasswordHash myHash;
try { try {
myHash = new PasswordHash( myHash = new PasswordHash(
ePerson.getDigestAlgorithm(), ePerson.getDigestAlgorithm(),
ePerson.getSalt(), ePerson.getSalt(),
ePerson.getPassword()); ePerson.getPassword());
} catch (DecoderException ex) { } catch (DecoderException ex) {
log.error(ex.getMessage()); log.error(ex.getMessage());
return false; return false;
@@ -312,8 +525,8 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
// Check authorisation - if you're not the eperson // Check authorisation - if you're not the eperson
// see if the authorization system says you can // see if the authorization system says you can
if (!context.ignoreAuthorization() if (!context.ignoreAuthorization()
&& ((context.getCurrentUser() == null) || (ePerson.getID() != context && ((context.getCurrentUser() == null) || (ePerson.getID() != context
.getCurrentUser().getID()))) { .getCurrentUser().getID()))) {
authorizeService.authorizeAction(context, ePerson, Constants.WRITE); authorizeService.authorizeAction(context, ePerson, Constants.WRITE);
} }
@@ -322,11 +535,11 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
ePersonDAO.save(context, ePerson); ePersonDAO.save(context, ePerson);
log.info(LogManager.getHeader(context, "update_eperson", log.info(LogManager.getHeader(context, "update_eperson",
"eperson_id=" + ePerson.getID())); "eperson_id=" + ePerson.getID()));
if (ePerson.isModified()) { if (ePerson.isModified()) {
context.addEvent(new Event(Event.MODIFY, Constants.EPERSON, context.addEvent(new Event(Event.MODIFY, Constants.EPERSON,
ePerson.getID(), null, getIdentifiers(context, ePerson))); ePerson.getID(), null, getIdentifiers(context, ePerson)));
ePerson.clearModified(); ePerson.clearModified();
} }
if (ePerson.isMetadataModified()) { if (ePerson.isMetadataModified()) {
@@ -339,11 +552,22 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
List<String> tableList = new ArrayList<String>(); List<String> tableList = new ArrayList<String>();
// check for eperson in item table // check for eperson in item table
-Iterator<Item> itemsBySubmitter = itemService.findBySubmitter(context, ePerson);
+Iterator<Item> itemsBySubmitter = itemService.findBySubmitter(context, ePerson, true);
if (itemsBySubmitter.hasNext()) { if (itemsBySubmitter.hasNext()) {
tableList.add("item"); tableList.add("item");
} }
WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService();
List<WorkspaceItem> workspaceBySubmitter = workspaceItemService.findByEPerson(context, ePerson);
if (workspaceBySubmitter.size() > 0) {
tableList.add("workspaceitem");
}
ResourcePolicyService resourcePolicyService = AuthorizeServiceFactory.getInstance().getResourcePolicyService();
if (resourcePolicyService.find(context, ePerson).size() > 0) {
tableList.add("resourcepolicy");
}
WorkflowService workflowService = WorkflowServiceFactory.getInstance().getWorkflowService(); WorkflowService workflowService = WorkflowServiceFactory.getInstance().getWorkflowService();
List<String> workflowConstraints = workflowService.getEPersonDeleteConstraints(context, ePerson); List<String> workflowConstraints = workflowService.getEPersonDeleteConstraints(context, ePerson);
tableList.addAll(workflowConstraints); tableList.addAll(workflowConstraints);
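
Taken together, callers can now either inspect the constraints themselves or ask the service to cascade. An illustrative fragment against the methods introduced above (not runnable on its own; ePersonServiceImpl stands for a handle to the implementation, since the three-argument delete is declared on the impl rather than the interface):

    // Ask which tables still reference the account before deciding what to do.
    List<String> constraints = ePersonService.getDeleteConstraints(context, ePerson);

    if (constraints.isEmpty()) {
        // Nothing references the EPerson, so the plain delete is enough.
        ePersonService.delete(context, ePerson);
    } else {
        // cascade = true: submitted items are detached (or their workspace items removed),
        // resource policies and workflow references are cleaned up, as implemented above.
        // cascade = false would instead raise an EPersonDeletionException listing the tables.
        ePersonServiceImpl.delete(context, ePerson, true);
    }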


@@ -42,8 +42,15 @@ import org.dspace.eperson.service.EPersonService;
import org.dspace.eperson.service.GroupService; import org.dspace.eperson.service.GroupService;
import org.dspace.event.Event; import org.dspace.event.Event;
import org.dspace.util.UUIDUtils; import org.dspace.util.UUIDUtils;
import org.dspace.xmlworkflow.Role;
import org.dspace.xmlworkflow.factory.XmlWorkflowFactory;
import org.dspace.xmlworkflow.state.Step;
import org.dspace.xmlworkflow.storedcomponents.ClaimedTask;
import org.dspace.xmlworkflow.storedcomponents.CollectionRole; import org.dspace.xmlworkflow.storedcomponents.CollectionRole;
import org.dspace.xmlworkflow.storedcomponents.PoolTask;
import org.dspace.xmlworkflow.storedcomponents.service.ClaimedTaskService;
import org.dspace.xmlworkflow.storedcomponents.service.CollectionRoleService; import org.dspace.xmlworkflow.storedcomponents.service.CollectionRoleService;
import org.dspace.xmlworkflow.storedcomponents.service.PoolTaskService;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
@@ -81,6 +88,13 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
@Autowired(required = true) @Autowired(required = true)
protected ResourcePolicyService resourcePolicyService; protected ResourcePolicyService resourcePolicyService;
@Autowired(required = true)
protected PoolTaskService poolTaskService;
@Autowired(required = true)
protected ClaimedTaskService claimedTaskService;
@Autowired(required = true)
protected XmlWorkflowFactory workflowFactory;
protected GroupServiceImpl() { protected GroupServiceImpl() {
super(); super();
} }
@@ -143,8 +157,48 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
groupChild.getName(), getIdentifiers(context, groupParent))); groupChild.getName(), getIdentifiers(context, groupParent)));
} }
/**
* Removes a member of a group.
* The removal will be refused if the group is linked to a workflow step which has claimed tasks or pool tasks
* and no other member is present in the group to handle these.
* @param context DSpace context object
* @param group DSpace group
* @param ePerson eperson
* @throws SQLException
*/
@Override @Override
-public void removeMember(Context context, Group group, EPerson ePerson) {
+public void removeMember(Context context, Group group, EPerson ePerson) throws SQLException {
List<CollectionRole> collectionRoles = collectionRoleService.findByGroup(context, group);
if (!collectionRoles.isEmpty()) {
List<PoolTask> poolTasks = poolTaskService.findByGroup(context, group);
List<ClaimedTask> claimedTasks = claimedTaskService.findByEperson(context, ePerson);
for (ClaimedTask claimedTask : claimedTasks) {
Step stepByName = workflowFactory.getStepByName(claimedTask.getStepID());
Role role = stepByName.getRole();
for (CollectionRole collectionRole : collectionRoles) {
if (StringUtils.equals(collectionRole.getRoleId(), role.getId())
&& claimedTask.getWorkflowItem().getCollection() == collectionRole.getCollection()) {
List<EPerson> ePeople = allMembers(context, group);
if (ePeople.size() == 1 && ePeople.contains(ePerson)) {
throw new IllegalStateException(
"Refused to remove user " + ePerson
.getID() + " from workflow group because the group " + group
.getID() + " has tasks assigned and no other members");
}
}
}
}
if (!poolTasks.isEmpty()) {
List<EPerson> ePeople = allMembers(context, group);
if (ePeople.size() == 1 && ePeople.contains(ePerson)) {
throw new IllegalStateException(
"Refused to remove user " + ePerson
.getID() + " from workflow group because the group " + group
.getID() + " has tasks assigned and no other members");
}
}
}
if (group.remove(ePerson)) { if (group.remove(ePerson)) {
context.addEvent(new Event(Event.REMOVE, Constants.GROUP, group.getID(), Constants.EPERSON, ePerson.getID(), context.addEvent(new Event(Event.REMOVE, Constants.GROUP, group.getID(), Constants.EPERSON, ePerson.getID(),
ePerson.getEmail(), getIdentifiers(context, group))); ePerson.getEmail(), getIdentifiers(context, group)));
@@ -153,6 +207,20 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
@Override @Override
public void removeMember(Context context, Group groupParent, Group childGroup) throws SQLException { public void removeMember(Context context, Group groupParent, Group childGroup) throws SQLException {
List<CollectionRole> collectionRoles = collectionRoleService.findByGroup(context, groupParent);
if (!collectionRoles.isEmpty()) {
List<PoolTask> poolTasks = poolTaskService.findByGroup(context, groupParent);
if (!poolTasks.isEmpty()) {
List<EPerson> parentPeople = allMembers(context, groupParent);
List<EPerson> childPeople = allMembers(context, childGroup);
if (childPeople.containsAll(parentPeople)) {
throw new IllegalStateException(
"Refused to remove sub group " + childGroup
.getID() + " from workflow group because the group " + groupParent
.getID() + " has tasks assigned and no other members");
}
}
}
if (groupParent.remove(childGroup)) { if (groupParent.remove(childGroup)) {
childGroup.removeParentGroup(groupParent); childGroup.removeParentGroup(groupParent);
context.addEvent( context.addEvent(
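
Because both removeMember variants now refuse to empty a workflow group that still has claimed or pooled tasks, calling code should be prepared for the IllegalStateException. An illustrative fragment (the surrounding error handling is an assumption, not DSpace's own calling code):

    try {
        groupService.removeMember(context, workflowGroup, ePerson);
    } catch (IllegalStateException e) {
        // The group backs a workflow step with outstanding tasks and this is its last member:
        // reassign or finish the tasks, or add another member, before retrying the removal.
        log.warn("Could not remove member from workflow group: {}", e.getMessage());
    }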


@@ -76,7 +76,7 @@ public interface GroupService extends DSpaceObjectService<Group>, DSpaceObjectLe
* @param group DSpace group * @param group DSpace group
* @param ePerson eperson * @param ePerson eperson
*/ */
-public void removeMember(Context context, Group group, EPerson ePerson);
+public void removeMember(Context context, Group group, EPerson ePerson) throws SQLException;
/** /**


@@ -0,0 +1,162 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.external.provider.impl;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.dto.MetadataValueDTO;
import org.dspace.external.model.ExternalDataObject;
import org.dspace.external.provider.ExternalDataProvider;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.dspace.importer.external.service.components.QuerySource;
/**
* This class allows a Live Import Provider to be configured as an External Data Provider
*
* @author Andrea Bollini (andrea.bollini at 4science.it)
*
*/
public class LiveImportDataProvider implements ExternalDataProvider {
/**
* The {@link QuerySource} live import provider
*/
private QuerySource querySource;
/**
* A unique human readable identifier for this provider
*/
private String sourceIdentifier;
private String recordIdMetadata;
private String displayMetadata = "dc.title";
@Override
public String getSourceIdentifier() {
return sourceIdentifier;
}
/**
* This method sets the SourceIdentifier for the ExternalDataProvider
* @param sourceIdentifier The UNIQUE sourceIdentifier to be set on any LiveImport data provider
*/
public void setSourceIdentifier(String sourceIdentifier) {
this.sourceIdentifier = sourceIdentifier;
}
/**
* This method sets the MetadataSource for the ExternalDataProvider
* @param querySource {@link org.dspace.importer.external.service.components.QuerySource} implementation used to process the input data
*/
public void setMetadataSource(QuerySource querySource) {
this.querySource = querySource;
}
/**
* This method sets the dublin core identifier to use as the metadata id
* @param recordIdMetadata dublin core identifier to use as metadata id
*/
public void setRecordIdMetadata(String recordIdMetadata) {
this.recordIdMetadata = recordIdMetadata;
}
/**
* This method sets the dublin core identifier used to display the title
* @param displayMetadata metadata to use as title
*/
public void setDisplayMetadata(String displayMetadata) {
this.displayMetadata = displayMetadata;
}
@Override
public Optional<ExternalDataObject> getExternalDataObject(String id) {
try {
ExternalDataObject externalDataObject = getExternalDataObject(querySource.getRecord(id));
return Optional.of(externalDataObject);
} catch (MetadataSourceException e) {
throw new RuntimeException(
"The live import provider " + querySource.getImportSource() + " throws an exception", e);
}
}
@Override
public List<ExternalDataObject> searchExternalDataObjects(String query, int start, int limit) {
Collection<ImportRecord> records;
try {
records = querySource.getRecords(query, start, limit);
return records.stream().map(r -> getExternalDataObject(r)).collect(Collectors.toList());
} catch (MetadataSourceException e) {
throw new RuntimeException(
"The live import provider " + querySource.getImportSource() + " throws an exception", e);
}
}
@Override
public boolean supports(String source) {
return StringUtils.equalsIgnoreCase(sourceIdentifier, source);
}
@Override
public int getNumberOfResults(String query) {
try {
return querySource.getRecordsCount(query);
} catch (MetadataSourceException e) {
throw new RuntimeException(
"The live import provider " + querySource.getImportSource() + " throws an exception", e);
}
}
/**
* Internal method to convert an ImportRecord to an ExternalDataObject
*
* FIXME it would be useful to remove ImportRecord altogether in favor of the
* ExternalDataObject
*
* @param record
* @return
*/
private ExternalDataObject getExternalDataObject(ImportRecord record) {
//return 400 if no record were found
if (record == null) {
throw new IllegalArgumentException("No record found for query or id");
}
ExternalDataObject externalDataObject = new ExternalDataObject(sourceIdentifier);
String id = getFirstValue(record, recordIdMetadata);
String display = getFirstValue(record, displayMetadata);
externalDataObject.setId(id);
externalDataObject.setDisplayValue(display);
externalDataObject.setValue(display);
for (MetadatumDTO dto : record.getValueList()) {
// FIXME it would be useful to remove MetadatumDTO in favor of MetadataValueDTO
MetadataValueDTO mvDTO = new MetadataValueDTO();
mvDTO.setSchema(dto.getSchema());
mvDTO.setElement(dto.getElement());
mvDTO.setQualifier(dto.getQualifier());
mvDTO.setValue(dto.getValue());
externalDataObject.addMetadata(mvDTO);
}
return externalDataObject;
}
private String getFirstValue(ImportRecord record, String metadata) {
String id = null;
String[] split = StringUtils.split(metadata, ".", 3);
Collection<MetadatumDTO> values = record.getValue(split[0], split[1], split.length == 3 ? split[2] : null);
if (!values.isEmpty()) {
id = (values.iterator().next().getValue());
}
return id;
}
}
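
How such a provider is typically wired: the class is a plain bean with setters, so it can be configured in Spring or assembled programmatically. A minimal programmatic sketch under assumed values (the ArXiv service from later in this changeset is used purely as an example of a QuerySource, and the metadata field name is illustrative):

    // Illustrative wiring only; in DSpace this would normally live in Spring configuration.
    ArXivImportMetadataSourceServiceImpl arxivSource = new ArXivImportMetadataSourceServiceImpl();

    LiveImportDataProvider provider = new LiveImportDataProvider();
    provider.setSourceIdentifier("arxiv");               // must be unique across external providers
    provider.setMetadataSource(arxivSource);             // any QuerySource implementation
    provider.setRecordIdMetadata("dc.identifier.other"); // assumed field holding the record id
    // displayMetadata defaults to "dc.title", so it only needs to be set to override that.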


@@ -761,9 +761,9 @@ public class DOIIdentifierProvider
Item item = (Item) dso; Item item = (Item) dso;
List<MetadataValue> metadata = itemService.getMetadata(item, MD_SCHEMA, DOI_ELEMENT, DOI_QUALIFIER, null); List<MetadataValue> metadata = itemService.getMetadata(item, MD_SCHEMA, DOI_ELEMENT, DOI_QUALIFIER, null);
+String leftPart = DOI.RESOLVER + SLASH + getPrefix() + SLASH + getNamespaceSeparator();
for (MetadataValue id : metadata) {
-    if (id.getValue().startsWith(
-        DOI.RESOLVER + String.valueOf(SLASH) + PREFIX + String.valueOf(SLASH) + NAMESPACE_SEPARATOR)) {
+    if (id.getValue().startsWith(leftPart)) {
return doiService.DOIFromExternalFormat(id.getValue()); return doiService.DOIFromExternalFormat(id.getValue());
} }
} }
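
The refactor builds the comparison prefix once from the configured resolver, prefix and namespace separator instead of re-concatenating constants inside the loop. Under assumed configuration values the prefix and the match behave as in this standalone sketch:

    public class DoiPrefixDemo {
        public static void main(String[] args) {
            // Assumed example values; the real ones come from DOI.RESOLVER and the provider configuration.
            String resolver = "https://doi.org";
            char slash = '/';
            String prefix = "10.5072";
            String namespaceSeparator = "dspace-";

            String leftPart = resolver + slash + prefix + slash + namespaceSeparator;
            System.out.println(leftPart); // https://doi.org/10.5072/dspace-

            String storedValue = leftPart + "123";
            System.out.println(storedValue.startsWith(leftPart)); // true
        }
    }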


@@ -0,0 +1,37 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.arxiv.metadatamapping;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
* An implementation of {@link AbstractMetadataFieldMapping}
* Responsible for defining the mapping of the ArXiv metadatum fields on the DSpace metadatum fields
*
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*/
public class ArXivFieldMapping extends AbstractMetadataFieldMapping {
/**
* Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it
* only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
* what metadatafield is generated.
*
* @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to
* the item.
*/
@Override
@Resource(name = "arxivMetadataFieldMap")
public void setMetadataFieldMap(Map metadataFieldMap) {
super.setMetadataFieldMap(metadataFieldMap);
}
}


@@ -0,0 +1,60 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.arxiv.metadatamapping.contributor;
import java.util.Collection;
import org.apache.axiom.om.OMElement;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.dspace.importer.external.metadatamapping.contributor.MetadataContributor;
import org.dspace.importer.external.metadatamapping.contributor.SimpleXpathMetadatumContributor;
/**
* Arxiv specific implementation of {@link MetadataContributor}
* Responsible for generating the ArXiv Id from the retrieved item.
*
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*
*/
public class ArXivIdMetadataContributor extends SimpleXpathMetadatumContributor {
/**
* Retrieve the metadata associated with the given object.
* Depending on the retrieved node (using the query), different types of values will be added to the MetadatumDTO
* list
*
* @param t A class to retrieve metadata from.
* @return a collection of import records. Only the identifier of the found records may be put in the record.
*/
@Override
public Collection<MetadatumDTO> contributeMetadata(OMElement t) {
Collection<MetadatumDTO> values = super.contributeMetadata(t);
parseValue(values);
return values;
}
/**
* ArXiv returns a full URL as in the <id> value, e.g. http://arxiv.org/abs/1911.11405v1.
* This method parses out the identifier from the end of the URL, e.g. 1911.11405v1.
*
* @param dtos Metadata which contains the item's uri
*/
private void parseValue(Collection<MetadatumDTO> dtos) {
if (dtos != null) {
for (MetadatumDTO dto : dtos) {
if (dto != null && dto.getValue() != null && dto.getValue().contains("/")) {
int startIndex = dto.getValue().lastIndexOf('/') + 1;
int endIndex = dto.getValue().length();
String id = dto.getValue().substring(startIndex, endIndex);
dto.setValue(id);
}
}
}
}
}
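
A standalone illustration of the parsing rule described above, using the example URL from the javadoc:

    public class ArxivIdParseDemo {
        public static void main(String[] args) {
            String value = "http://arxiv.org/abs/1911.11405v1"; // example <id> value from the ArXiv feed

            if (value != null && value.contains("/")) {
                // Keep everything after the last slash, as parseValue() does above.
                value = value.substring(value.lastIndexOf('/') + 1);
            }
            System.out.println(value); // 1911.11405v1
        }
    }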


@@ -0,0 +1,421 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.arxiv.service;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.Callable;
import javax.el.MethodNotFoundException;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Invocation;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.OMXMLBuilderFactory;
import org.apache.axiom.om.OMXMLParserWrapper;
import org.apache.axiom.om.xpath.AXIOMXPath;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.components.QuerySource;
import org.jaxen.JaxenException;
/**
* Implements a data source for querying ArXiv
*
* @author Pasquale Cavallo (pasquale.cavallo at 4Science dot it)
*
*/
public class ArXivImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<OMElement>
implements QuerySource {
private WebTarget webTarget;
private String baseAddress;
/**
* Find records matching the query string in ArXiv. Supports pagination.
*
* @param query a query string to base the search on.
* @param start offset to start at
* @param count number of records to retrieve.
* @return a set of records. Fully transformed.
* @throws MetadataSourceException if the underlying methods throw any exception.
*/
@Override
public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException {
return retry(new SearchByQueryCallable(query, count, start));
}
/**
* Find records based on an object query and convert them to a list of metadata mapped in ImportRecord.
* The entry with the key "query" of the Query's map will be used as query string value.
*
* @see org.dspace.importer.external.datamodel.Query
* @see org.dspace.importer.external.datamodel.ImportRecord
* @param query a query object to base the search on.
* @return a set of records. Fully transformed.
* @throws MetadataSourceException if the underlying methods throw any exception.
*/
@Override
public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
return retry(new SearchByQueryCallable(query));
}
/**
* Find the number of records matching the query string in ArXiv.
*
* @param query a query string to base the search on.
* @return the sum of the matching records over this import source
* @throws MetadataSourceException if the underlying methods throw any exception.
*/
@Override
public int getRecordsCount(String query) throws MetadataSourceException {
return retry(new CountByQueryCallable(query));
}
/**
* Find the number of records matching a query.
* The entry with the key "query" of the Query's map will be used to get the query string.
*
* @see org.dspace.importer.external.datamodel.Query
* @param query a query object to base the search on.
* @return the sum of the matching records over this import source
* @throws MetadataSourceException if the underlying methods throw any exception.
*/
@Override
public int getRecordsCount(Query query) throws MetadataSourceException {
return retry(new CountByQueryCallable(query));
}
/**
* Get a single record of metadata from the arxiv by ArXiv ID.
*
* @param id id of the record in ArXiv
* @return the first matching record
* @throws MetadataSourceException if the underlying methods throw any exception.
*/
@Override
public ImportRecord getRecord(String id) throws MetadataSourceException {
List<ImportRecord> records = retry(new SearchByIdCallable(id));
return records == null || records.isEmpty() ? null : records.get(0);
}
/**
* Get a single record from the ArXiv matching the query.
* Field "query" will be used to get data from.
*
* @see org.dspace.importer.external.datamodel.Query
* @param query a query matching a single record
* @return the first matching record
* @throws MetadataSourceException if the underlying methods throw any exception.
*/
@Override
public ImportRecord getRecord(Query query) throws MetadataSourceException {
List<ImportRecord> records = retry(new SearchByIdCallable(query));
return records == null || records.isEmpty() ? null : records.get(0);
}
/**
* Initialize the class
*
* @throws Exception on generic exception
*/
@Override
public void init() throws Exception {
Client client = ClientBuilder.newClient();
webTarget = client.target(baseAddress);
}
/**
* The string that identifies this import implementation. Preferable a URI
*
* @return the identifying uri
*/
@Override
public String getImportSource() {
return "arxiv";
}
/**
* This method is expected to be unused and to be removed from the interface soon
*/
@Override
public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
// FIXME: do we need this method?
throw new MethodNotFoundException("This method is not implemented for ArXiv");
}
/**
* Finds records based on query object.
* Supports search by title and/or author
*
* @param query a query object to base the search on.
* @return a collection of import records.
* @throws MetadataSourceException if the underlying methods throw any exception.
*/
@Override
public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
return retry(new FindMatchingRecordCallable(query));
}
/**
* This class is a Callable implementation to count the number of entries for an ArXiv
* query.
* This Callable uses the string queryString passed to the constructor as the ArXiv query value.
* If the object is constructed from a Query instance, the value of the Query's
* map entry with the key "query" will be used.
*
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*
*/
private class CountByQueryCallable implements Callable<Integer> {
private Query query;
private CountByQueryCallable(String queryString) {
query = new Query();
query.addParameter("query", queryString);
}
private CountByQueryCallable(Query query) {
this.query = query;
}
@Override
public Integer call() throws Exception {
String queryString = query.getParameterAsClass("query", String.class);
Integer start = query.getParameterAsClass("start", Integer.class);
Integer maxResult = query.getParameterAsClass("count", Integer.class);
WebTarget local = webTarget.queryParam("search_query", queryString);
if (maxResult != null) {
local = local.queryParam("max_results", String.valueOf(maxResult));
}
if (start != null) {
local = local.queryParam("start", String.valueOf(start));
}
Invocation.Builder invocationBuilder = local.request(MediaType.TEXT_PLAIN_TYPE);
Response response = invocationBuilder.get();
if (response.getStatus() == 200) {
String responseString = response.readEntity(String.class);
OMXMLParserWrapper records = OMXMLBuilderFactory.createOMBuilder(new StringReader(responseString));
OMElement element = records.getDocumentElement();
AXIOMXPath xpath = null;
try {
xpath = new AXIOMXPath("opensearch:totalResults");
xpath.addNamespace("opensearch", "http://a9.com/-/spec/opensearch/1.1/");
OMElement count = (OMElement) xpath.selectSingleNode(element);
return Integer.parseInt(count.getText());
} catch (JaxenException e) {
return null;
}
} else {
return null;
}
}
}
/**
* This class is a Callable implementation to get ArXiv entries based on
* query object.
* This Callable uses the string queryString passed to the constructor as the query value.
* If the object is constructed from a Query instance, the Query's map entry with the key "query" will be used.
* Pagination is supported too, using the values of the Query's map entries with keys "start" and "count".
*
* @see org.dspace.importer.external.datamodel.Query
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*
*/
private class SearchByQueryCallable implements Callable<List<ImportRecord>> {
private Query query;
private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
query = new Query();
query.addParameter("query", queryString);
query.addParameter("start", start);
query.addParameter("count", maxResult);
}
private SearchByQueryCallable(Query query) {
this.query = query;
}
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> results = new ArrayList<ImportRecord>();
String queryString = query.getParameterAsClass("query", String.class);
Integer start = query.getParameterAsClass("start", Integer.class);
Integer maxResult = query.getParameterAsClass("count", Integer.class);
WebTarget local = webTarget.queryParam("search_query", queryString);
if (maxResult != null) {
local = local.queryParam("max_results", String.valueOf(maxResult));
}
if (start != null) {
local = local.queryParam("start", String.valueOf(start));
}
Invocation.Builder invocationBuilder = local.request(MediaType.TEXT_PLAIN_TYPE);
Response response = invocationBuilder.get();
if (response.getStatus() == 200) {
String responseString = response.readEntity(String.class);
List<OMElement> omElements = splitToRecords(responseString);
for (OMElement record : omElements) {
results.add(transformSourceRecords(record));
}
return results;
} else {
return null;
}
}
}
/**
* This class is a Callable implementation to get an ArXiv entry using an ArXiv ID.
* The ID to use can be passed through the constructor as a String or as a Query's map entry, with the key "id".
*
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*
*/
private class SearchByIdCallable implements Callable<List<ImportRecord>> {
private Query query;
private SearchByIdCallable(Query query) {
this.query = query;
}
private SearchByIdCallable(String id) {
this.query = new Query();
query.addParameter("id", id);
}
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> results = new ArrayList<ImportRecord>();
String arxivid = query.getParameterAsClass("id", String.class);
if (StringUtils.isNotBlank(arxivid)) {
arxivid = arxivid.trim();
if (arxivid.startsWith("http://arxiv.org/abs/")) {
arxivid = arxivid.substring("http://arxiv.org/abs/".length());
} else if (arxivid.toLowerCase().startsWith("arxiv:")) {
arxivid = arxivid.substring("arxiv:".length());
}
}
WebTarget local = webTarget.queryParam("id_list", arxivid);
Invocation.Builder invocationBuilder = local.request(MediaType.TEXT_PLAIN_TYPE);
Response response = invocationBuilder.get();
if (response.getStatus() == 200) {
String responseString = response.readEntity(String.class);
List<OMElement> omElements = splitToRecords(responseString);
for (OMElement record : omElements) {
results.add(transformSourceRecords(record));
}
return results;
} else {
return null;
}
}
}
/**
* This class is a Callable implementation to search ArXiv entries
* using author and title.
* There are two fields that can be passed in the Query map, with the keys "title" and "author"
* (at least one must be provided).
*
* @see org.dspace.importer.external.datamodel.Query
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*
*/
private class FindMatchingRecordCallable implements Callable<List<ImportRecord>> {
private Query query;
private FindMatchingRecordCallable(Query q) {
query = q;
}
@Override
public List<ImportRecord> call() throws Exception {
String queryString = getQuery(this.query);
List<ImportRecord> results = new ArrayList<ImportRecord>();
WebTarget local = webTarget.queryParam("search_query", queryString);
Invocation.Builder invocationBuilder = local.request(MediaType.TEXT_PLAIN_TYPE);
Response response = invocationBuilder.get();
if (response.getStatus() == 200) {
String responseString = response.readEntity(String.class);
List<OMElement> omElements = splitToRecords(responseString);
for (OMElement record : omElements) {
results.add(transformSourceRecords(record));
}
return results;
} else {
return null;
}
}
private String getQuery(Query query) {
String title = query.getParameterAsClass("title", String.class);
String author = query.getParameterAsClass("author", String.class);
StringBuffer queryString = new StringBuffer();
if (StringUtils.isNotBlank(title)) {
queryString.append("ti:\"").append(title).append("\"");
}
if (StringUtils.isNotBlank(author)) {
// [FAU]
if (queryString.length() > 0) {
queryString.append(" AND ");
}
queryString.append("au:\"").append(author).append("\"");
}
return queryString.toString();
}
}
private List<OMElement> splitToRecords(String recordsSrc) {
OMXMLParserWrapper records = OMXMLBuilderFactory.createOMBuilder(new StringReader(recordsSrc));
OMElement element = records.getDocumentElement();
AXIOMXPath xpath = null;
try {
xpath = new AXIOMXPath("ns:entry");
xpath.addNamespace("ns", "http://www.w3.org/2005/Atom");
List<OMElement> recordsList = xpath.selectNodes(element);
return recordsList;
} catch (JaxenException e) {
return null;
}
}
/**
* Return the baseAddress set to this object
*
* @return The String object that represents the baseAddress of this object
*/
public String getBaseAddress() {
return baseAddress;
}
/**
* Set the baseAddress to this object
*
* @param baseAddress The String object that represents the baseAddress of this object
*/
public void setBaseAddress(String baseAddress) {
this.baseAddress = baseAddress;
}
}
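A minimal usage sketch of the ArXiv service above. The class name ArXivImportMetadataSourceServiceImpl, the hand wiring and the base address are assumptions for illustration; in DSpace these values would normally come from the Live Import Spring configuration.
// Sketch only (class name and endpoint assumed, not taken from the DSpace configuration).
ArXivImportMetadataSourceServiceImpl arXiv = new ArXivImportMetadataSourceServiceImpl();
arXiv.setBaseAddress("http://export.arxiv.org/api/query"); // assumed endpoint
arXiv.init(); // creates the JAX-RS WebTarget shown above; declares throws Exception
Query query = new Query();
query.addParameter("title", "Quantum error correction");
query.addParameter("author", "Doe");
// getQuery() in FindMatchingRecordCallable builds:  ti:"Quantum error correction" AND au:"Doe"
Collection<ImportRecord> matches = arXiv.findMatchingRecords(query);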

View File

@@ -0,0 +1,154 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.csv.service;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import au.com.bytecode.opencsv.CSVReader;
import org.dspace.importer.external.exception.FileSourceException;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.contributor.MetadataContributor;
import org.dspace.importer.external.service.components.AbstractPlainMetadataSource;
import org.dspace.importer.external.service.components.MetadataSource;
import org.dspace.importer.external.service.components.dto.PlainMetadataKeyValueItem;
import org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto;
/**
* This class is an implementation of {@link MetadataSource} which extends {@link AbstractPlainMetadataSource}
* in order to parse "character separated" files such as CSV, TSV, etc. using the Live Import framework.
*
* @author Pasquale Cavallo
*
*/
public class CharacterSeparatedImportMetadataSourceServiceImpl extends AbstractPlainMetadataSource {
private char separator = ',';
private char escapeCharacter = '"';
private Integer skipLines = 1;
private String importSource = "CsvMetadataSource";
/**
* Set the number of lines to skip at the start of the file. This is useful,
* for example, to skip file headers.
*
* @param skipLines the number of lines at the start of the file to skip.
*/
public void setSkipLines(Integer skipLines) {
this.skipLines = skipLines;
}
/**
*
* @return the number of lines to skip
*/
public Integer getSkipLines() {
return skipLines;
}
/**
* Method to inject the separator character.
* This must be the ASCII integer
* code of the character:
* for example, 9 for tab, 44 for comma
*/
public void setSeparator(char separator) {
this.separator = separator;
}
@Override
public String getImportSource() {
return importSource;
}
/**
* Method to set the name of the source
*/
public void setImportSource(String importSource) {
this.importSource = importSource;
}
/**
* Method to inject the escape character. This must be the ASCII integer
* code of the character:
* for example, 9 for tab, 44 for comma
*
*/
public void setEscapeCharacter(char escapeCharacter) {
this.escapeCharacter = escapeCharacter;
}
/**
* This method processes any kind of "character separated" file, such as CSV, TSV, and so on.
* It returns a List of PlainMetadataSourceDto.
* Through the superclass methods AbstractPlainMetadataSource.getRecord(s), each of these
* elements will then be converted into an {@link org.dspace.importer.external.datamodel.ImportRecord}.
* Columns are identified by their position, in zero-based notation.
* The separator and escape characters MUST be defined at class level. The number of lines to skip (headers)
* can also be defined through the field skipLines.
*
* @param inputStream the InputStream of the file
* @return a list of PlainMetadataSourceDto
* @throws FileSourceException if, for any reason, the file is not parsable
*/
@Override
protected List<PlainMetadataSourceDto> readData(InputStream inputStream) throws FileSourceException {
List<PlainMetadataSourceDto> plainMetadataList = new ArrayList<>();
try (CSVReader csvReader = new CSVReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8),
separator, escapeCharacter);) {
// read all rows
List<String[]> lines = csvReader.readAll();
int listSize = lines == null ? 0 : lines.size();
int count = skipLines;
// iterate over the rows (skipping the first skipLines lines)
while (count < listSize) {
String [] items = lines.get(count);
List<PlainMetadataKeyValueItem> keyValueList = new ArrayList<>();
if (items != null) {
int size = items.length;
int index = 0;
// iterate over the columns in the selected row
while (index < size) {
// create a key/value item for the specific row/column
PlainMetadataKeyValueItem keyValueItem = new PlainMetadataKeyValueItem();
keyValueItem.setKey(String.valueOf(index));
keyValueItem.setValue(items[index]);
keyValueList.add(keyValueItem);
index++;
}
// save all the column key/value pairs of the given row
PlainMetadataSourceDto dto = new PlainMetadataSourceDto();
dto.setMetadata(keyValueList);
plainMetadataList.add(dto);
}
count++;
}
} catch (IOException e) {
throw new FileSourceException("Error reading file", e);
}
return plainMetadataList;
}
@Override
public void setMetadataFieldMap(Map<MetadataFieldConfig,
MetadataContributor<PlainMetadataSourceDto>> metadataFieldMap) {
super.setMetadataFieldMap(metadataFieldMap);
}
}
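A configuration sketch for the class above, using only the setters shown in this file; the concrete values and the hand wiring (instead of the usual Live Import Spring configuration) are illustrative.
// Sketch: a reader for tab-separated files whose first line is a header.
CharacterSeparatedImportMetadataSourceServiceImpl tsvSource =
        new CharacterSeparatedImportMetadataSourceServiceImpl();
tsvSource.setSeparator((char) 9);        // 9 = tab
tsvSource.setEscapeCharacter((char) 34); // 34 = double quote
tsvSource.setSkipLines(1);               // skip the header row
tsvSource.setImportSource("TsvMetadataSource"); // illustrative source name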

View File

@@ -71,7 +71,7 @@ public class Query {
return null;
} else {
Object o = c.iterator().next();
- if (clazz.isAssignableFrom(o.getClass())) {
+ if (o != null && clazz.isAssignableFrom(o.getClass())) {
return (T) o;
} else {
return null;

View File

@@ -0,0 +1,140 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.endnote.service;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.dspace.importer.external.exception.FileSourceException;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.contributor.MetadataContributor;
import org.dspace.importer.external.service.components.AbstractPlainMetadataSource;
import org.dspace.importer.external.service.components.dto.PlainMetadataKeyValueItem;
import org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto;
/**
* Implements a metadata importer for Endnote files
*
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*/
public class EndnoteImportMetadataSourceServiceImpl extends AbstractPlainMetadataSource {
@Override
public String getImportSource() {
return "EndnoteMetadataSource";
}
/**
* This method maps the data present in the InputStream and returns a list of PlainMetadataSourceDto.
* Each PlainMetadataSourceDto will be used to create a single {@link org.dspace.importer.external.datamodel.ImportRecord}
*
* @param fileInputStream the InputStream of the Endnote file
* @return List of {@link org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto}
* @throws FileSourceException
* @see org.dspace.importer.external.service.components.AbstractPlainMetadataSource
*/
@Override
protected List<PlainMetadataSourceDto> readData(InputStream fileInputStream) throws FileSourceException {
List<PlainMetadataSourceDto> list = new ArrayList<>();
try {
// rows start from 3, because the first 2 (FN and VR) are consumed by tokenize
int lineForDebug = 3;
List<PlainMetadataKeyValueItem> tokenized = tokenize(fileInputStream);
List<PlainMetadataKeyValueItem> tmpList = new ArrayList<>();
// iterate over the key/value pairs, create a new PlainMetadataSourceDto on "ER" rows (which mark the end
// of a record) and stop on EF (end of file).
for (PlainMetadataKeyValueItem item : tokenized) {
if (item.getKey() == null || item.getKey().isEmpty()) {
throw new FileSourceException("Null or empty key found on line "
+ lineForDebug + ". Keys cannot be null or empty");
}
if ("EF".equals(item.getKey())) {
// end of file
break;
}
if ("ER".equals(item.getKey())) {
// a new ImportRecord starts from here (ER is a record delimiter):
// save the previous one, then start a new list
PlainMetadataSourceDto dto = new PlainMetadataSourceDto();
dto.setMetadata(new ArrayList<>(tmpList));
list.add(dto);
tmpList = new ArrayList<>();
} else {
if (item.getValue() == null || item.getValue().isEmpty()) {
throw new FileSourceException("Null or empty value found on line "
+ lineForDebug + ". A value is expected");
}
tmpList.add(item);
}
lineForDebug++;
}
} catch (Exception e) {
throw new FileSourceException("Error reading file", e);
}
return list;
}
/**
* This method iterates over the file rows, splits the content into a list of key/value items through a RegExp
* and saves the content sequentially.
* The keys "FN" and "VR", which form the Endnote preamble, are checked but not saved.
*
* @param fileInputStream the InputStream of the Endnote file
* @return a list of key/value items which map the file's rows sequentially
* @throws IOException
* @throws FileSourceException
*/
private List<PlainMetadataKeyValueItem> tokenize(InputStream fileInputStream)
throws IOException, FileSourceException {
BufferedReader reader = new BufferedReader(new InputStreamReader(fileInputStream));
String line;
line = reader.readLine();
// FN and VR works as preamble, just check and skip them
if (line == null || !line.startsWith("FN")) {
throw new FileSourceException("Invalid endNote file");
}
line = reader.readLine();
if (line == null || !line.startsWith("VR")) {
throw new FileSourceException("Invalid endNote file");
}
// split each row into a first part ^[A-Z]{2}, used as the key (the meaning of the data),
// and a second part ?(.*), used as the value (the data)
Pattern pattern = Pattern.compile("(^[A-Z]{2}) ?(.*)$");
List<PlainMetadataKeyValueItem> list = new ArrayList<PlainMetadataKeyValueItem>();
while ((line = reader.readLine()) != null) {
line = line.trim();
// skip empty lines
if (line.isEmpty()) {
continue;
}
Matcher matcher = pattern.matcher(line);
if (matcher.matches()) {
PlainMetadataKeyValueItem item = new PlainMetadataKeyValueItem();
item.setKey(matcher.group(1));
item.setValue(matcher.group(2));
list.add(item);
}
}
return list;
}
@Override
public void setMetadataFieldMap(Map<MetadataFieldConfig,
MetadataContributor<PlainMetadataSourceDto>> metadataFieldMap) {
super.setMetadataFieldMap(metadataFieldMap);
}
}
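A sketch of the layout the parser above expects: an FN/VR preamble, two-letter tags, ER closing each record and EF closing the file. The sample content is invented; it only illustrates the readData/tokenize contract described in the Javadoc.
// Invented sample: one record with a title and an author.
// (needs java.io.ByteArrayInputStream and java.nio.charset.StandardCharsets)
String endnoteSample =
      "FN Sample provider\n"
    + "VR 1.0\n"
    + "TI A sample title\n"
    + "AU Doe, Jane\n"
    + "ER\n"
    + "EF\n";
InputStream in = new ByteArrayInputStream(endnoteSample.getBytes(StandardCharsets.UTF_8));
// readData(in) would return a single PlainMetadataSourceDto holding the TI and AU key/value pairs.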

View File

@@ -0,0 +1,108 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.io.IOException;
import java.io.StringReader;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import au.com.bytecode.opencsv.CSVReader;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.dspace.importer.external.service.components.dto.PlainMetadataKeyValueItem;
import org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto;
/**
* This class implements functionality to handle common situations regarding plain metadata.
* In some formats, such as CSV or TSV, lists are not allowed.
* We can use this MetadataContributor to parse a given plain metadata value and split it into
* the related list, based on the delimiter.
* Default values are comma (,) for the delimiter and double quote (") for the escape character.
*
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*
*/
public class EnhancedSimpleMetadataContributor extends SimpleMetadataContributor {
private char delimiter = ',';
private char escape = '"';
/**
* This method can be used to set the delimiter used during parsing.
* If no delimiter is set, the comma is used
*/
public void setDelimiter(char delimiter) {
this.delimiter = delimiter;
}
/**
* This method could be used to get the delimiter used in this class
*/
public char getDelimiter() {
return delimiter;
}
/**
* Method to inject the escape character.
* This must be the ASCII integer
* code of the character:
* for example, 9 for tab, 44 for comma.
* If no escape is set, the double quote is used
*/
public void setEscape(char escape) {
this.escape = escape;
}
/**
* Method to get the escape character.
*
*/
public char getEscape() {
return escape;
}
@Override
public Collection<MetadatumDTO> contributeMetadata(PlainMetadataSourceDto t) {
Collection<MetadatumDTO> values = new LinkedList<>();
for (PlainMetadataKeyValueItem metadatum : t.getMetadata()) {
if (getKey().equals(metadatum.getKey())) {
String[] splitted = splitToRecord(metadatum.getValue());
for (String value : splitted) {
MetadatumDTO dcValue = new MetadatumDTO();
dcValue.setValue(value);
dcValue.setElement(getField().getElement());
dcValue.setQualifier(getField().getQualifier());
dcValue.setSchema(getField().getSchema());
values.add(dcValue);
}
}
}
return values;
}
private String[] splitToRecord(String value) {
List<String[]> rows;
// For example, a list of authors must be: Author 1, Author 2, Author 3
// if an author name contains a comma, it is important to escape it in
// this way: Author 1, \"Author 2, something\", Author 3
try (CSVReader csvReader = new CSVReader(new StringReader(value),
delimiter, escape);) {
rows = csvReader.readAll();
} catch (IOException e) {
// fallback, use the input as the value
return new String[] { value };
}
// there must be exactly one row
return rows.get(0);
}
}
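A setup sketch for the contributor above; the column key, metadata field and delimiter are illustrative values, not defaults from any DSpace configuration (setKey and setField are inherited from SimpleMetadataContributor).
// Sketch: map column 3 of a CSV row to dc.contributor.author and split multi-valued cells on ';'.
EnhancedSimpleMetadataContributor authors = new EnhancedSimpleMetadataContributor();
authors.setKey("3"); // zero-based column index produced by the CSV source
authors.setField(new MetadataFieldConfig("dc", "contributor", "author"));
authors.setDelimiter(';');
// A cell containing  Doe, Jane; Roe, Richard  then contributes two dc.contributor.author values.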

View File

@@ -0,0 +1,139 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
/**
* This Contributor is helpful to work around a limitation of the Live Import Framework.
* In Live Import, one dc schema/element/qualifier can be associated with one and
* only one MetadataContributor, because the map they are stored in uses the dc entity as key.
*
* In this implementation we use the MetadataFieldConfig present in this MultipleMetadataContributor,
* but the data (the values of the dc metadatum) are loaded through each of the contributors defined
* in the list metadatumContributors, by iterating over them.
*
* @see org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping<RecordType>
*
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*
*/
public class MultipleMetadataContributor<T> implements MetadataContributor<T> {
private MetadataFieldConfig field;
private List<MetadataContributor> metadatumContributors;
/**
* Empty constructor
*/
public MultipleMetadataContributor() {
}
/**
* @param field {@link org.dspace.importer.external.metadatamapping.MetadataFieldConfig} used in
* mapping
* @param metadatumContributors A list of MetadataContributor
*/
public MultipleMetadataContributor(MetadataFieldConfig field, List<MetadataContributor> metadatumContributors) {
this.field = field;
this.metadatumContributors = metadatumContributors;
}
/**
* Set the metadatafieldMapping used in the transforming of a record to actual metadata
*
* @param metadataFieldMapping the new mapping.
*/
@Override
public void setMetadataFieldMapping(MetadataFieldMapping<T, MetadataContributor<T>> metadataFieldMapping) {
for (MetadataContributor metadatumContributor : metadatumContributors) {
metadatumContributor.setMetadataFieldMapping(metadataFieldMapping);
}
}
/**
* A separate Metadatum object is created for each Metadatum returned from the calls to
* MetadataContributor.contributeMetadata(t), for each MetadataContributor in the metadatumContributors list.
* All of them get as dc schema/element/qualifier the values defined in the MetadataFieldConfig.
*
* @param t the object we are trying to translate
* @return a collection of metadata gathered from each MetadataContributor
*/
@Override
public Collection<MetadatumDTO> contributeMetadata(T t) {
Collection<MetadatumDTO> values = new ArrayList<>();
for (MetadataContributor metadatumContributor : metadatumContributors) {
Collection<MetadatumDTO> metadata = metadatumContributor.contributeMetadata(t);
values.addAll(metadata);
}
changeDC(values);
return values;
}
/**
* This method does the core work of this implementation.
* It changes the DC schema/element/qualifier of the given Metadatum objects into
* the ones present in this contributor.
* In this way, the contributors in metadatumContributors may carry any dc values,
* because this method remaps them all.
*
* @param values the list of metadata we want to remap
*/
private void changeDC(Collection<MetadatumDTO> values) {
for (MetadatumDTO dto : values) {
dto.setElement(field.getElement());
dto.setQualifier(field.getQualifier());
dto.setSchema(field.getSchema());
}
}
/**
* Return the MetadataFieldConfig used while retrieving MetadatumDTO
*
* @return MetadataFieldConfig
*/
public MetadataFieldConfig getField() {
return field;
}
/**
* Setting the MetadataFieldConfig
*
* @param field MetadataFieldConfig used while retrieving MetadatumDTO
*/
public void setField(MetadataFieldConfig field) {
this.field = field;
}
/**
* Return the List of MetadataContributor objects set to this class
*
* @return metadatumContributors, list of MetadataContributor
*/
public List<MetadataContributor> getMetadatumContributors() {
return metadatumContributors;
}
/**
* Set the list of MetadataContributor objects on this class
*
* @param metadatumContributors a list of MetadataContributor instances
*/
public void setMetadatumContributors(List<MetadataContributor> metadatumContributors) {
this.metadatumContributors = metadatumContributors;
}
}
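A wiring sketch for the contributor above. The two inner contributors are hypothetical beans defined elsewhere and the target field is illustrative; the point is only that the values of all inner contributors end up remapped to the single field configured here.
// Sketch: gather values from two independent contributors and remap them all to dc.subject.
List<MetadataContributor> inner = new LinkedList<>();
inner.add(keywordContributor);  // hypothetical contributor defined elsewhere
inner.add(categoryContributor); // hypothetical contributor defined elsewhere
MultipleMetadataContributor<OMElement> subjects =
        new MultipleMetadataContributor<>(new MetadataFieldConfig("dc", "subject", null), inner);
// contributeMetadata(record) collects the values of both contributors and changeDC() forces dc/subject onto each.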

View File

@@ -77,18 +77,33 @@ public class SimpleMetadataContributor implements MetadataContributor<PlainMetad
return values;
}
- /*
- * Setter to inject field item
+ /**
+ * Method to inject field item
+ *
+ * @param field the {@link MetadataFieldConfig} to use in this contributor
*/
public void setField(MetadataFieldConfig field) {
this.field = field;
}
- /*
- * Setter to inject key value
+ /**
+ * Method to inject key value
*/
public void setKey(String key) {
this.key = key;
}
+ /**
+ * Method to retrieve the key
+ */
+ public String getKey() {
+ return key;
+ }
+ /**
+ * Method to retrieve the {@link MetadataFieldConfig} used in this contributor
+ */
+ public MetadataFieldConfig getField() {
+ return field;
+ }
}

View File

@@ -21,6 +21,8 @@ import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jaxen.JaxenException;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Required;
/**
@@ -31,6 +33,8 @@ import org.springframework.beans.factory.annotation.Required;
public class SimpleXpathMetadatumContributor implements MetadataContributor<OMElement> {
private MetadataFieldConfig field;
+ private static final Logger log = LoggerFactory.getLogger(SimpleXpathMetadatumContributor.class);
/**
* Return prefixToNamespaceMapping
*
@@ -157,12 +161,12 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor<OMEl
} else if (el instanceof OMText) {
values.add(metadataFieldMapping.toDCValue(field, ((OMText) el).getText()));
} else {
- System.err.println("node of type: " + el.getClass());
+ log.error("node of type: " + el.getClass());
}
}
return values;
} catch (JaxenException e) {
- System.err.println(query);
+ log.error(query, e);
throw new RuntimeException(e);
}

View File

@@ -44,6 +44,7 @@ import org.jaxen.JaxenException;
* Implements a data source for querying PubMed Central
*
* @author Roeland Dillen (roeland at atmire dot com)
+ * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*/
public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<OMElement>
implements QuerySource, FileSource {

View File

@@ -0,0 +1,141 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.ris.service;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.annotation.Resource;
import org.dspace.importer.external.exception.FileSourceException;
import org.dspace.importer.external.service.components.AbstractPlainMetadataSource;
import org.dspace.importer.external.service.components.dto.PlainMetadataKeyValueItem;
import org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto;
/**
* Implements a metadata importer for RIS files
* Implementation inspired by the BTE DataLoader {@link https://github.com/EKT/Biblio-Transformation-Engine/blob/master/bte-io/src/main/java/gr/ekt/bteio/loaders/RISDataLoader.java}
*
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*/
public class RisImportMetadataSourceServiceImpl extends AbstractPlainMetadataSource {
@Override
public String getImportSource() {
return "RISMetadataSource";
}
@Override
protected List<PlainMetadataSourceDto> readData(InputStream inputStream) throws FileSourceException {
return aggregateData(inputStream);
}
/**
* This method maps the data present in the InputStream and returns a list of PlainMetadataSourceDto.
* Each PlainMetadataSourceDto will be used to create a single {@link org.dspace.importer.external.datamodel.ImportRecord}
*
* @see org.dspace.importer.external.service.components.AbstractPlainMetadataSource
*
* @param inputStream the inputStream of the RIS file
* @return List of {@link org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto}
* @throws FileSourceException
*/
private List<PlainMetadataSourceDto> aggregateData(InputStream inputStream) throws FileSourceException {
List<PlainMetadataSourceDto> metadata = new ArrayList<>();
// map each line of the file to a key/value pair
List<PlainMetadataKeyValueItem> notAggregatedItems = notAggregatedData(inputStream);
List<PlainMetadataKeyValueItem> aggregatedTmpList = null;
Iterator<PlainMetadataKeyValueItem> itr = notAggregatedItems.iterator();
// iterate over the list of key/value items and
// create a new PlainMetadataSourceDto (which maps to an ImportRecord)
// every time the key is "TY" (the record separator in RIS)
while (itr.hasNext()) {
PlainMetadataKeyValueItem item = itr.next();
if ("TY".equals(item.getKey())) {
if (aggregatedTmpList != null) {
PlainMetadataSourceDto dto = new PlainMetadataSourceDto();
dto.setMetadata(new ArrayList<>(aggregatedTmpList));
metadata.add(dto);
}
aggregatedTmpList = new ArrayList<>();
aggregatedTmpList.add(item);
} else {
if (aggregatedTmpList != null) {
aggregatedTmpList.add(item);
// save last iteration metadata
if (!itr.hasNext()) {
PlainMetadataSourceDto dto = new PlainMetadataSourceDto();
dto.setMetadata(new ArrayList<>(aggregatedTmpList));
metadata.add(dto);
}
}
}
}
return metadata;
}
/**
* This method transforms each row of the RIS file into a PlainMetadataKeyValueItem,
* splitting the row sequentially through a RegExp without taking the meaning of the data into account.
* In this way, all entries present in the file are mapped into the resulting list.
*
* @param inputStream the InputStream of the file
* @return a list of key/value items which map the file's rows sequentially
* @throws FileSourceException
*/
private List<PlainMetadataKeyValueItem> notAggregatedData(InputStream inputStream) throws FileSourceException {
LinkedList<PlainMetadataKeyValueItem> items = new LinkedList<>();
BufferedReader reader;
try {
reader = new BufferedReader(new InputStreamReader(inputStream, "UTF-8"));
String line;
// match valid RIS entries
Pattern risPattern = Pattern.compile("^([A-Z][A-Z0-9]) - (.*)$");
while ((line = reader.readLine()) != null) {
if (line.trim().isEmpty()) {
continue;
}
Matcher risMatcher = risPattern.matcher(line);
if (risMatcher.matches()) {
PlainMetadataKeyValueItem keyValueItem = new PlainMetadataKeyValueItem();
keyValueItem.setValue(risMatcher.group(2));
keyValueItem.setKey(risMatcher.group(1));
items.add(keyValueItem);
} else {
if (!items.isEmpty()) {
items.getLast().setValue(items.getLast().getValue().concat(line));
}
}
}
} catch (Exception e) {
throw new FileSourceException("Cannot parse RIS file", e);
}
return items;
}
/**
* Set the MetadataFieldMapping containing the mapping between RecordType
* (in this case PlainMetadataSourceDto.class) and Metadata
*
* @param metadataFieldMap the map linking RecordType fields and metadata
*/
@Override
@SuppressWarnings("unchecked")
@Resource(name = "risMetadataFieldMap")
public void setMetadataFieldMap(@SuppressWarnings("rawtypes") Map metadataFieldMap) {
super.setMetadataFieldMap(metadataFieldMap);
}
}
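A sketch of the input the parser above handles; the fragment is invented and its tag spacing follows the regular expression used in notAggregatedData.
// Invented sample: one RIS record; "TY" opens the record, the other tags carry the data.
// (needs java.io.ByteArrayInputStream and java.nio.charset.StandardCharsets)
String risSample =
      "TY - JOUR\n"
    + "TI - A sample title\n"
    + "AU - Doe, Jane\n"
    + "ER - \n";
InputStream in = new ByteArrayInputStream(risSample.getBytes(StandardCharsets.UTF_8));
// notAggregatedData(in) yields four key/value items; aggregateData(in) groups them into
// a single PlainMetadataSourceDto starting at the "TY" entry.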

View File

@@ -16,7 +16,6 @@ import org.dspace.importer.external.metadatamapping.contributor.MetadataContribu
import org.dspace.importer.external.metadatamapping.transform.GenerateQueryService;
import org.dspace.importer.external.service.components.AbstractRemoteMetadataSource;
import org.dspace.importer.external.service.components.MetadataSource;
- import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Required;
/**
@@ -49,7 +48,6 @@ public abstract class AbstractImportMetadataSourceService<RecordType> extends Ab
*
* @param generateQueryForItem the query generator to be used.
*/
- @Autowired
public void setGenerateQueryForItem(GenerateQueryService generateQueryForItem) {
this.generateQueryForItem = generateQueryForItem;
}

View File

@@ -22,6 +22,11 @@ import org.dspace.importer.external.exception.FileSourceException;
*/
public interface FileSource extends MetadataSource {
+ /**
+ * Get the file extensions (xml, csv, txt, ...) supported by the FileSource
+ */
+ public List<String> getSupportedExtensions();
/**
* Return a list of ImportRecord constructed from input file.
*
@@ -62,9 +67,4 @@ public interface FileSource extends MetadataSource {
return false;
}
- /**
- * Get the file extensions (xml, csv, txt, ...) supported by the FileSource implementation
- */
- public List<String> getSupportedExtensions();
}

View File

@@ -82,6 +82,7 @@ public class Process implements ReloadableEntity<Integer> {
private Date creationTime;
public static final String BITSTREAM_TYPE_METADATAFIELD = "dspace.process.filetype";
+ public static final String OUTPUT_TYPE = "script_output";
protected Process() {
}

View File

@@ -0,0 +1,14 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.scripts;
public enum ProcessLogLevel {
INFO,
WARNING,
ERROR
}

View File

@@ -0,0 +1,78 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.scripts;
import java.util.HashMap;
import java.util.Map;
/**
* This is a container class that stores the criteria a {@link Process} must adhere to when being
* retrieved from the DB through the search methods
*/
public class ProcessQueryParameterContainer {
private Map<String, Object> queryParameterMap = new HashMap<>();
/**
* Generic getter for the queryParameterMap
* @return the queryParameterMap value of this ProcessQueryParameterContainer
*/
public Map<String, Object> getQueryParameterMap() {
return queryParameterMap;
}
private String sortProperty = "startTime";
private String sortOrder = "desc";
/**
* Generic setter for the queryParameterMap
* @param queryParameterMap The queryParameterMap to be set on this ProcessQueryParameterContainer
*/
public void setQueryParameterMap(Map<String, Object> queryParameterMap) {
this.queryParameterMap = queryParameterMap;
}
public void addToQueryParameterMap(String key, Object object) {
if (queryParameterMap == null) {
queryParameterMap = new HashMap<>();
}
queryParameterMap.put(key, object);
}
/**
* Generic getter for the sortProperty
* @return the sortProperty value of this ProcessQueryParameterContainer
*/
public String getSortProperty() {
return sortProperty;
}
/**
* Generic setter for the sortProperty
* @param sortProperty The sortProperty to be set on this ProcessQueryParameterContainer
*/
public void setSortProperty(String sortProperty) {
this.sortProperty = sortProperty;
}
/**
* Generic getter for the sortOrder
* @return the sortOrder value of this ProcessQueryParameterContainer
*/
public String getSortOrder() {
return sortOrder;
}
/**
* Generic setter for the sortOrder
* @param sortOrder The sortOrder to be set on this ProcessQueryParameterContainer
*/
public void setSortOrder(String sortOrder) {
this.sortOrder = sortOrder;
}
}
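A usage sketch for the container above. The map key "processStatus" and the ProcessStatus enum value are assumptions for illustration; the real keys are whatever the ProcessService search methods expect.
// Sketch: describe a search for failed processes, newest first.
ProcessQueryParameterContainer params = new ProcessQueryParameterContainer();
params.addToQueryParameterMap("processStatus", ProcessStatus.FAILED); // key name assumed
params.setSortProperty("startTime");
params.setSortOrder("desc");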

Some files were not shown because too many files have changed in this diff