mirror of
https://github.com/DSpace/DSpace.git
synced 2025-10-07 01:54:22 +00:00
Merge branch 'main' into DS-4515_submit-external-source
This commit is contained in:
@@ -30,14 +30,14 @@ install: "echo 'Skipping install stage, dependencies will be downloaded during b
|
||||
script:
|
||||
# Summary of flags used (below):
|
||||
# license:check => Validate all source code license headers
|
||||
# -Dmaven.test.skip=false => Enable DSpace Unit Tests
|
||||
# -DskipTests=false => Enable DSpace Unit Tests
|
||||
# -DskipITs=false => Enable DSpace Integration Tests
|
||||
# -Pdspace-rest => Enable optional dspace-rest module as part of build
|
||||
# -Pdspace-rest => Enable optional dspace-rest module as part of build
|
||||
# -P !assembly => Skip assembly of "dspace-installer" directory (as it can be memory intensive)
|
||||
# -B => Maven batch/non-interactive mode (recommended for CI)
|
||||
# -V => Display Maven version info before build
|
||||
# -Dsurefire.rerunFailingTestsCount=2 => try again for flakey tests, and keep track of/report on number of retries
|
||||
- "mvn clean install license:check -Dmaven.test.skip=false -DskipITs=false -Pdspace-rest -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"
|
||||
- "mvn clean install license:check -DskipTests=false -DskipITs=false -Pdspace-rest -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"
|
||||
|
||||
# After a successful build and test (see 'script'), send code coverage reports to coveralls.io
|
||||
# These code coverage reports are generated by jacoco-maven-plugin (during test process above).
|
||||
|
14
README.md
14
README.md
@@ -90,33 +90,33 @@ run automatically by [Travis CI](https://travis-ci.com/DSpace/DSpace/) for all P
|
||||
|
||||
* How to run both Unit Tests (via `maven-surefire-plugin`) and Integration Tests (via `maven-failsafe-plugin`):
|
||||
```
|
||||
mvn clean test -Dmaven.test.skip=false -DskipITs=false
|
||||
mvn clean test -DskipTests=false -DskipITs=false
|
||||
```
|
||||
* How to run just Unit Tests:
|
||||
```
|
||||
mvn test -Dmaven.test.skip=false
|
||||
mvn test -DskipTests=false
|
||||
```
|
||||
* How to run a *single* Unit Test
|
||||
```
|
||||
# Run all tests in a specific test class
|
||||
# NOTE: failIfNoTests=false is required to skip tests in other modules
|
||||
mvn test -Dmaven.test.skip=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false
|
||||
mvn test -DskipTests=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false
|
||||
|
||||
# Run one test method in a specific test class
|
||||
mvn test -Dmaven.test.skip=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
|
||||
mvn test -DskipTests=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
|
||||
```
|
||||
* How to run Integration Tests (requires enabling Unit tests too)
|
||||
```
|
||||
mvn verify -Dmaven.test.skip=false -DskipITs=false
|
||||
mvn verify -DskipTests=false -DskipITs=false
|
||||
```
|
||||
* How to run a *single* Integration Test (requires enabling Unit tests too)
|
||||
```
|
||||
# Run all integration tests in a specific test class
|
||||
# NOTE: failIfNoTests=false is required to skip tests in other modules
|
||||
mvn test -Dmaven.test.skip=false -DskipITs=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false
|
||||
mvn test -DskipTests=false -DskipITs=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false
|
||||
|
||||
# Run one test method in a specific test class
|
||||
mvn test -Dmaven.test.skip=false -DskipITs=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
|
||||
mvn test -DskipTests=false -DskipITs=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
|
||||
```
|
||||
* How to run only tests of a specific DSpace module
|
||||
```
|
||||
|
@@ -137,7 +137,7 @@
|
||||
<activation>
|
||||
<activeByDefault>false</activeByDefault>
|
||||
<!-- property>
|
||||
<name>maven.test.skip</name>
|
||||
<name>skipTests</name>
|
||||
<value>false</value>
|
||||
</property -->
|
||||
</activation>
|
||||
@@ -158,7 +158,7 @@
|
||||
<activation>
|
||||
<activeByDefault>false</activeByDefault>
|
||||
<property>
|
||||
<name>maven.test.skip</name>
|
||||
<name>skipTests</name>
|
||||
<value>false</value>
|
||||
</property>
|
||||
</activation>
|
||||
@@ -241,6 +241,7 @@
|
||||
<dspace.dir>${agnostic.build.dir}/testing/dspace/</dspace.dir>
|
||||
<!-- Turn off any DSpace logging -->
|
||||
<dspace.log.init.disable>true</dspace.log.init.disable>
|
||||
<solr.install.dir>${agnostic.build.dir}/testing/dspace/solr/</solr.install.dir>
|
||||
</systemPropertyVariables>
|
||||
</configuration>
|
||||
</plugin>
|
||||
@@ -255,6 +256,7 @@
|
||||
<dspace.dir>${agnostic.build.dir}/testing/dspace/</dspace.dir>
|
||||
<!-- Turn off any DSpace logging -->
|
||||
<dspace.log.init.disable>true</dspace.log.init.disable>
|
||||
<solr.install.dir>${agnostic.build.dir}/testing/dspace/solr/</solr.install.dir>
|
||||
</systemPropertyVariables>
|
||||
</configuration>
|
||||
</plugin>
|
||||
@@ -331,6 +333,10 @@
|
||||
<version>${jersey.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>commons-cli</groupId>
|
||||
<artifactId>commons-cli</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-codec</groupId>
|
||||
<artifactId>commons-codec</artifactId>
|
||||
@@ -487,10 +493,84 @@
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.solr</groupId>
|
||||
<artifactId>solr-cell</artifactId>
|
||||
<artifactId>solr-solrj</artifactId>
|
||||
<version>${solr.client.version}</version>
|
||||
</dependency>
|
||||
<!-- Solr Core is needed for Integration Tests (to run a MockSolrServer) -->
|
||||
<!-- The following Solr / Lucene dependencies also support integration tests -->
|
||||
<dependency>
|
||||
<groupId>org.apache.solr</groupId>
|
||||
<artifactId>solr-core</artifactId>
|
||||
<scope>test</scope>
|
||||
<version>${solr.client.version}</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>commons-cli</groupId>
|
||||
<artifactId>commons-cli</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-continuation</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-deploy</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-http</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-io</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-jmx</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-rewrite</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-security</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-server</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-servlet</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-servlets</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-util</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-webapp</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-xml</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.solr</groupId>
|
||||
<artifactId>solr-cell</artifactId>
|
||||
<exclusions>
|
||||
<!-- Newer versions provided in our parent POM -->
|
||||
<exclusion>
|
||||
<groupId>commons-cli</groupId>
|
||||
<artifactId>commons-cli</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.ow2.asm</groupId>
|
||||
<artifactId>asm-commons</artifactId>
|
||||
@@ -545,6 +625,32 @@
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.lucene</groupId>
|
||||
<artifactId>lucene-core</artifactId>
|
||||
</dependency>
|
||||
<!-- Reminder: Keep icu4j (in Parent POM) synced with version used by lucene-analyzers-icu below,
|
||||
otherwise ICUFoldingFilterFactory may throw errors in tests. -->
|
||||
<dependency>
|
||||
<groupId>org.apache.lucene</groupId>
|
||||
<artifactId>lucene-analyzers-icu</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.lucene</groupId>
|
||||
<artifactId>lucene-analyzers-smartcn</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.lucene</groupId>
|
||||
<artifactId>lucene-analyzers-stempel</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.xmlbeans</groupId>
|
||||
<artifactId>xmlbeans</artifactId>
|
||||
<version>2.6.0</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.maxmind.geoip2</groupId>
|
||||
@@ -725,7 +831,7 @@
|
||||
|
||||
<dependency>
|
||||
<groupId>org.xmlunit</groupId>
|
||||
<artifactId>xmlunit-matchers</artifactId>
|
||||
<artifactId>xmlunit-core</artifactId>
|
||||
<version>2.6.3</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
@@ -9,6 +9,7 @@ package org.dspace.content;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
@@ -101,7 +102,7 @@ public class MetadataDSpaceCsvExportServiceImpl implements MetadataDSpaceCsvExpo
|
||||
throws SQLException {
|
||||
// Add all the collections
|
||||
List<Collection> collections = community.getCollections();
|
||||
Iterator<Item> result = null;
|
||||
Iterator<Item> result = Collections.<Item>emptyIterator();
|
||||
for (Collection collection : collections) {
|
||||
Iterator<Item> items = itemService.findByCollection(context, collection);
|
||||
result = addItemsToResult(result, items);
|
||||
|
@@ -8,21 +8,22 @@
|
||||
package org.dspace.curate;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FileReader;
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
import java.io.OutputStreamWriter;
|
||||
import java.io.PrintStream;
|
||||
import java.io.Writer;
|
||||
import java.sql.SQLException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.CommandLineParser;
|
||||
import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.PosixParser;
|
||||
import org.apache.commons.io.output.NullOutputStream;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.factory.CoreServiceFactory;
|
||||
@@ -30,183 +31,86 @@ import org.dspace.curate.factory.CurateServiceFactory;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.eperson.factory.EPersonServiceFactory;
|
||||
import org.dspace.eperson.service.EPersonService;
|
||||
import org.dspace.handle.factory.HandleServiceFactory;
|
||||
import org.dspace.handle.service.HandleService;
|
||||
import org.dspace.scripts.DSpaceRunnable;
|
||||
import org.dspace.utils.DSpace;
|
||||
|
||||
/**
|
||||
* CurationCli provides command-line access to Curation tools and processes.
|
||||
*
|
||||
* @author richardrodgers
|
||||
*/
|
||||
public class CurationCli {
|
||||
public class CurationCli extends DSpaceRunnable<CurationScriptConfiguration> {
|
||||
|
||||
private EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
|
||||
|
||||
private Context context;
|
||||
private CurationClientOptions curationClientOptions;
|
||||
|
||||
private String task;
|
||||
private String taskFile;
|
||||
private String id;
|
||||
private String queue;
|
||||
private String scope;
|
||||
private String reporter;
|
||||
private Map<String, String> parameters;
|
||||
private boolean verbose;
|
||||
|
||||
@Override
|
||||
public void internalRun() throws Exception {
|
||||
if (curationClientOptions == CurationClientOptions.HELP) {
|
||||
printHelp();
|
||||
return;
|
||||
}
|
||||
|
||||
Curator curator = initCurator();
|
||||
|
||||
// load curation tasks
|
||||
if (curationClientOptions == CurationClientOptions.TASK) {
|
||||
long start = System.currentTimeMillis();
|
||||
handleCurationTask(curator);
|
||||
this.endScript(start);
|
||||
}
|
||||
|
||||
// process task queue
|
||||
if (curationClientOptions == CurationClientOptions.QUEUE) {
|
||||
// process the task queue
|
||||
TaskQueue taskQueue = (TaskQueue) CoreServiceFactory.getInstance().getPluginService()
|
||||
.getSinglePlugin(TaskQueue.class);
|
||||
if (taskQueue == null) {
|
||||
super.handler.logError("No implementation configured for queue");
|
||||
throw new UnsupportedOperationException("No queue service available");
|
||||
}
|
||||
long timeRun = this.runQueue(taskQueue, curator);
|
||||
this.endScript(timeRun);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Default constructor
|
||||
* Does the curation task (-t) or the task in the given file (-T).
|
||||
* Checks:
|
||||
* - if required option -i is missing.
|
||||
* - if option -t has a valid task option
|
||||
*/
|
||||
private CurationCli() { }
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
// create an options object and populate it
|
||||
CommandLineParser parser = new PosixParser();
|
||||
|
||||
Options options = new Options();
|
||||
|
||||
options.addOption("t", "task", true,
|
||||
"curation task name");
|
||||
options.addOption("T", "taskfile", true,
|
||||
"file containing curation task names");
|
||||
options.addOption("i", "id", true,
|
||||
"Id (handle) of object to perform task on, or 'all' to perform on whole repository");
|
||||
options.addOption("p", "parameter", true,
|
||||
"a task parameter 'NAME=VALUE'");
|
||||
options.addOption("q", "queue", true,
|
||||
"name of task queue to process");
|
||||
options.addOption("e", "eperson", true,
|
||||
"email address of curating eperson");
|
||||
options.addOption("r", "reporter", true,
|
||||
"relative or absolute path to the desired report file. "
|
||||
+ "Use '-' to report to console. "
|
||||
+ "If absent, no reporting");
|
||||
options.addOption("s", "scope", true,
|
||||
"transaction scope to impose: use 'object', 'curation', or 'open'. If absent, 'open' " +
|
||||
"applies");
|
||||
options.addOption("v", "verbose", false,
|
||||
"report activity to stdout");
|
||||
options.addOption("h", "help", false, "help");
|
||||
|
||||
CommandLine line = parser.parse(options, args);
|
||||
|
||||
String taskName = null;
|
||||
String taskFileName = null;
|
||||
String idName = null;
|
||||
String taskQueueName = null;
|
||||
String ePersonName = null;
|
||||
String reporterName = null;
|
||||
String scope = null;
|
||||
boolean verbose = false;
|
||||
final Map<String, String> parameters = new HashMap<>();
|
||||
|
||||
if (line.hasOption('h')) {
|
||||
HelpFormatter help = new HelpFormatter();
|
||||
help.printHelp("CurationCli\n", options);
|
||||
System.out
|
||||
.println("\nwhole repo: CurationCli -t estimate -i all");
|
||||
System.out
|
||||
.println("single item: CurationCli -t generate -i itemId");
|
||||
System.out
|
||||
.println("task queue: CurationCli -q monthly");
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
if (line.hasOption('t')) { // task
|
||||
taskName = line.getOptionValue('t');
|
||||
}
|
||||
|
||||
if (line.hasOption('T')) { // task file
|
||||
taskFileName = line.getOptionValue('T');
|
||||
}
|
||||
|
||||
if (line.hasOption('i')) { // id
|
||||
idName = line.getOptionValue('i');
|
||||
}
|
||||
|
||||
if (line.hasOption('q')) { // task queue
|
||||
taskQueueName = line.getOptionValue('q');
|
||||
}
|
||||
|
||||
if (line.hasOption('e')) { // eperson
|
||||
ePersonName = line.getOptionValue('e');
|
||||
}
|
||||
|
||||
if (line.hasOption('p')) { // parameter
|
||||
for (String parameter : line.getOptionValues('p')) {
|
||||
String[] parts = parameter.split("=", 2);
|
||||
String name = parts[0].trim();
|
||||
String value;
|
||||
if (parts.length > 1) {
|
||||
value = parts[1].trim();
|
||||
} else {
|
||||
value = "true";
|
||||
}
|
||||
parameters.put(name, value);
|
||||
}
|
||||
}
|
||||
if (line.hasOption('r')) { // report file
|
||||
reporterName = line.getOptionValue('r');
|
||||
}
|
||||
|
||||
|
||||
if (line.hasOption('s')) { // transaction scope
|
||||
scope = line.getOptionValue('s');
|
||||
}
|
||||
|
||||
if (line.hasOption('v')) { // verbose
|
||||
verbose = true;
|
||||
}
|
||||
|
||||
// now validate the args
|
||||
if (idName == null && taskQueueName == null) {
|
||||
System.out.println("Id must be specified: a handle, 'all', or a task queue (-h for help)");
|
||||
System.exit(1);
|
||||
}
|
||||
|
||||
if (taskName == null && taskFileName == null && taskQueueName == null) {
|
||||
System.out.println("A curation task or queue must be specified (-h for help)");
|
||||
System.exit(1);
|
||||
}
|
||||
|
||||
if (scope != null && Curator.TxScope.valueOf(scope.toUpperCase()) == null) {
|
||||
System.out.println("Bad transaction scope '" + scope + "': only 'object', 'curation' or 'open' recognized");
|
||||
System.exit(1);
|
||||
}
|
||||
EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
|
||||
|
||||
Context c = new Context(Context.Mode.BATCH_EDIT);
|
||||
if (ePersonName != null) {
|
||||
EPerson ePerson = ePersonService.findByEmail(c, ePersonName);
|
||||
if (ePerson == null) {
|
||||
System.out.println("EPerson not found: " + ePersonName);
|
||||
System.exit(1);
|
||||
}
|
||||
c.setCurrentUser(ePerson);
|
||||
} else {
|
||||
c.turnOffAuthorisationSystem();
|
||||
}
|
||||
|
||||
Curator curator = new Curator();
|
||||
OutputStream reporter;
|
||||
if (null == reporterName) {
|
||||
reporter = new NullOutputStream();
|
||||
} else if ("-".equals(reporterName)) {
|
||||
reporter = System.out;
|
||||
} else {
|
||||
reporter = new PrintStream(reporterName);
|
||||
}
|
||||
Writer reportWriter = new OutputStreamWriter(reporter);
|
||||
curator.setReporter(reportWriter);
|
||||
|
||||
if (scope != null) {
|
||||
Curator.TxScope txScope = Curator.TxScope.valueOf(scope.toUpperCase());
|
||||
curator.setTransactionScope(txScope);
|
||||
}
|
||||
curator.addParameters(parameters);
|
||||
// we are operating in batch mode, if anyone cares.
|
||||
curator.setInvoked(Curator.Invoked.BATCH);
|
||||
// load curation tasks
|
||||
if (taskName != null) {
|
||||
private void handleCurationTask(Curator curator) throws IOException, SQLException {
|
||||
String taskName;
|
||||
if (commandLine.hasOption('t')) {
|
||||
if (verbose) {
|
||||
System.out.println("Adding task: " + taskName);
|
||||
handler.logInfo("Adding task: " + this.task);
|
||||
}
|
||||
curator.addTask(taskName);
|
||||
if (verbose && !curator.hasTask(taskName)) {
|
||||
System.out.println("Task: " + taskName + " not resolved");
|
||||
curator.addTask(this.task);
|
||||
if (verbose && !curator.hasTask(this.task)) {
|
||||
handler.logInfo("Task: " + this.task + " not resolved");
|
||||
}
|
||||
} else if (taskQueueName == null) {
|
||||
} else if (commandLine.hasOption('T')) {
|
||||
// load taskFile
|
||||
BufferedReader reader = null;
|
||||
try {
|
||||
reader = new BufferedReader(new FileReader(taskFileName));
|
||||
reader = new BufferedReader(new FileReader(this.taskFile));
|
||||
while ((taskName = reader.readLine()) != null) {
|
||||
if (verbose) {
|
||||
System.out.println("Adding task: " + taskName);
|
||||
super.handler.logInfo("Adding task: " + taskName);
|
||||
}
|
||||
curator.addTask(taskName);
|
||||
}
|
||||
@@ -217,59 +121,242 @@ public class CurationCli {
|
||||
}
|
||||
}
|
||||
// run tasks against object
|
||||
long start = System.currentTimeMillis();
|
||||
if (verbose) {
|
||||
System.out.println("Starting curation");
|
||||
super.handler.logInfo("Starting curation");
|
||||
super.handler.logInfo("Curating id: " + this.id);
|
||||
}
|
||||
if (idName != null) {
|
||||
if ("all".equals(this.id)) {
|
||||
// run on whole Site
|
||||
curator.curate(context,
|
||||
ContentServiceFactory.getInstance().getSiteService().findSite(context).getHandle());
|
||||
} else {
|
||||
curator.curate(context, this.id);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs task queue (-q set)
|
||||
*
|
||||
* @param queue The task queue
|
||||
* @param curator The curator
|
||||
* @return Time when queue started
|
||||
*/
|
||||
private long runQueue(TaskQueue queue, Curator curator) throws SQLException, AuthorizeException, IOException {
|
||||
// use current time as our reader 'ticket'
|
||||
long ticket = System.currentTimeMillis();
|
||||
Iterator<TaskQueueEntry> entryIter = queue.dequeue(this.queue, ticket).iterator();
|
||||
while (entryIter.hasNext()) {
|
||||
TaskQueueEntry entry = entryIter.next();
|
||||
if (verbose) {
|
||||
System.out.println("Curating id: " + idName);
|
||||
super.handler.logInfo("Curating id: " + entry.getObjectId());
|
||||
}
|
||||
if ("all".equals(idName)) {
|
||||
// run on whole Site
|
||||
curator.curate(c, ContentServiceFactory.getInstance().getSiteService().findSite(c).getHandle());
|
||||
curator.clear();
|
||||
// does entry relate to a DSO or workflow object?
|
||||
if (entry.getObjectId().indexOf('/') > 0) {
|
||||
for (String taskName : entry.getTaskNames()) {
|
||||
curator.addTask(taskName);
|
||||
}
|
||||
curator.curate(context, entry.getObjectId());
|
||||
} else {
|
||||
curator.curate(c, idName);
|
||||
// make eperson who queued task the effective user
|
||||
EPerson agent = ePersonService.findByEmail(context, entry.getEpersonId());
|
||||
if (agent != null) {
|
||||
context.setCurrentUser(agent);
|
||||
}
|
||||
CurateServiceFactory.getInstance().getWorkflowCuratorService()
|
||||
.curate(curator, context, entry.getObjectId());
|
||||
}
|
||||
}
|
||||
queue.release(this.queue, ticket, true);
|
||||
return ticket;
|
||||
}
|
||||
|
||||
/**
|
||||
* End of curation script; logs script time if -v verbose is set
|
||||
*
|
||||
* @param timeRun Time script was started
|
||||
* @throws SQLException If DSpace contextx can't complete
|
||||
*/
|
||||
private void endScript(long timeRun) throws SQLException {
|
||||
context.complete();
|
||||
if (verbose) {
|
||||
long elapsed = System.currentTimeMillis() - timeRun;
|
||||
this.handler.logInfo("Ending curation. Elapsed time: " + elapsed);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize the curator with command line variables
|
||||
*
|
||||
* @return Initialised curator
|
||||
* @throws FileNotFoundException If file of command line variable -r reporter is not found
|
||||
*/
|
||||
private Curator initCurator() throws FileNotFoundException {
|
||||
Curator curator = new Curator();
|
||||
OutputStream reporterStream;
|
||||
if (null == this.reporter) {
|
||||
reporterStream = new NullOutputStream();
|
||||
} else if ("-".equals(this.reporter)) {
|
||||
reporterStream = System.out;
|
||||
} else {
|
||||
reporterStream = new PrintStream(this.reporter);
|
||||
}
|
||||
Writer reportWriter = new OutputStreamWriter(reporterStream);
|
||||
curator.setReporter(reportWriter);
|
||||
|
||||
if (this.scope != null) {
|
||||
Curator.TxScope txScope = Curator.TxScope.valueOf(this.scope.toUpperCase());
|
||||
curator.setTransactionScope(txScope);
|
||||
}
|
||||
|
||||
curator.addParameters(parameters);
|
||||
// we are operating in batch mode, if anyone cares.
|
||||
curator.setInvoked(Curator.Invoked.BATCH);
|
||||
return curator;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void printHelp() {
|
||||
super.printHelp();
|
||||
super.handler.logInfo("\nwhole repo: CurationCli -t estimate -i all");
|
||||
super.handler.logInfo("single item: CurationCli -t generate -i itemId");
|
||||
super.handler.logInfo("task queue: CurationCli -q monthly");
|
||||
}
|
||||
|
||||
@Override
|
||||
public CurationScriptConfiguration getScriptConfiguration() {
|
||||
return new DSpace().getServiceManager().getServiceByName("curate", CurationScriptConfiguration.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setup() {
|
||||
if (this.commandLine.hasOption('e')) {
|
||||
String ePersonEmail = this.commandLine.getOptionValue('e');
|
||||
this.context = new Context(Context.Mode.BATCH_EDIT);
|
||||
try {
|
||||
EPerson ePerson = ePersonService.findByEmail(this.context, ePersonEmail);
|
||||
if (ePerson == null) {
|
||||
super.handler.logError("EPerson not found: " + ePersonEmail);
|
||||
throw new IllegalArgumentException("Unable to find a user with email: " + ePersonEmail);
|
||||
}
|
||||
this.context.setCurrentUser(ePerson);
|
||||
} catch (SQLException e) {
|
||||
throw new IllegalArgumentException("SQLException trying to find user with email: " + ePersonEmail);
|
||||
}
|
||||
} else {
|
||||
// process the task queue
|
||||
TaskQueue queue = (TaskQueue) CoreServiceFactory.getInstance().getPluginService()
|
||||
.getSinglePlugin(TaskQueue.class);
|
||||
if (queue == null) {
|
||||
System.out.println("No implementation configured for queue");
|
||||
throw new UnsupportedOperationException("No queue service available");
|
||||
}
|
||||
// use current time as our reader 'ticket'
|
||||
long ticket = System.currentTimeMillis();
|
||||
Iterator<TaskQueueEntry> entryIter = queue.dequeue(taskQueueName, ticket).iterator();
|
||||
while (entryIter.hasNext()) {
|
||||
TaskQueueEntry entry = entryIter.next();
|
||||
if (verbose) {
|
||||
System.out.println("Curating id: " + entry.getObjectId());
|
||||
}
|
||||
curator.clear();
|
||||
// does entry relate to a DSO or workflow object?
|
||||
if (entry.getObjectId().indexOf("/") > 0) {
|
||||
for (String task : entry.getTaskNames()) {
|
||||
curator.addTask(task);
|
||||
}
|
||||
curator.curate(c, entry.getObjectId());
|
||||
} else {
|
||||
// make eperson who queued task the effective user
|
||||
EPerson agent = ePersonService.findByEmail(c, entry.getEpersonId());
|
||||
if (agent != null) {
|
||||
c.setCurrentUser(agent);
|
||||
}
|
||||
CurateServiceFactory.getInstance().getWorkflowCuratorService()
|
||||
.curate(curator, c, entry.getObjectId());
|
||||
}
|
||||
}
|
||||
queue.release(taskQueueName, ticket, true);
|
||||
throw new IllegalArgumentException("Needs an -e to set eperson (admin)");
|
||||
}
|
||||
c.complete();
|
||||
if (verbose) {
|
||||
long elapsed = System.currentTimeMillis() - start;
|
||||
System.out.println("Ending curation. Elapsed time: " + elapsed);
|
||||
this.curationClientOptions = CurationClientOptions.getClientOption(commandLine);
|
||||
|
||||
if (this.curationClientOptions != null) {
|
||||
this.initGeneralLineOptionsAndCheckIfValid();
|
||||
if (curationClientOptions == CurationClientOptions.TASK) {
|
||||
this.initTaskLineOptionsAndCheckIfValid();
|
||||
} else if (curationClientOptions == CurationClientOptions.QUEUE) {
|
||||
this.queue = this.commandLine.getOptionValue('q');
|
||||
}
|
||||
} else {
|
||||
throw new IllegalArgumentException("[--help || --task|--taskfile <> -identifier <> || -queue <> ] must be" +
|
||||
" specified");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fills in some optional command line options.
|
||||
* Checks if there are missing required options or invalid values for options.
|
||||
*/
|
||||
private void initGeneralLineOptionsAndCheckIfValid() {
|
||||
// report file
|
||||
if (this.commandLine.hasOption('r')) {
|
||||
this.reporter = this.commandLine.getOptionValue('r');
|
||||
}
|
||||
|
||||
// parameters
|
||||
this.parameters = new HashMap<>();
|
||||
if (this.commandLine.hasOption('p')) {
|
||||
for (String parameter : this.commandLine.getOptionValues('p')) {
|
||||
String[] parts = parameter.split("=", 2);
|
||||
String name = parts[0].trim();
|
||||
String value;
|
||||
if (parts.length > 1) {
|
||||
value = parts[1].trim();
|
||||
} else {
|
||||
value = "true";
|
||||
}
|
||||
this.parameters.put(name, value);
|
||||
}
|
||||
}
|
||||
|
||||
// verbose
|
||||
verbose = false;
|
||||
if (commandLine.hasOption('v')) {
|
||||
verbose = true;
|
||||
}
|
||||
|
||||
// scope
|
||||
if (this.commandLine.getOptionValue('s') != null) {
|
||||
this.scope = this.commandLine.getOptionValue('s');
|
||||
if (this.scope != null && Curator.TxScope.valueOf(this.scope.toUpperCase()) == null) {
|
||||
this.handler.logError("Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " +
|
||||
"'open' recognized");
|
||||
throw new IllegalArgumentException(
|
||||
"Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " +
|
||||
"'open' recognized");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fills in required command line options for the task or taskFile option.
|
||||
* Checks if there are is a missing required -i option and if -i is either 'all' or a valid dso handle.
|
||||
* Checks if -t task has a valid task option.
|
||||
* Checks if -T taskfile is a valid file.
|
||||
*/
|
||||
private void initTaskLineOptionsAndCheckIfValid() {
|
||||
// task or taskFile
|
||||
if (this.commandLine.hasOption('t')) {
|
||||
this.task = this.commandLine.getOptionValue('t');
|
||||
if (!CurationClientOptions.getTaskOptions().contains(this.task)) {
|
||||
super.handler
|
||||
.logError("-t task must be one of: " + CurationClientOptions.getTaskOptions());
|
||||
throw new IllegalArgumentException(
|
||||
"-t task must be one of: " + CurationClientOptions.getTaskOptions());
|
||||
}
|
||||
} else if (this.commandLine.hasOption('T')) {
|
||||
this.taskFile = this.commandLine.getOptionValue('T');
|
||||
if (!(new File(this.taskFile).isFile())) {
|
||||
super.handler
|
||||
.logError("-T taskFile must be valid file: " + this.taskFile);
|
||||
throw new IllegalArgumentException("-T taskFile must be valid file: " + this.taskFile);
|
||||
}
|
||||
}
|
||||
|
||||
if (this.commandLine.hasOption('i')) {
|
||||
this.id = this.commandLine.getOptionValue('i').toLowerCase();
|
||||
if (!this.id.equalsIgnoreCase("all")) {
|
||||
HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
|
||||
DSpaceObject dso;
|
||||
try {
|
||||
dso = handleService.resolveToObject(this.context, id);
|
||||
} catch (SQLException e) {
|
||||
super.handler.logError("SQLException trying to resolve handle " + id + " to a valid dso");
|
||||
throw new IllegalArgumentException(
|
||||
"SQLException trying to resolve handle " + id + " to a valid dso");
|
||||
}
|
||||
if (dso == null) {
|
||||
super.handler.logError("Id must be specified: a valid dso handle or 'all'; " + this.id + " could " +
|
||||
"not be resolved to valid dso handle");
|
||||
throw new IllegalArgumentException(
|
||||
"Id must be specified: a valid dso handle or 'all'; " + this.id + " could " +
|
||||
"not be resolved to valid dso handle");
|
||||
}
|
||||
}
|
||||
} else {
|
||||
super.handler.logError("Id must be specified: a handle, 'all', or no -i and a -q task queue (-h for " +
|
||||
"help)");
|
||||
throw new IllegalArgumentException(
|
||||
"Id must be specified: a handle, 'all', or no -i and a -q task queue (-h for " +
|
||||
"help)");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -0,0 +1,85 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.curate;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.cli.CommandLine;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/**
|
||||
* This Enum holds all the possible options and combinations for the Curation script
|
||||
*
|
||||
* @author Maria Verdonck (Atmire) on 23/06/2020
|
||||
*/
|
||||
public enum CurationClientOptions {
|
||||
TASK,
|
||||
QUEUE,
|
||||
HELP;
|
||||
|
||||
private static List<String> taskOptions;
|
||||
|
||||
/**
|
||||
* This method resolves the CommandLine parameters to figure out which action the curation script should perform
|
||||
*
|
||||
* @param commandLine The relevant CommandLine for the curation script
|
||||
* @return The curation option to be ran, parsed from the CommandLine
|
||||
*/
|
||||
protected static CurationClientOptions getClientOption(CommandLine commandLine) {
|
||||
if (commandLine.hasOption("h")) {
|
||||
return CurationClientOptions.HELP;
|
||||
} else if (commandLine.hasOption("t") || commandLine.hasOption("T")) {
|
||||
return CurationClientOptions.TASK;
|
||||
} else if (commandLine.hasOption("q")) {
|
||||
return CurationClientOptions.QUEUE;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
protected static Options constructOptions() {
|
||||
Options options = new Options();
|
||||
|
||||
options.addOption("t", "task", true, "curation task name; options: " + getTaskOptions());
|
||||
options.addOption("T", "taskfile", true, "file containing curation task names");
|
||||
options.addOption("i", "id", true,
|
||||
"Id (handle) of object to perform task on, or 'all' to perform on whole repository");
|
||||
options.addOption("p", "parameter", true, "a task parameter 'NAME=VALUE'");
|
||||
options.addOption("q", "queue", true, "name of task queue to process");
|
||||
options.addOption("e", "eperson", true, "email address of curating eperson");
|
||||
options.addOption("r", "reporter", true,
|
||||
"relative or absolute path to the desired report file. Use '-' to report to console. If absent, no " +
|
||||
"reporting");
|
||||
options.addOption("s", "scope", true,
|
||||
"transaction scope to impose: use 'object', 'curation', or 'open'. If absent, 'open' applies");
|
||||
options.addOption("v", "verbose", false, "report activity to stdout");
|
||||
options.addOption("h", "help", false, "help");
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates list of the taskOptions' keys from the configs of plugin.named.org.dspace.curate.CurationTask
|
||||
*
|
||||
* @return List of the taskOptions' keys from the configs of plugin.named.org.dspace.curate.CurationTask
|
||||
*/
|
||||
public static List<String> getTaskOptions() {
|
||||
if (taskOptions == null) {
|
||||
ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
String[] taskConfigs = configurationService.getArrayProperty("plugin.named.org.dspace.curate.CurationTask");
|
||||
taskOptions = new ArrayList<>();
|
||||
for (String taskConfig : taskConfigs) {
|
||||
taskOptions.add(StringUtils.substringAfterLast(taskConfig, "=").trim());
|
||||
}
|
||||
}
|
||||
return taskOptions;
|
||||
}
|
||||
}
|
@@ -0,0 +1,61 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.curate;
|
||||
|
||||
import java.sql.SQLException;
|
||||
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.dspace.authorize.service.AuthorizeService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
* The {@link ScriptConfiguration} for the {@link CurationCli} script
|
||||
*
|
||||
* @author Maria Verdonck (Atmire) on 23/06/2020
|
||||
*/
|
||||
public class CurationScriptConfiguration<T extends CurationCli> extends ScriptConfiguration<T> {
|
||||
|
||||
@Autowired
|
||||
private AuthorizeService authorizeService;
|
||||
|
||||
private Class<T> dspaceRunnableClass;
|
||||
|
||||
@Override
|
||||
public Class<T> getDspaceRunnableClass() {
|
||||
return this.dspaceRunnableClass;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
|
||||
this.dspaceRunnableClass = dspaceRunnableClass;
|
||||
}
|
||||
|
||||
/**
|
||||
* Only admin can run Curation script via the scripts and processes endpoints.
|
||||
* @param context The relevant DSpace context
|
||||
* @return True if currentUser is admin, otherwise false
|
||||
*/
|
||||
@Override
|
||||
public boolean isAllowedToExecute(Context context) {
|
||||
try {
|
||||
return authorizeService.isAdmin(context);
|
||||
} catch (SQLException e) {
|
||||
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Options getOptions() {
|
||||
if (options == null) {
|
||||
super.options = CurationClientOptions.constructOptions();
|
||||
}
|
||||
return options;
|
||||
}
|
||||
}
|
@@ -98,6 +98,7 @@ public class Curator {
|
||||
communityService = ContentServiceFactory.getInstance().getCommunityService();
|
||||
itemService = ContentServiceFactory.getInstance().getItemService();
|
||||
handleService = HandleServiceFactory.getInstance().getHandleService();
|
||||
resolver = new TaskResolver();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -142,10 +143,10 @@ public class Curator {
|
||||
// performance order currently FIFO - to be revisited
|
||||
perfList.add(taskName);
|
||||
} catch (IOException ioE) {
|
||||
log.error("Task: '" + taskName + "' initialization failure: " + ioE.getMessage());
|
||||
System.out.println("Task: '" + taskName + "' initialization failure: " + ioE.getMessage());
|
||||
}
|
||||
} else {
|
||||
log.error("Task: '" + taskName + "' does not resolve");
|
||||
System.out.println("Task: '" + taskName + "' does not resolve");
|
||||
}
|
||||
return this;
|
||||
}
|
||||
@@ -259,13 +260,6 @@ public class Curator {
|
||||
/**
|
||||
* Performs all configured tasks upon DSpace object
|
||||
* (Community, Collection or Item).
|
||||
* <P>
|
||||
* Note: Site-wide tasks will default to running as
|
||||
* an Anonymous User unless you call the Site-wide task
|
||||
* via the {@link curate(Context,String)} or
|
||||
* {@link #curate(Context, DSpaceObject)} method with an
|
||||
* authenticated Context object.
|
||||
*
|
||||
* @param dso the DSpace object
|
||||
* @throws IOException if IO error
|
||||
*/
|
||||
@@ -325,7 +319,7 @@ public class Curator {
|
||||
taskQ.enqueue(queueId, new TaskQueueEntry(c.getCurrentUser().getName(),
|
||||
System.currentTimeMillis(), perfList, id));
|
||||
} else {
|
||||
log.error("curate - no TaskQueue implemented");
|
||||
System.out.println("curate - no TaskQueue implemented");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -346,7 +340,7 @@ public class Curator {
|
||||
try {
|
||||
reporter.append(message);
|
||||
} catch (IOException ex) {
|
||||
log.error("Task reporting failure", ex);
|
||||
System.out.println("Task reporting failure: " + ex);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -552,7 +546,7 @@ public class Curator {
|
||||
return !suspend(statusCode);
|
||||
} catch (IOException ioe) {
|
||||
//log error & pass exception upwards
|
||||
log.error("Error executing curation task '" + task.getName() + "'", ioe);
|
||||
System.out.println("Error executing curation task '" + task.getName() + "'; " + ioe);
|
||||
throw ioe;
|
||||
}
|
||||
}
|
||||
@@ -568,7 +562,7 @@ public class Curator {
|
||||
return !suspend(statusCode);
|
||||
} catch (IOException ioe) {
|
||||
//log error & pass exception upwards
|
||||
log.error("Error executing curation task '" + task.getName() + "'", ioe);
|
||||
System.out.println("Error executing curation task '" + task.getName() + "'; " + ioe);
|
||||
throw ioe;
|
||||
}
|
||||
}
|
||||
|
@@ -7,6 +7,9 @@
|
||||
*/
|
||||
package org.dspace.discovery;
|
||||
|
||||
import static java.util.Collections.singletonList;
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
@@ -31,7 +34,7 @@ public class DiscoverQuery {
|
||||
**/
|
||||
private String query;
|
||||
private List<String> filterQueries;
|
||||
private String DSpaceObjectFilter = null;
|
||||
private List<String> dspaceObjectFilters = new ArrayList<>();
|
||||
private List<String> fieldPresentQueries;
|
||||
private boolean spellCheck;
|
||||
|
||||
@@ -118,20 +121,33 @@ public class DiscoverQuery {
|
||||
* Sets the DSpace object filter, must be an DSpace Object type integer
|
||||
* can be used to only return objects from a certain DSpace Object type
|
||||
*
|
||||
* @param DSpaceObjectFilter the DSpace object filer
|
||||
* @param dspaceObjectFilter the DSpace object filter
|
||||
*/
|
||||
public void setDSpaceObjectFilter(String DSpaceObjectFilter) {
|
||||
this.DSpaceObjectFilter = DSpaceObjectFilter;
|
||||
public void setDSpaceObjectFilter(String dspaceObjectFilter) {
|
||||
this.dspaceObjectFilters = singletonList(dspaceObjectFilter);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the DSpace object filter
|
||||
* can be used to only return objects from a certain DSpace Object type
|
||||
* Adds a DSpace object filter, must be an DSpace Object type integer.
|
||||
* Can be used to also return objects from a certain DSpace Object type.
|
||||
*
|
||||
* @return the DSpace object filer
|
||||
* @param dspaceObjectFilter the DSpace object filer
|
||||
*/
|
||||
public String getDSpaceObjectFilter() {
|
||||
return DSpaceObjectFilter;
|
||||
public void addDSpaceObjectFilter(String dspaceObjectFilter) {
|
||||
|
||||
if (isNotBlank(dspaceObjectFilter)) {
|
||||
this.dspaceObjectFilters.add(dspaceObjectFilter);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the DSpace object filters
|
||||
* can be used to only return objects from certain DSpace Object types
|
||||
*
|
||||
* @return the DSpace object filters
|
||||
*/
|
||||
public List<String> getDSpaceObjectFilters() {
|
||||
return dspaceObjectFilters;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -7,6 +7,8 @@
|
||||
*/
|
||||
package org.dspace.discovery;
|
||||
|
||||
import static java.util.stream.Collectors.joining;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.PrintWriter;
|
||||
import java.io.StringWriter;
|
||||
@@ -751,8 +753,13 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
String filterQuery = discoveryQuery.getFilterQueries().get(i);
|
||||
solrQuery.addFilterQuery(filterQuery);
|
||||
}
|
||||
if (discoveryQuery.getDSpaceObjectFilter() != null) {
|
||||
solrQuery.addFilterQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":" + discoveryQuery.getDSpaceObjectFilter());
|
||||
if (discoveryQuery.getDSpaceObjectFilters() != null) {
|
||||
solrQuery.addFilterQuery(
|
||||
discoveryQuery.getDSpaceObjectFilters()
|
||||
.stream()
|
||||
.map(filter -> SearchUtils.RESOURCE_TYPE_FIELD + ":" + filter)
|
||||
.collect(joining(" OR "))
|
||||
);
|
||||
}
|
||||
|
||||
for (int i = 0; i < discoveryQuery.getFieldPresentQueries().size(); i++) {
|
||||
|
@@ -0,0 +1,107 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
|
||||
package org.dspace.importer.external.bibtex.service;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.Reader;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import javax.annotation.Resource;
|
||||
|
||||
import org.dspace.importer.external.exception.FileSourceException;
|
||||
import org.dspace.importer.external.service.components.AbstractPlainMetadataSource;
|
||||
import org.dspace.importer.external.service.components.dto.PlainMetadataKeyValueItem;
|
||||
import org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto;
|
||||
import org.jbibtex.BibTeXDatabase;
|
||||
import org.jbibtex.BibTeXEntry;
|
||||
import org.jbibtex.BibTeXParser;
|
||||
import org.jbibtex.Key;
|
||||
import org.jbibtex.ParseException;
|
||||
import org.jbibtex.Value;
|
||||
|
||||
/**
|
||||
* Implements a metadata importer for BibTeX files
|
||||
*
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
|
||||
*/
|
||||
public class BibtexImportMetadataSourceServiceImpl extends AbstractPlainMetadataSource {
|
||||
|
||||
|
||||
/**
|
||||
* The string that identifies this import implementation as
|
||||
* MetadataSource implementation
|
||||
*
|
||||
* @return the identifying uri
|
||||
*/
|
||||
@Override
|
||||
public String getImportSource() {
|
||||
return "BibTeXMetadataSource";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<PlainMetadataSourceDto> readData (InputStream
|
||||
inputStream) throws FileSourceException {
|
||||
List<PlainMetadataSourceDto> list = new ArrayList<>();
|
||||
BibTeXDatabase database;
|
||||
try {
|
||||
database = parseBibTex(inputStream);
|
||||
} catch (IOException | ParseException e) {
|
||||
throw new FileSourceException("Unable to parse file with BibTeX parser");
|
||||
}
|
||||
if (database == null || database.getEntries() == null) {
|
||||
throw new FileSourceException("File results in an empty list of metadata");
|
||||
}
|
||||
if (database.getEntries() != null) {
|
||||
for (Entry<Key, BibTeXEntry> entry : database.getEntries().entrySet()) {
|
||||
PlainMetadataSourceDto item = new PlainMetadataSourceDto();
|
||||
List<PlainMetadataKeyValueItem> keyValues = new ArrayList<>();
|
||||
item.setMetadata(keyValues);
|
||||
PlainMetadataKeyValueItem keyValueItem = new PlainMetadataKeyValueItem();
|
||||
keyValueItem.setKey(entry.getValue().getType().getValue());
|
||||
keyValueItem.setValue(entry.getKey().getValue());
|
||||
keyValues.add(keyValueItem);
|
||||
if (entry.getValue().getFields() != null) {
|
||||
for (Entry<Key,Value> subentry : entry.getValue().getFields().entrySet()) {
|
||||
PlainMetadataKeyValueItem innerItem = new PlainMetadataKeyValueItem();
|
||||
innerItem.setKey(subentry.getKey().getValue());
|
||||
innerItem.setValue(subentry.getValue().toUserString());
|
||||
keyValues.add(innerItem);
|
||||
}
|
||||
}
|
||||
list.add(item);
|
||||
}
|
||||
}
|
||||
return list;
|
||||
}
|
||||
|
||||
private BibTeXDatabase parseBibTex(InputStream inputStream) throws IOException, ParseException {
|
||||
Reader reader = new InputStreamReader(inputStream);
|
||||
BibTeXParser bibtexParser = new BibTeXParser();
|
||||
return bibtexParser.parse(reader);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Retrieve the MetadataFieldMapping containing the mapping between RecordType
|
||||
* (in this case PlainMetadataSourceDto.class) and Metadata
|
||||
*
|
||||
* @return The configured MetadataFieldMapping
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
@Resource(name = "bibtexMetadataFieldMap")
|
||||
public void setMetadataFieldMap(@SuppressWarnings("rawtypes") Map metadataFieldMap) {
|
||||
super.setMetadataFieldMap(metadataFieldMap);
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,29 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
|
||||
package org.dspace.importer.external.exception;
|
||||
|
||||
/**
|
||||
* This exception could be throws when more than one element is found
|
||||
* in a method that works on one only.
|
||||
*
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
|
||||
*/
|
||||
|
||||
public class FileMultipleOccurencesException extends Exception {
|
||||
|
||||
private static final long serialVersionUID = 1222409723339501937L;
|
||||
|
||||
public FileMultipleOccurencesException(String message, Throwable cause) {
|
||||
super(message, cause);
|
||||
}
|
||||
|
||||
public FileMultipleOccurencesException(String message) {
|
||||
super(message);
|
||||
}
|
||||
}
|
28
dspace-api/src/main/java/org/dspace/importer/external/exception/FileSourceException.java
vendored
Normal file
28
dspace-api/src/main/java/org/dspace/importer/external/exception/FileSourceException.java
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
|
||||
package org.dspace.importer.external.exception;
|
||||
|
||||
/**
|
||||
* Represents a problem with the File content: e.g. null input stream, invalid content, ...
|
||||
*
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
|
||||
*/
|
||||
|
||||
public class FileSourceException extends Exception {
|
||||
|
||||
private static final long serialVersionUID = 6895579588455260182L;
|
||||
|
||||
public FileSourceException(String message, Throwable cause) {
|
||||
super(message, cause);
|
||||
}
|
||||
|
||||
public FileSourceException(String message) {
|
||||
super(message);
|
||||
}
|
||||
}
|
@@ -117,16 +117,13 @@ public abstract class AbstractMetadataFieldMapping<RecordType>
|
||||
public Collection<MetadatumDTO> resultToDCValueMapping(RecordType record) {
|
||||
List<MetadatumDTO> values = new LinkedList<MetadatumDTO>();
|
||||
|
||||
|
||||
for (MetadataContributor<RecordType> query : getMetadataFieldMap().values()) {
|
||||
try {
|
||||
values.addAll(query.contributeMetadata(record));
|
||||
} catch (Exception e) {
|
||||
log.error("Error", e);
|
||||
}
|
||||
|
||||
}
|
||||
return values;
|
||||
|
||||
}
|
||||
}
|
||||
|
@@ -0,0 +1,94 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
|
||||
package org.dspace.importer.external.metadatamapping.contributor;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
|
||||
import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
|
||||
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
|
||||
import org.dspace.importer.external.service.components.dto.PlainMetadataKeyValueItem;
|
||||
import org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto;
|
||||
|
||||
/**
|
||||
* Metadata contributor that takes an PlainMetadataSourceDto instance and turns it into a
|
||||
* collection of metadatum
|
||||
*
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
|
||||
*/
|
||||
public class SimpleMetadataContributor implements MetadataContributor<PlainMetadataSourceDto> {
|
||||
|
||||
private MetadataFieldConfig field;
|
||||
|
||||
private String key;
|
||||
|
||||
private MetadataFieldMapping<PlainMetadataSourceDto,
|
||||
MetadataContributor<PlainMetadataSourceDto>> metadataFieldMapping;
|
||||
|
||||
public SimpleMetadataContributor(MetadataFieldConfig field, String key) {
|
||||
this.field = field;
|
||||
this.key = key;
|
||||
}
|
||||
|
||||
public SimpleMetadataContributor() { }
|
||||
|
||||
/**
|
||||
* Set the metadataFieldMapping of this SimpleMetadataContributor
|
||||
*
|
||||
* @param metadataFieldMapping the new mapping.
|
||||
*/
|
||||
@Override
|
||||
public void setMetadataFieldMapping(
|
||||
MetadataFieldMapping<PlainMetadataSourceDto,
|
||||
MetadataContributor<PlainMetadataSourceDto>> metadataFieldMapping) {
|
||||
this.metadataFieldMapping = metadataFieldMapping;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve the metadata associated with the given object.
|
||||
* It match the key found in PlainMetadataSourceDto instance with the key passed to constructor.
|
||||
* In case of success, new metadatum is constructer (using field elements and PlainMetadataSourceDto value)
|
||||
* and added to the list.
|
||||
*
|
||||
* @param t A class to retrieve metadata and key to match from. t and contained list "metadata" MUST be not null.
|
||||
* @return a collection of import records. Only the identifier of the found records may be put in the record.
|
||||
*/
|
||||
@Override
|
||||
public Collection<MetadatumDTO> contributeMetadata(PlainMetadataSourceDto t) {
|
||||
List<MetadatumDTO> values = new LinkedList<>();
|
||||
for (PlainMetadataKeyValueItem metadatum : t.getMetadata()) {
|
||||
if (key.equals(metadatum.getKey())) {
|
||||
MetadatumDTO dcValue = new MetadatumDTO();
|
||||
dcValue.setValue(metadatum.getValue());
|
||||
dcValue.setElement(field.getElement());
|
||||
dcValue.setQualifier(field.getQualifier());
|
||||
dcValue.setSchema(field.getSchema());
|
||||
values.add(dcValue);
|
||||
}
|
||||
}
|
||||
return values;
|
||||
}
|
||||
|
||||
/*
|
||||
* Setter to inject field item
|
||||
*/
|
||||
public void setField(MetadataFieldConfig field) {
|
||||
this.field = field;
|
||||
}
|
||||
|
||||
/*
|
||||
* Setter to inject key value
|
||||
*/
|
||||
public void setKey(String key) {
|
||||
this.key = key;
|
||||
}
|
||||
|
||||
}
|
@@ -8,6 +8,10 @@
|
||||
|
||||
package org.dspace.importer.external.pubmed.service;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.Reader;
|
||||
import java.io.StringReader;
|
||||
import java.util.Collection;
|
||||
import java.util.LinkedList;
|
||||
@@ -20,6 +24,7 @@ import javax.ws.rs.client.WebTarget;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
import javax.ws.rs.core.Response;
|
||||
|
||||
import com.google.common.io.CharStreams;
|
||||
import org.apache.axiom.om.OMElement;
|
||||
import org.apache.axiom.om.OMXMLBuilderFactory;
|
||||
import org.apache.axiom.om.OMXMLParserWrapper;
|
||||
@@ -27,8 +32,12 @@ import org.apache.axiom.om.xpath.AXIOMXPath;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.importer.external.datamodel.ImportRecord;
|
||||
import org.dspace.importer.external.datamodel.Query;
|
||||
import org.dspace.importer.external.exception.FileMultipleOccurencesException;
|
||||
import org.dspace.importer.external.exception.FileSourceException;
|
||||
import org.dspace.importer.external.exception.MetadataSourceException;
|
||||
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
|
||||
import org.dspace.importer.external.service.components.FileSource;
|
||||
import org.dspace.importer.external.service.components.QuerySource;
|
||||
import org.jaxen.JaxenException;
|
||||
|
||||
/**
|
||||
@@ -36,11 +45,29 @@ import org.jaxen.JaxenException;
|
||||
*
|
||||
* @author Roeland Dillen (roeland at atmire dot com)
|
||||
*/
|
||||
public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<OMElement> {
|
||||
public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<OMElement>
|
||||
implements QuerySource, FileSource {
|
||||
|
||||
private String baseAddress;
|
||||
|
||||
private WebTarget pubmedWebTarget;
|
||||
|
||||
private List<String> supportedExtensions;
|
||||
|
||||
/**
|
||||
* Set the file extensions supported by this metadata service
|
||||
*
|
||||
* @param supportedExtensionsthe file extensions (xml,txt,...) supported by this service
|
||||
*/
|
||||
public void setSupportedExtensions(List<String> supportedExtensions) {
|
||||
this.supportedExtensions = supportedExtensions;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> getSupportedExtensions() {
|
||||
return supportedExtensions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the number of records matching a query;
|
||||
*
|
||||
@@ -49,7 +76,7 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
@Override
|
||||
public int getNbRecords(String query) throws MetadataSourceException {
|
||||
public int getRecordsCount(String query) throws MetadataSourceException {
|
||||
return retry(new GetNbRecords(query));
|
||||
}
|
||||
|
||||
@@ -61,7 +88,7 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
@Override
|
||||
public int getNbRecords(Query query) throws MetadataSourceException {
|
||||
public int getRecordsCount(Query query) throws MetadataSourceException {
|
||||
return retry(new GetNbRecords(query));
|
||||
}
|
||||
|
||||
@@ -357,7 +384,6 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
|
||||
|
||||
@Override
|
||||
public Collection<ImportRecord> call() throws Exception {
|
||||
List<ImportRecord> records = new LinkedList<ImportRecord>();
|
||||
|
||||
WebTarget getRecordIdsTarget = pubmedWebTarget
|
||||
.queryParam("term", query.getParameterAsClass("term", String.class));
|
||||
@@ -382,13 +408,41 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
|
||||
invocationBuilder = getRecordsTarget.request(MediaType.TEXT_PLAIN_TYPE);
|
||||
response = invocationBuilder.get();
|
||||
|
||||
List<OMElement> omElements = splitToRecords(response.readEntity(String.class));
|
||||
|
||||
for (OMElement record : omElements) {
|
||||
records.add(transformSourceRecords(record));
|
||||
}
|
||||
|
||||
return records;
|
||||
String xml = response.readEntity(String.class);
|
||||
return parseXMLString(xml);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public List<ImportRecord> getRecords(InputStream inputStream) throws FileSourceException {
|
||||
String xml = null;
|
||||
try (Reader reader = new InputStreamReader(inputStream, "UTF-8")) {
|
||||
xml = CharStreams.toString(reader);
|
||||
return parseXMLString(xml);
|
||||
} catch (IOException e) {
|
||||
throw new FileSourceException("Cannot read XML from InputStream", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public ImportRecord getRecord(InputStream inputStream) throws FileSourceException, FileMultipleOccurencesException {
|
||||
List<ImportRecord> importRecord = getRecords(inputStream);
|
||||
if (importRecord == null || importRecord.isEmpty()) {
|
||||
throw new FileSourceException("Cannot find (valid) record in File");
|
||||
} else if (importRecord.size() > 1) {
|
||||
throw new FileMultipleOccurencesException("File contains more than one entry");
|
||||
} else {
|
||||
return importRecord.get(0);
|
||||
}
|
||||
}
|
||||
|
||||
private List<ImportRecord> parseXMLString(String xml) {
|
||||
List<ImportRecord> records = new LinkedList<ImportRecord>();
|
||||
List<OMElement> omElements = splitToRecords(xml);
|
||||
for (OMElement record : omElements) {
|
||||
records.add(transformSourceRecords(record));
|
||||
}
|
||||
return records;
|
||||
}
|
||||
}
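With the FileSource methods added above, the PubMed importer can parse a previously downloaded efetch XML file as well as serve live queries. A hedged usage sketch follows; the bean name "PubmedImportService" comes from the Spring configuration shown later in this diff, the file path is a placeholder, and a running DSpace kernel is assumed.

```java
// Hedged sketch: feed a local PubMed efetch XML export to the importer through
// the new FileSource methods. The file path is a placeholder.
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.List;

import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.pubmed.service.PubmedImportMetadataSourceServiceImpl;
import org.dspace.services.factory.DSpaceServicesFactory;

public class PubmedFileImportExample {
    public static void main(String[] args) throws Exception {
        // Look up the Spring-configured bean rather than constructing the service directly,
        // so that the metadata field mapping is already wired in.
        PubmedImportMetadataSourceServiceImpl pubmed = DSpaceServicesFactory.getInstance()
            .getServiceManager()
            .getServiceByName("PubmedImportService", PubmedImportMetadataSourceServiceImpl.class);

        try (InputStream in = new FileInputStream("pubmed-efetch.xml")) { // placeholder path
            List<ImportRecord> records = pubmed.getRecords(in);
            System.out.println("Parsed " + records.size() + " PubMed record(s)");
        }
    }
}
```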
|
||||
|
@@ -8,6 +8,10 @@
|
||||
|
||||
package org.dspace.importer.external.service;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
@@ -19,11 +23,16 @@ import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.importer.external.datamodel.ImportRecord;
|
||||
import org.dspace.importer.external.datamodel.Query;
|
||||
import org.dspace.importer.external.exception.FileMultipleOccurencesException;
|
||||
import org.dspace.importer.external.exception.FileSourceException;
|
||||
import org.dspace.importer.external.exception.MetadataSourceException;
|
||||
import org.dspace.importer.external.service.components.Destroyable;
|
||||
import org.dspace.importer.external.service.components.FileSource;
|
||||
import org.dspace.importer.external.service.components.MetadataSource;
|
||||
import org.dspace.importer.external.service.components.QuerySource;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
|
||||
/**
|
||||
* Main entry point for the import framework.
|
||||
* Instead of calling the different importer implementations, the ImportService should be called instead.
|
||||
@@ -32,8 +41,10 @@ import org.springframework.beans.factory.annotation.Autowired;
|
||||
* importer implementation you want to use.
|
||||
*
|
||||
* @author Roeland Dillen (roeland at atmire dot com)
|
||||
* @author Pasquale Cavallo (pasquale.cavallo@4science.it)
|
||||
*/
|
||||
public class ImportService implements Destroyable {
|
||||
|
||||
private HashMap<String, MetadataSource> importSources = new HashMap<>();
|
||||
|
||||
Logger log = org.apache.logging.log4j.LogManager.getLogger(ImportService.class);
|
||||
@@ -101,11 +112,11 @@ public class ImportService implements Destroyable {
|
||||
public Collection<ImportRecord> findMatchingRecords(String uri, Item item) throws MetadataSourceException {
|
||||
try {
|
||||
List<ImportRecord> recordList = new LinkedList<ImportRecord>();
|
||||
|
||||
for (MetadataSource metadataSource : matchingImports(uri)) {
|
||||
recordList.addAll(metadataSource.findMatchingRecords(item));
|
||||
if (metadataSource instanceof QuerySource) {
|
||||
recordList.addAll(((QuerySource)metadataSource).findMatchingRecords(item));
|
||||
}
|
||||
}
|
||||
|
||||
return recordList;
|
||||
} catch (Exception e) {
|
||||
throw new MetadataSourceException(e);
|
||||
@@ -125,9 +136,10 @@ public class ImportService implements Destroyable {
|
||||
try {
|
||||
List<ImportRecord> recordList = new LinkedList<ImportRecord>();
|
||||
for (MetadataSource metadataSource : matchingImports(uri)) {
|
||||
recordList.addAll(metadataSource.findMatchingRecords(query));
|
||||
if (metadataSource instanceof QuerySource) {
|
||||
recordList.addAll(((QuerySource)metadataSource).findMatchingRecords(query));
|
||||
}
|
||||
}
|
||||
|
||||
return recordList;
|
||||
} catch (Exception e) {
|
||||
throw new MetadataSourceException(e);
|
||||
@@ -145,8 +157,10 @@ public class ImportService implements Destroyable {
|
||||
public int getNbRecords(String uri, String query) throws MetadataSourceException {
|
||||
try {
|
||||
int total = 0;
|
||||
for (MetadataSource MetadataSource : matchingImports(uri)) {
|
||||
total += MetadataSource.getNbRecords(query);
|
||||
for (MetadataSource metadataSource : matchingImports(uri)) {
|
||||
if (metadataSource instanceof QuerySource) {
|
||||
total += ((QuerySource)metadataSource).getRecordsCount(query);
|
||||
}
|
||||
}
|
||||
return total;
|
||||
} catch (Exception e) {
|
||||
@@ -165,8 +179,10 @@ public class ImportService implements Destroyable {
|
||||
public int getNbRecords(String uri, Query query) throws MetadataSourceException {
|
||||
try {
|
||||
int total = 0;
|
||||
for (MetadataSource MetadataSource : matchingImports(uri)) {
|
||||
total += MetadataSource.getNbRecords(query);
|
||||
for (MetadataSource metadataSource : matchingImports(uri)) {
|
||||
if (metadataSource instanceof QuerySource) {
|
||||
total += ((QuerySource)metadataSource).getRecordsCount(query);
|
||||
}
|
||||
}
|
||||
return total;
|
||||
} catch (Exception e) {
|
||||
@@ -189,7 +205,9 @@ public class ImportService implements Destroyable {
|
||||
try {
|
||||
List<ImportRecord> recordList = new LinkedList<>();
|
||||
for (MetadataSource metadataSource : matchingImports(uri)) {
|
||||
recordList.addAll(metadataSource.getRecords(query, start, count));
|
||||
if (metadataSource instanceof QuerySource) {
|
||||
recordList.addAll(((QuerySource)metadataSource).getRecords(query, start, count));
|
||||
}
|
||||
}
|
||||
return recordList;
|
||||
} catch (Exception e) {
|
||||
@@ -209,7 +227,9 @@ public class ImportService implements Destroyable {
|
||||
try {
|
||||
List<ImportRecord> recordList = new LinkedList<>();
|
||||
for (MetadataSource metadataSource : matchingImports(uri)) {
|
||||
recordList.addAll(metadataSource.getRecords(query));
|
||||
if (metadataSource instanceof QuerySource) {
|
||||
recordList.addAll(((QuerySource)metadataSource).getRecords(query));
|
||||
}
|
||||
}
|
||||
return recordList;
|
||||
} catch (Exception e) {
|
||||
@@ -229,10 +249,12 @@ public class ImportService implements Destroyable {
|
||||
public ImportRecord getRecord(String uri, String id) throws MetadataSourceException {
|
||||
try {
|
||||
for (MetadataSource metadataSource : matchingImports(uri)) {
|
||||
if (metadataSource.getRecord(id) != null) {
|
||||
return metadataSource.getRecord(id);
|
||||
if (metadataSource instanceof QuerySource) {
|
||||
QuerySource querySource = (QuerySource)metadataSource;
|
||||
if (querySource.getRecord(id) != null) {
|
||||
return querySource.getRecord(id);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
return null;
|
||||
} catch (Exception e) {
|
||||
@@ -252,10 +274,12 @@ public class ImportService implements Destroyable {
|
||||
public ImportRecord getRecord(String uri, Query query) throws MetadataSourceException {
|
||||
try {
|
||||
for (MetadataSource metadataSource : matchingImports(uri)) {
|
||||
if (metadataSource.getRecord(query) != null) {
|
||||
return metadataSource.getRecord(query);
|
||||
if (metadataSource instanceof QuerySource) {
|
||||
QuerySource querySource = (QuerySource)metadataSource;
|
||||
if (querySource.getRecord(query) != null) {
|
||||
return querySource.getRecord(query);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
return null;
|
||||
} catch (Exception e) {
|
||||
@@ -272,6 +296,41 @@ public class ImportService implements Destroyable {
|
||||
return importSources.keySet();
|
||||
}
|
||||
|
||||
/*
 * Get a single record from a file.
 * The first FileSource that declares support for the file's extension and can parse it will be used.
 *
 * @param file the file from which the record will be read
 * @param originalName the original file name or full path, used to match supported extensions
 * @return a single record containing the metadata, or null if no configured source can parse the file
 * @throws FileSourceException if the file cannot be read
 * @throws FileMultipleOccurencesException if more than one entry is found
 */
|
||||
public ImportRecord getRecord(File file, String originalName)
|
||||
throws FileMultipleOccurencesException, FileSourceException {
|
||||
ImportRecord importRecords = null;
|
||||
for (MetadataSource metadataSource : importSources.values()) {
|
||||
try (InputStream fileInputStream = new FileInputStream(file)) {
|
||||
if (metadataSource instanceof FileSource) {
|
||||
FileSource fileSource = (FileSource)metadataSource;
|
||||
if (fileSource.isValidSourceForFile(originalName)) {
|
||||
importRecords = fileSource.getRecord(fileInputStream);
|
||||
break;
|
||||
}
|
||||
}
|
||||
//catch statements are required because we could have a supported format (i.e. XML)
//which fails on schema validation
|
||||
} catch (FileSourceException e) {
|
||||
log.debug(metadataSource.getImportSource() + " isn't a valid parser for file");
|
||||
} catch (FileMultipleOccurencesException e) {
|
||||
log.debug("File contains multiple metadata, return with error");
|
||||
throw e;
|
||||
} catch (IOException e1) {
|
||||
throw new FileSourceException("File cannot be read, may be null");
|
||||
}
|
||||
}
|
||||
return importRecords;
|
||||
}
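A hedged sketch of how a caller, for example a submission controller, might hand an uploaded file to this method; the wrapper class, the way the service is obtained, and the error handling are illustrative assumptions.

```java
// Hedged usage sketch: ask the ImportService to parse an uploaded file with
// whichever configured FileSource accepts its extension.
import java.io.File;

import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.exception.FileMultipleOccurencesException;
import org.dspace.importer.external.exception.FileSourceException;
import org.dspace.importer.external.service.ImportService;

public class FileLookupExample {
    private final ImportService importService; // normally injected by Spring

    public FileLookupExample(ImportService importService) {
        this.importService = importService;
    }

    public ImportRecord lookup(File uploaded) {
        try {
            // The original name is only used to match supported extensions (e.g. "xml", "bib").
            return importService.getRecord(uploaded, uploaded.getName());
        } catch (FileMultipleOccurencesException e) {
            throw new IllegalArgumentException("File contains more than one record", e);
        } catch (FileSourceException e) {
            throw new IllegalArgumentException("File could not be read", e);
        }
    }
}
```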
|
||||
|
||||
/**
|
||||
* Call destroy on all {@link Destroyable} {@link MetadataSource} objects set in this ImportService
|
||||
*/
|
||||
|
@@ -0,0 +1,103 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
|
||||
package org.dspace.importer.external.service.components;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.importer.external.datamodel.ImportRecord;
|
||||
import org.dspace.importer.external.exception.FileMultipleOccurencesException;
|
||||
import org.dspace.importer.external.exception.FileSourceException;
|
||||
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
|
||||
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
|
||||
import org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto;
|
||||
|
||||
|
||||
/**
|
||||
* This class is an abstract implementation of {@link MetadataSource} useful in cases
|
||||
* of plain metadata sources.
|
||||
 * It provides the method to map metadata to the DSpace format when the source is a file
 * with a list of <key, value> strings.
|
||||
*
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
|
||||
*/
|
||||
|
||||
public abstract class AbstractPlainMetadataSource
|
||||
extends AbstractMetadataFieldMapping<PlainMetadataSourceDto>
|
||||
implements FileSource {
|
||||
|
||||
protected abstract List<PlainMetadataSourceDto>
|
||||
readData(InputStream fileInputStream) throws FileSourceException;
|
||||
|
||||
|
||||
private List<String> supportedExtensions;
|
||||
|
||||
/**
|
||||
* Set the file extensions supported by this metadata service
|
||||
*
|
||||
 * @param supportedExtensions the file extensions (xml, txt, ...) supported by this service
|
||||
*/
|
||||
public void setSupportedExtensions(List<String> supportedExtensions) {
|
||||
this.supportedExtensions = supportedExtensions;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> getSupportedExtensions() {
|
||||
return supportedExtensions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a list of ImportRecord constructed from input file. This list is based on
|
||||
* the results retrieved from the file (InputStream) parsed through abstract method readData
|
||||
*
|
||||
 * @param is the InputStream of the file
|
||||
* @return A list of {@link ImportRecord}
|
||||
* @throws FileSourceException if, for any reason, the file is not parsable
|
||||
*/
|
||||
@Override
|
||||
public List<ImportRecord> getRecords(InputStream is) throws FileSourceException {
|
||||
List<PlainMetadataSourceDto> datas = readData(is);
|
||||
List<ImportRecord> records = new ArrayList<>();
|
||||
for (PlainMetadataSourceDto item : datas) {
|
||||
records.add(toRecord(item));
|
||||
}
|
||||
return records;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return an ImportRecord constructed from input file. This list is based on
|
||||
* the result retrieved from the file (InputStream) parsed through abstract method
|
||||
* "readData" implementation
|
||||
*
|
||||
 * @param is the InputStream of the file
|
||||
* @return An {@link ImportRecord} matching the file content
|
||||
* @throws FileSourceException if, for any reason, the file is not parsable
|
||||
* @throws FileMultipleOccurencesException if the file contains more than one entry
|
||||
*/
|
||||
@Override
|
||||
public ImportRecord getRecord(InputStream is) throws FileSourceException, FileMultipleOccurencesException {
|
||||
List<PlainMetadataSourceDto> datas = readData(is);
|
||||
if (datas == null || datas.isEmpty()) {
|
||||
throw new FileSourceException("File is empty");
|
||||
}
|
||||
if (datas.size() > 1) {
|
||||
throw new FileMultipleOccurencesException("File "
|
||||
+ "contains more than one entry (" + datas.size() + " entries");
|
||||
}
|
||||
return toRecord(datas.get(0));
|
||||
}
|
||||
|
||||
|
||||
private ImportRecord toRecord(PlainMetadataSourceDto entry) {
|
||||
List<MetadatumDTO> metadata = new ArrayList<>();
|
||||
metadata.addAll(resultToDCValueMapping(entry));
|
||||
return new ImportRecord(metadata);
|
||||
}
|
||||
}
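A hypothetical sketch of a concrete subclass follows, to show where readData() fits: it parses a simple "key=value" text file into the DTOs used by this abstract class. The class name and file format are assumptions, and the field mapping that resultToDCValueMapping relies on would still need to be configured in Spring, as it is for the real BibTeX implementation.

```java
// Hypothetical subclass sketch built on AbstractPlainMetadataSource.
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.dspace.importer.external.exception.FileSourceException;
import org.dspace.importer.external.service.components.AbstractPlainMetadataSource;
import org.dspace.importer.external.service.components.dto.PlainMetadataKeyValueItem;
import org.dspace.importer.external.service.components.dto.PlainMetadataSourceDto;

public class KeyValueFileSource extends AbstractPlainMetadataSource {

    @Override
    public String getImportSource() {
        return "KeyValueFileSource"; // hypothetical source name
    }

    @Override
    protected List<PlainMetadataSourceDto> readData(InputStream fileInputStream) throws FileSourceException {
        List<PlainMetadataKeyValueItem> items = new ArrayList<>();
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(fileInputStream, StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                int idx = line.indexOf('=');
                if (idx > 0) {
                    PlainMetadataKeyValueItem item = new PlainMetadataKeyValueItem();
                    item.setKey(line.substring(0, idx).trim());
                    item.setValue(line.substring(idx + 1).trim());
                    items.add(item);
                }
            }
        } catch (Exception e) {
            throw new FileSourceException("Cannot read plain metadata file", e);
        }
        PlainMetadataSourceDto dto = new PlainMetadataSourceDto();
        dto.setMetadata(items);
        // One DTO == one record; getRecord() would fail if more than one DTO were returned.
        return Collections.singletonList(dto);
    }
}
```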
|
dspace-api/src/main/java/org/dspace/importer/external/service/components/FileSource.java
@@ -0,0 +1,70 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
|
||||
package org.dspace.importer.external.service.components;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.importer.external.datamodel.ImportRecord;
|
||||
import org.dspace.importer.external.exception.FileMultipleOccurencesException;
|
||||
import org.dspace.importer.external.exception.FileSourceException;
|
||||
|
||||
/**
|
||||
* This interface declare the base methods to work with files containing metadata.
|
||||
*
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
|
||||
*/
|
||||
public interface FileSource extends MetadataSource {
|
||||
|
||||
/**
|
||||
* Return a list of ImportRecord constructed from input file.
|
||||
*
|
||||
 * @param inputStream the InputStream of the file
|
||||
* @return A list of {@link ImportRecord}
|
||||
* @throws FileSourceException if, for any reason, the file is not parsable
|
||||
*/
|
||||
public List<ImportRecord> getRecords(InputStream inputStream)
|
||||
throws FileSourceException;
|
||||
|
||||
/**
|
||||
* Return an ImportRecord constructed from input file.
|
||||
*
|
||||
 * @param inputStream the InputStream of the file
|
||||
* @return An {@link ImportRecord} matching the file content
|
||||
* @throws FileSourceException if, for any reason, the file is not parsable
|
||||
* @throws FileMultipleOccurencesException if the file contains more than one entry
|
||||
*/
|
||||
public ImportRecord getRecord(InputStream inputStream)
|
||||
throws FileSourceException, FileMultipleOccurencesException;
|
||||
|
||||
/**
|
||||
 * This method is used to decide whether the FileSource can manage the given file format
 *
 * @param originalName the file's original name
|
||||
* @return true if the FileSource can parse the file, false otherwise
|
||||
*/
|
||||
public default boolean isValidSourceForFile(String originalName) {
|
||||
List<String> extensions = getSupportedExtensions();
|
||||
if (extensions == null || extensions.isEmpty()) {
|
||||
return false;
|
||||
}
|
||||
if (originalName != null && originalName.contains(".")) {
|
||||
String extension = originalName.substring(originalName.lastIndexOf('.') + 1,
|
||||
originalName.length());
|
||||
return getSupportedExtensions().contains(extension);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the file extensions (xml, csv, txt, ...) supported by the FileSource implementation
|
||||
*/
|
||||
public List<String> getSupportedExtensions();
|
||||
|
||||
}
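The default isValidSourceForFile() implementation only looks at the text after the last dot and compares it, case-sensitively, against the configured extensions. A small self-contained illustration, using a stub FileSource whose only meaningful method is getSupportedExtensions():

```java
// Illustration of the default isValidSourceForFile() behaviour for a source
// configured with supportedExtensions = ["xml"]. The stub below is only for
// demonstration; getRecords/getRecord are never called here.
import java.io.InputStream;
import java.util.Arrays;
import java.util.List;

import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.service.components.FileSource;

public class ExtensionCheckExample {
    public static void main(String[] args) {
        FileSource xmlOnly = new FileSource() {
            @Override
            public List<String> getSupportedExtensions() {
                return Arrays.asList("xml");
            }

            @Override
            public List<ImportRecord> getRecords(InputStream inputStream) {
                return null; // not exercised in this example
            }

            @Override
            public ImportRecord getRecord(InputStream inputStream) {
                return null; // not exercised in this example
            }

            @Override
            public String getImportSource() {
                return "stub";
            }
        };

        System.out.println(xmlOnly.isValidSourceForFile("records.xml"));     // true
        System.out.println(xmlOnly.isValidSourceForFile("records.XML"));     // false: comparison is case-sensitive
        System.out.println(xmlOnly.isValidSourceForFile("archive.tar.xml")); // true: only the last extension is checked
        System.out.println(xmlOnly.isValidSourceForFile("noextension"));     // false
    }
}
```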
|
@@ -8,76 +8,14 @@
|
||||
|
||||
package org.dspace.importer.external.service.components;
|
||||
|
||||
import java.util.Collection;
|
||||
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.importer.external.datamodel.ImportRecord;
|
||||
import org.dspace.importer.external.datamodel.Query;
|
||||
import org.dspace.importer.external.exception.MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Common interface for all import implementations.
|
||||
* Super interface for all import implementations.
|
||||
*
|
||||
* @author Roeland Dillen (roeland at atmire dot com)
|
||||
* @author Pasquale Cavallo (pasquale.cavallo@4science.it)
|
||||
*/
|
||||
public interface MetadataSource {
|
||||
/**
|
||||
* Gets the number of records matching a query
|
||||
*
|
||||
* @param query the query in string format
|
||||
* @return the number of records matching the query
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public int getNbRecords(String query) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Gets the number of records matching a query
|
||||
*
|
||||
* @param query the query object
|
||||
* @return the number of records matching the query
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public int getNbRecords(Query query) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Gets a set of records matching a query. Supports pagination
|
||||
*
|
||||
* @param query the query. The query will generally be posted 'as is' to the source
|
||||
* @param start offset
|
||||
* @param count page size
|
||||
* @return a collection of fully transformed id's
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Find records based on a object query.
|
||||
*
|
||||
* @param query a query object to base the search on.
|
||||
* @return a set of records. Fully transformed.
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Get a single record from the source.
|
||||
* The first match will be returned
|
||||
*
|
||||
* @param id identifier for the record
|
||||
* @return a matching record
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public ImportRecord getRecord(String id) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Get a single record from the source.
|
||||
* The first match will be returned
|
||||
*
|
||||
* @param query a query matching a single record
|
||||
* @return a matching record
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public ImportRecord getRecord(Query query) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* The string that identifies this import implementation. Preferable a URI
|
||||
@@ -86,23 +24,4 @@ public interface MetadataSource {
|
||||
*/
|
||||
public String getImportSource();
|
||||
|
||||
/**
|
||||
* Finds records based on an item
|
||||
* Delegates to one or more MetadataSource implementations based on the uri. Results will be aggregated.
|
||||
*
|
||||
* @param item an item to base the search on
|
||||
* @return a collection of import records. Only the identifier of the found records may be put in the record.
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Finds records based on query object.
|
||||
* Delegates to one or more MetadataSource implementations based on the uri. Results will be aggregated.
|
||||
*
|
||||
* @param query a query object to base the search on.
|
||||
* @return a collection of import records. Only the identifier of the found records may be put in the record.
|
||||
* @throws MetadataSourceException passed through.
|
||||
*/
|
||||
public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException;
|
||||
}
|
||||
|
dspace-api/src/main/java/org/dspace/importer/external/service/components/QuerySource.java
@@ -0,0 +1,106 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
|
||||
package org.dspace.importer.external.service.components;
|
||||
|
||||
import java.util.Collection;
|
||||
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.importer.external.datamodel.ImportRecord;
|
||||
import org.dspace.importer.external.datamodel.Query;
|
||||
import org.dspace.importer.external.exception.MetadataSourceException;
|
||||
|
||||
|
||||
/**
|
||||
* Common interface for database-based imports.
|
||||
*
|
||||
* @author Roeland Dillen (roeland at atmire dot com)
|
||||
* @author Pasquale Cavallo (pasquale.cavallo@4science.it)
|
||||
*/
|
||||
|
||||
public interface QuerySource extends MetadataSource {
|
||||
|
||||
/**
|
||||
* Get a single record from the source.
|
||||
* The first match will be returned
|
||||
*
|
||||
* @param id identifier for the record
|
||||
* @return a matching record
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public ImportRecord getRecord(String id) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Gets the number of records matching a query
|
||||
*
|
||||
* @param query the query in string format
|
||||
* @return the number of records matching the query
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public int getRecordsCount(String query) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Gets the number of records matching a query
|
||||
*
|
||||
* @param query the query object
|
||||
* @return the number of records matching the query
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public int getRecordsCount(Query query) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Gets a set of records matching a query. Supports pagination
|
||||
*
|
||||
* @param query the query. The query will generally be posted 'as is' to the source
|
||||
* @param start offset
|
||||
* @param count page size
|
||||
* @return a collection of fully transformed id's
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Find records based on a object query.
|
||||
*
|
||||
* @param query a query object to base the search on.
|
||||
* @return a set of records. Fully transformed.
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Get a single record from the source.
|
||||
* The first match will be returned
|
||||
*
|
||||
* @param query a query matching a single record
|
||||
* @return a matching record
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public ImportRecord getRecord(Query query) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Finds records based on query object.
|
||||
* Delegates to one or more MetadataSource implementations based on the uri. Results will be aggregated.
|
||||
*
|
||||
* @param query a query object to base the search on.
|
||||
* @return a collection of import records. Only the identifier of the found records may be put in the record.
|
||||
* @throws MetadataSourceException passed through.
|
||||
*/
|
||||
public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException;
|
||||
|
||||
/**
|
||||
* Finds records based on an item
|
||||
* Delegates to one or more MetadataSource implementations based on the uri. Results will be aggregated.
|
||||
*
|
||||
* @param item an item to base the search on
|
||||
* @return a collection of import records. Only the identifier of the found records may be put in the record.
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException;
|
||||
|
||||
}
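Since the counting methods were renamed to getRecordsCount() and moved to QuerySource, ImportService.getNbRecords() now only consults sources that implement this interface. A hedged usage sketch; the base URL comes from the PubMed Spring configuration shown later in this diff, and the query string is a placeholder.

```java
// Hedged sketch: count matching records across all sources registered for a base URL.
import org.dspace.importer.external.service.ImportService;

public class RecordCountExample {
    public int countPubmedMatches(ImportService importService) throws Exception {
        // Base address configured for the PubMed import service; the query is illustrative.
        String pubmedBaseUrl = "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/";
        return importService.getNbRecords(pubmedBaseUrl, "dna repair");
    }
}
```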
|
@@ -0,0 +1,50 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.importer.external.service.components.dto;
|
||||
|
||||
/**
|
||||
* Simple object to construct <key,value> items
|
||||
*
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
|
||||
*/
|
||||
public class PlainMetadataKeyValueItem {
|
||||
|
||||
private String key;
|
||||
private String value;
|
||||
|
||||
/*
|
||||
 * In a key-value item, like PlainMetadata, this method gets the item's key
|
||||
*/
|
||||
public String getKey() {
|
||||
return key;
|
||||
}
|
||||
|
||||
/*
|
||||
 * In a key-value item, like PlainMetadata, this method sets the item's key.
 * Never set this field to null or leave it unset.
|
||||
*
|
||||
*/
|
||||
public void setKey(String key) {
|
||||
this.key = key;
|
||||
}
|
||||
|
||||
/*
|
||||
 * In key-value items, like PlainMetadata, this method gets the item's value
|
||||
*/
|
||||
public String getValue() {
|
||||
return value;
|
||||
}
|
||||
|
||||
/*
|
||||
 * In key-value items, like PlainMetadata, this method sets the item's value
|
||||
*/
|
||||
public void setValue(String value) {
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,38 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.importer.external.service.components.dto;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
|
||||
/**
|
||||
* Simple object used to construct a list of <key,value> items.
|
||||
 * This type is used as the RecordType in plain metadata file imports.
|
||||
*
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
|
||||
*/
|
||||
|
||||
public class PlainMetadataSourceDto {
|
||||
|
||||
private List<PlainMetadataKeyValueItem> metadata;
|
||||
|
||||
/*
|
||||
* Method used to get the Metadata list
|
||||
*/
|
||||
public List<PlainMetadataKeyValueItem> getMetadata() {
|
||||
return metadata;
|
||||
}
|
||||
|
||||
/*
|
||||
* Method used to set the metadata list
|
||||
*/
|
||||
public void setMetadata(List<PlainMetadataKeyValueItem> metadata) {
|
||||
this.metadata = metadata;
|
||||
}
|
||||
|
||||
}
|
@@ -19,7 +19,6 @@
|
||||
|
||||
<context:annotation-config/> <!-- allows us to use spring annotations in beans -->
|
||||
|
||||
|
||||
<!--If multiple importServices have been configured here but only one is to be used during the lookup step (StartSubmissionLookupStep),
|
||||
this can be accomplished by specifying the property "publication-lookup.url" to the baseAddress of the required importService
|
||||
So for example
|
||||
@@ -30,9 +29,14 @@
|
||||
|
||||
<bean id="importService" class="org.dspace.importer.external.service.ImportService" scope="singleton"
|
||||
lazy-init="false" autowire="byType" destroy-method="destroy">
|
||||
|
||||
<property name="importSources">
|
||||
<list>
|
||||
<ref bean="PubmedImportService"></ref>
|
||||
<ref bean="BibtexImportService"></ref>
|
||||
</list>
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
|
||||
<bean id="ArXivImportService"
|
||||
class="org.dspace.importer.external.arxiv.service.ArXivImportMetadataSourceServiceImpl" scope="singleton">
|
||||
<property name="metadataFieldMapping" ref="ArXivMetadataFieldMapping"/>
|
||||
@@ -48,12 +52,28 @@
|
||||
<property name="metadataFieldMapping" ref="PubmedMetadataFieldMapping"/>
|
||||
<property name="baseAddress" value="https://eutils.ncbi.nlm.nih.gov/entrez/eutils/"/>
|
||||
<property name="generateQueryForItem" ref="pubmedService"></property>
|
||||
|
||||
<property name="supportedExtensions">
|
||||
<list>
|
||||
<value>xml</value>
|
||||
</list>
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
|
||||
<bean id="PubmedMetadataFieldMapping"
|
||||
class="org.dspace.importer.external.pubmed.metadatamapping.PubmedFieldMapping">
|
||||
</bean>
|
||||
|
||||
<bean id="BibtexImportService"
|
||||
class="org.dspace.importer.external.bibtex.service.BibtexImportMetadataSourceServiceImpl" scope="singleton">
|
||||
<property name="supportedExtensions">
|
||||
<list>
|
||||
<value>bib</value>
|
||||
<value>bibtex</value>
|
||||
</list>
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
<!-- Metadatafield used to check against if it's already imported or not during the JSONLookupSearcher-->
|
||||
<bean id="lookupID" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
|
||||
<constructor-arg value="dc.identifier.other"/>
|
||||
|
@@ -0,0 +1,2 @@
|
||||
checklinks
|
||||
requiredmetadata
|
@@ -19,6 +19,12 @@
|
||||
<property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataExport"/>
|
||||
</bean>
|
||||
|
||||
<bean id="curate" class="org.dspace.curate.CurationScriptConfiguration">
|
||||
<property name="description" value="Curation tasks"/>
|
||||
<property name="dspaceRunnableClass" value="org.dspace.curate.CurationCli"/>
|
||||
</bean>
|
||||
|
||||
<!-- Keep as last script; for test ScriptRestRepository#findOneScriptByNameTest -->
|
||||
<bean id="mock-script" class="org.dspace.scripts.MockDSpaceRunnableScriptConfiguration" scope="prototype">
|
||||
<property name="description" value="Mocking a script for testing purposes" />
|
||||
<property name="dspaceRunnableClass" value="org.dspace.scripts.impl.MockDSpaceRunnableScript"/>
|
||||
|
@@ -19,19 +19,29 @@
|
||||
|
||||
<context:annotation-config/> <!-- allows us to use spring annotations in beans -->
|
||||
|
||||
<bean class="org.dspace.discovery.SolrServiceImpl" id="org.dspace.discovery.SearchService"/>
|
||||
<bean class="org.dspace.discovery.SolrServiceImpl"
|
||||
id="org.dspace.discovery.SearchService"/>
|
||||
|
||||
<alias name="org.dspace.discovery.SearchService" alias="org.dspace.discovery.IndexingService"/>
|
||||
<alias name="org.dspace.discovery.SearchService"
|
||||
alias="org.dspace.discovery.IndexingService"/>
|
||||
|
||||
<!-- These beans have been added so that we can mock our AuthoritySearchService in the tests-->
|
||||
<bean class="org.dspace.authority.MockAuthoritySolrServiceImpl" id="org.dspace.authority.AuthoritySearchService"/>
|
||||
<alias name="org.dspace.authority.AuthoritySearchService" alias="org.dspace.authority.indexer.AuthorityIndexingService"/>
|
||||
<bean class="org.dspace.authority.MockAuthoritySolrServiceImpl"
|
||||
id="org.dspace.authority.AuthoritySearchService"/>
|
||||
<alias name="org.dspace.authority.AuthoritySearchService"
|
||||
alias="org.dspace.authority.indexer.AuthorityIndexingService"/>
|
||||
|
||||
<bean id="org.dspace.discovery.SolrSearchCore" class="org.dspace.discovery.MockSolrSearchCore" autowire-candidate="true"/>
|
||||
<bean id="org.dspace.discovery.MockSolrSearchCore"
|
||||
class="org.dspace.discovery.MockSolrSearchCore"
|
||||
autowire-candidate="true"/>
|
||||
|
||||
<!--<bean class="org.dspace.discovery.SolrServiceIndexOutputPlugin" id="solrServiceIndexOutputPlugin"/>-->
|
||||
<!--<bean class="org.dspace.discovery.SolrServiceIndexOutputPlugin"
|
||||
id="solrServiceIndexOutputPlugin"/>-->
|
||||
|
||||
<!-- Statistics services are both lazy loaded (by name), as you are likely just using ONE of them and not both -->
|
||||
<bean id="solrLoggerService" class="org.dspace.statistics.MockSolrLoggerServiceImpl" lazy-init="true"/>
|
||||
<!-- Statistics services are both lazy loaded (by name), as you are likely
|
||||
just using ONE of them and not both -->
|
||||
<bean id="solrLoggerService"
|
||||
class="org.dspace.statistics.MockSolrLoggerServiceImpl"
|
||||
lazy-init="true"/>
|
||||
|
||||
</beans>
|
||||
|
@@ -237,7 +237,7 @@ it, please enter the types and the actual numbers or codes.</hint>
|
||||
<form name="journalVolumeStep">
|
||||
<row>
|
||||
<relation-field>
|
||||
<relationship-type>isVolumeOfJournal</relationship-type>
|
||||
<relationship-type>isJournalOfVolume</relationship-type>
|
||||
<search-configuration>periodical</search-configuration>
|
||||
<filter>creativework.publisher:somepublishername</filter>
|
||||
<label>Journal</label>
|
||||
|
dspace-api/src/test/data/solr/solr.xml
@@ -0,0 +1,3 @@
|
||||
<?xml version='1.0'?>
|
||||
<!-- This empty configuration is required to start EmbeddedSolrServer for Integration Tests (see MockSolrServer) -->
|
||||
<solr/>
|
@@ -5,7 +5,7 @@
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.rest.test;
|
||||
package org.dspace;
|
||||
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
@@ -17,7 +17,7 @@ import java.util.TimeZone;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.rest.builder.AbstractBuilder;
|
||||
import org.dspace.builder.AbstractBuilder;
|
||||
import org.dspace.servicemanager.DSpaceKernelImpl;
|
||||
import org.dspace.servicemanager.DSpaceKernelInit;
|
||||
import org.junit.AfterClass;
|
||||
@@ -90,8 +90,9 @@ public class AbstractDSpaceIntegrationTest {
|
||||
}
|
||||
|
||||
/**
|
||||
* This method will be run after all tests finish as per @AfterClass. It
|
||||
* This method will be run after all tests finish as per @AfterClass. It
|
||||
* will clean resources initialized by the @BeforeClass methods.
|
||||
* @throws java.sql.SQLException
|
||||
*/
|
||||
@AfterClass
|
||||
public static void destroyTestEnvironment() throws SQLException {
|
@@ -5,7 +5,7 @@
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.rest.test;
|
||||
package org.dspace;
|
||||
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
@@ -14,21 +14,20 @@ import java.sql.SQLException;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.launcher.ScriptLauncher;
|
||||
import org.dspace.app.rest.builder.AbstractBuilder;
|
||||
import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
|
||||
import org.dspace.authority.AuthoritySearchService;
|
||||
import org.dspace.authority.MockAuthoritySolrServiceImpl;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.builder.AbstractBuilder;
|
||||
import org.dspace.content.Community;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.I18nUtil;
|
||||
import org.dspace.discovery.MockSolrSearchCore;
|
||||
import org.dspace.discovery.SolrSearchCore;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.eperson.Group;
|
||||
import org.dspace.eperson.factory.EPersonServiceFactory;
|
||||
import org.dspace.eperson.service.EPersonService;
|
||||
import org.dspace.eperson.service.GroupService;
|
||||
import org.dspace.kernel.ServiceManager;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.dspace.statistics.MockSolrLoggerServiceImpl;
|
||||
import org.dspace.storage.rdbms.DatabaseUtils;
|
||||
@@ -181,21 +180,20 @@ public class AbstractIntegrationTestWithDatabase extends AbstractDSpaceIntegrati
|
||||
parentCommunity = null;
|
||||
cleanupContext();
|
||||
|
||||
ServiceManager serviceManager = DSpaceServicesFactory.getInstance().getServiceManager();
|
||||
// Clear the search core.
|
||||
MockSolrSearchCore searchService = DSpaceServicesFactory.getInstance()
|
||||
.getServiceManager()
|
||||
.getServiceByName(SolrSearchCore.class.getName(), MockSolrSearchCore.class);
|
||||
MockSolrSearchCore searchService = serviceManager
|
||||
.getServiceByName(null, MockSolrSearchCore.class);
|
||||
searchService.reset();
|
||||
|
||||
MockSolrLoggerServiceImpl statisticsService = DSpaceServicesFactory.getInstance()
|
||||
.getServiceManager()
|
||||
.getServiceByName("solrLoggerService", MockSolrLoggerServiceImpl.class);
|
||||
MockSolrLoggerServiceImpl statisticsService = serviceManager
|
||||
.getServiceByName(null, MockSolrLoggerServiceImpl.class);
|
||||
statisticsService.reset();
|
||||
|
||||
MockAuthoritySolrServiceImpl authorityService = DSpaceServicesFactory.getInstance()
|
||||
.getServiceManager()
|
||||
.getServiceByName(AuthoritySearchService.class.getName(), MockAuthoritySolrServiceImpl.class);
|
||||
MockAuthoritySolrServiceImpl authorityService = serviceManager
|
||||
.getServiceByName(null, MockAuthoritySolrServiceImpl.class);
|
||||
authorityService.reset();
|
||||
|
||||
// Reload our ConfigurationService (to reset configs to defaults again)
|
||||
DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig();
|
||||
|
||||
@@ -209,6 +207,7 @@ public class AbstractIntegrationTestWithDatabase extends AbstractDSpaceIntegrati
|
||||
/**
|
||||
* Utility method to cleanup a created Context object (to save memory).
|
||||
* This can also be used by individual tests to cleanup context objects they create.
|
||||
* @throws java.sql.SQLException passed through.
|
||||
*/
|
||||
protected void cleanupContext() throws SQLException {
|
||||
// If context still valid, flush all database changes and close it
|
@@ -5,7 +5,7 @@
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.rest.test;
|
||||
package org.dspace;
|
||||
|
||||
public class ExitException extends SecurityException {
|
||||
private final int status;
|
@@ -5,7 +5,7 @@
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.rest.test;
|
||||
package org.dspace;
|
||||
|
||||
import java.security.Permission;
|
||||
|
@@ -0,0 +1,62 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.bulkedit;
|
||||
|
||||
import static junit.framework.TestCase.assertTrue;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.dspace.AbstractIntegrationTestWithDatabase;
|
||||
import org.dspace.app.launcher.ScriptLauncher;
|
||||
import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
|
||||
import org.dspace.builder.CollectionBuilder;
|
||||
import org.dspace.builder.CommunityBuilder;
|
||||
import org.dspace.builder.ItemBuilder;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Community;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.junit.Test;
|
||||
|
||||
public class MetadataExportIT
|
||||
extends AbstractIntegrationTestWithDatabase {
|
||||
private final ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
|
||||
@Test
|
||||
public void metadataExportToCsvTest() throws Exception {
|
||||
context.turnOffAuthorisationSystem();
|
||||
Community community = CommunityBuilder.createCommunity(context)
|
||||
.build();
|
||||
Collection collection = CollectionBuilder.createCollection(context, community)
|
||||
.build();
|
||||
Item item = ItemBuilder.createItem(context, collection)
|
||||
.withAuthor("Donald, Smith")
|
||||
.build();
|
||||
context.restoreAuthSystemState();
|
||||
String fileLocation = configurationService.getProperty("dspace.dir")
|
||||
+ testProps.get("test.exportcsv").toString();
|
||||
|
||||
String[] args = new String[] {"metadata-export",
|
||||
"-i", String.valueOf(item.getHandle()),
|
||||
"-f", fileLocation};
|
||||
TestDSpaceRunnableHandler testDSpaceRunnableHandler
|
||||
= new TestDSpaceRunnableHandler();
|
||||
|
||||
ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl),
|
||||
testDSpaceRunnableHandler, kernelImpl);
|
||||
File file = new File(fileLocation);
|
||||
String fileContent = IOUtils.toString(new FileInputStream(file), StandardCharsets.UTF_8);
|
||||
assertTrue(fileContent.contains("Donald, Smith"));
|
||||
assertTrue(fileContent.contains(String.valueOf(item.getID())));
|
||||
}
|
||||
}
|
@@ -1,71 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.bulkedit;
|
||||
|
||||
import static junit.framework.TestCase.assertTrue;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.dspace.AbstractIntegrationTest;
|
||||
import org.dspace.app.launcher.ScriptLauncher;
|
||||
import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Community;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.WorkspaceItem;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.CollectionService;
|
||||
import org.dspace.content.service.CommunityService;
|
||||
import org.dspace.content.service.InstallItemService;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.content.service.WorkspaceItemService;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.junit.Test;
|
||||
|
||||
public class MetadataExportTest extends AbstractIntegrationTest {
|
||||
|
||||
private ItemService itemService = ContentServiceFactory.getInstance().getItemService();
|
||||
private CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
|
||||
private CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
|
||||
private WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService();
|
||||
private InstallItemService installItemService = ContentServiceFactory.getInstance().getInstallItemService();
|
||||
private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
|
||||
@Test
|
||||
public void metadataExportToCsvTest() throws Exception {
|
||||
context.turnOffAuthorisationSystem();
|
||||
Community community = communityService.create(null, context);
|
||||
Collection collection = collectionService.create(context, community);
|
||||
WorkspaceItem wi = workspaceItemService.create(context, collection, true);
|
||||
Item item = wi.getItem();
|
||||
itemService.addMetadata(context, item, "dc", "contributor", "author", null, "Donald, Smith");
|
||||
item = installItemService.installItem(context, wi);
|
||||
context.restoreAuthSystemState();
|
||||
String fileLocation = configurationService.getProperty("dspace.dir") + testProps.get("test.exportcsv")
|
||||
.toString();
|
||||
|
||||
String[] args = new String[] {"metadata-export", "-i", String.valueOf(item.getHandle()), "-f", fileLocation};
|
||||
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
|
||||
|
||||
ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);
|
||||
File file = new File(fileLocation);
|
||||
String fileContent = IOUtils.toString(new FileInputStream(file), StandardCharsets.UTF_8);
|
||||
assertTrue(fileContent.contains("Donald, Smith"));
|
||||
assertTrue(fileContent.contains(String.valueOf(item.getID())));
|
||||
|
||||
context.turnOffAuthorisationSystem();
|
||||
itemService.delete(context, itemService.find(context, item.getID()));
|
||||
collectionService.delete(context, collectionService.find(context, collection.getID()));
|
||||
communityService.delete(context, communityService.find(context, community.getID()));
|
||||
context.restoreAuthSystemState();
|
||||
}
|
||||
}
|
@@ -22,16 +22,16 @@ import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.CollectionService;
|
||||
import org.dspace.content.service.CommunityService;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.junit.Test;
|
||||
|
||||
public class MetadataImportTest extends AbstractIntegrationTest {
|
||||
|
||||
private ItemService itemService = ContentServiceFactory.getInstance().getItemService();
|
||||
private CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
|
||||
private CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
|
||||
private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
private final ItemService itemService
|
||||
= ContentServiceFactory.getInstance().getItemService();
|
||||
private final CollectionService collectionService
|
||||
= ContentServiceFactory.getInstance().getCollectionService();
|
||||
private final CommunityService communityService
|
||||
= ContentServiceFactory.getInstance().getCommunityService();
|
||||
|
||||
@Test
|
||||
public void metadataImportTest() throws Exception {
|
||||
|
@@ -21,4 +21,8 @@ public class MockAuthoritySolrServiceImpl extends AuthoritySolrServiceImpl imple
|
||||
//We don't use SOLR in the tests of this module
|
||||
solr = null;
|
||||
}
|
||||
|
||||
public void reset() {
|
||||
// This method intentionally left blank.
|
||||
}
|
||||
}
|
||||
|
@@ -20,7 +20,7 @@ import org.junit.Test;
|
||||
* @author Andrea Bollini (andrea.bollini at 4science.it)
|
||||
*
|
||||
*/
|
||||
public class AuthorizeConfigIntegrationTest extends AbstractIntegrationTest {
|
||||
public class AuthorizeConfigIT extends AbstractIntegrationTest {
|
||||
|
||||
@Test
|
||||
public void testReloadConfiguration() {
|
@@ -5,18 +5,18 @@
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.rest.builder;
|
||||
package org.dspace.builder;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.collections4.CollectionUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.rest.builder.util.AbstractBuilderCleanupUtil;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.authorize.factory.AuthorizeServiceFactory;
|
||||
import org.dspace.authorize.service.AuthorizeService;
|
||||
import org.dspace.authorize.service.ResourcePolicyService;
|
||||
import org.dspace.builder.util.AbstractBuilderCleanupUtil;
|
||||
import org.dspace.content.Bitstream;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.BitstreamFormatService;
|
||||
@@ -55,8 +55,8 @@ import org.dspace.xmlworkflow.storedcomponents.service.XmlWorkflowItemService;
|
||||
/**
|
||||
* Abstract builder class that holds references to all available services
|
||||
*
|
||||
* @param <T> This param represents the Model object for the Builder
|
||||
* @param <S> This param represents the Service object for the builder
|
||||
* @param <T> This parameter represents the Model object for the Builder
|
||||
* @param <S> This parameter represents the Service object for the builder
|
||||
* @author Jonas Van Goolen - (jonas@atmire.com)
|
||||
*/
|
||||
public abstract class AbstractBuilder<T, S> {
|
||||
@@ -96,7 +96,8 @@ public abstract class AbstractBuilder<T, S> {
|
||||
* This static class will make sure that the objects built with the builders are disposed of in a foreign-key
|
||||
* constraint safe manner by predefining an order
|
||||
*/
|
||||
private static AbstractBuilderCleanupUtil abstractBuilderCleanupUtil = new AbstractBuilderCleanupUtil();
|
||||
private static final AbstractBuilderCleanupUtil abstractBuilderCleanupUtil
|
||||
= new AbstractBuilderCleanupUtil();
|
||||
/**
|
||||
* log4j category
|
||||
*/
|
@@ -5,7 +5,7 @@
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.rest.builder;
|
||||
package org.dspace.builder;
|
||||
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.ReloadableEntity;
|
||||
@@ -13,6 +13,8 @@ import org.dspace.service.DSpaceCRUDService;
|
||||
|
||||
/**
|
||||
* @author Jonas Van Goolen - (jonas@atmire.com)
|
||||
*
|
||||
* @param <T> A specific kind of ReloadableEntity.
|
||||
*/
|
||||
public abstract class AbstractCRUDBuilder<T extends ReloadableEntity> extends AbstractBuilder<T, DSpaceCRUDService> {
|
||||
|
||||
@@ -20,8 +22,10 @@ public abstract class AbstractCRUDBuilder<T extends ReloadableEntity> extends Ab
|
||||
super(context);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected abstract DSpaceCRUDService getService();
|
||||
|
||||
@Override
|
||||
public abstract T build();
|
||||
|
||||
public void delete(T dso) throws Exception {
|
@@ -5,7 +5,7 @@
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.rest.builder;
|
||||
package org.dspace.builder;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.Date;
|
||||
@@ -43,12 +43,15 @@ public abstract class AbstractDSpaceObjectBuilder<T extends DSpaceObject>
|
||||
this.context = context;
|
||||
}
|
||||
|
||||
@Override
|
||||
public abstract void cleanup() throws Exception;
@Override
protected abstract DSpaceObjectService<T> getService();
@Override
protected <B> B handleException(final Exception e) {
log.error(e.getMessage(), e);
return null;

@@ -231,13 +234,15 @@ public abstract class AbstractDSpaceObjectBuilder<T extends DSpaceObject>
return (B) this;
}
@Override
public abstract T build() throws SQLException, AuthorizeException;
@Override
public void delete(Context c, T dso) throws Exception {
if (dso != null) {
getService().delete(c, dso);
}
c.complete();
indexingService.commit();
if (dso != null) {
getService().delete(c, dso);
}
c.complete();
indexingService.commit();
}
}

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.io.InputStream;

@@ -129,6 +129,7 @@ public class BitstreamBuilder extends AbstractDSpaceObjectBuilder<Bitstream> {
return this;
}
@Override
public Bitstream build() {
try {
bitstreamService.update(context, bitstream);

@@ -152,7 +153,7 @@ public class BitstreamBuilder extends AbstractDSpaceObjectBuilder<Bitstream> {
@Override
public void cleanup() throws Exception {
try (Context c = new Context()) {
try (Context c = new Context()) {
c.turnOffAuthorisationSystem();
// Ensure object and any related objects are reloaded before checking to see what needs cleanup
bitstream = c.reloadEntity(bitstream);

@@ -163,6 +164,7 @@ public class BitstreamBuilder extends AbstractDSpaceObjectBuilder<Bitstream> {
}
}
@Override
protected DSpaceObjectService<Bitstream> getService() {
return bitstreamService;
}

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.sql.SQLException;

@@ -71,7 +71,6 @@ public class BitstreamFormatBuilder extends AbstractCRUDBuilder<BitstreamFormat>
log.error(e);
} catch (AuthorizeException e) {
log.error(e);
;
}
return bitstreamFormat;
}

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.sql.SQLException;

@@ -25,7 +25,7 @@ public class BundleBuilder extends AbstractDSpaceObjectBuilder<Bundle> {
private Bundle bundle;
private Item item;
private String name;
private List<Bitstream> bitstreams = new ArrayList<>();
private final List<Bitstream> bitstreams = new ArrayList<>();
protected BundleBuilder(Context context) {
super(context);

@@ -52,6 +52,7 @@ public class BundleBuilder extends AbstractDSpaceObjectBuilder<Bundle> {
return this;
}
@Override
public void cleanup() throws Exception {
try (Context c = new Context()) {
c.turnOffAuthorisationSystem();

@@ -64,10 +65,12 @@ public class BundleBuilder extends AbstractDSpaceObjectBuilder<Bundle> {
}
}
@Override
protected DSpaceObjectService<Bundle> getService() {
return bundleService;
}
@Override
public Bundle build() throws SQLException, AuthorizeException {
bundle = bundleService.create(context, item, name);
@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.InputStream;

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.io.InputStream;

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.io.InputStream;

@@ -5,12 +5,14 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.sql.SQLException;
import java.util.UUID;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.service.DSpaceObjectService;
import org.dspace.core.Context;

@@ -19,6 +21,7 @@ import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
public class EPersonBuilder extends AbstractDSpaceObjectBuilder<EPerson> {
private static final Logger LOG = LogManager.getLogger(EPersonBuilder.class);
private EPerson ePerson;

@@ -28,7 +31,7 @@ public class EPersonBuilder extends AbstractDSpaceObjectBuilder<EPerson> {
@Override
public void cleanup() throws Exception {
try (Context c = new Context()) {
try (Context c = new Context()) {
c.turnOffAuthorisationSystem();
// Ensure object and any related objects are reloaded before checking to see what needs cleanup
ePerson = c.reloadEntity(ePerson);

@@ -36,23 +39,21 @@ public class EPersonBuilder extends AbstractDSpaceObjectBuilder<EPerson> {
delete(c, ePerson);
c.complete();
}
}
}
}
@Override
protected DSpaceObjectService<EPerson> getService() {
return ePersonService;
}
@Override
public EPerson build() {
try {
ePersonService.update(context, ePerson);
indexingService.commit();
} catch (SearchServiceException e) {
e.printStackTrace();
} catch (SQLException e) {
e.printStackTrace();
} catch (AuthorizeException e) {
e.printStackTrace();
} catch (SearchServiceException | SQLException | AuthorizeException e) {
LOG.warn("Failed to complete the EPerson", e);
}
return ePerson;
}

@@ -65,10 +66,8 @@ public class EPersonBuilder extends AbstractDSpaceObjectBuilder<EPerson> {
private EPersonBuilder create() {
try {
ePerson = ePersonService.create(context);
} catch (SQLException e) {
e.printStackTrace();
} catch (AuthorizeException e) {
e.printStackTrace();
} catch (SQLException | AuthorizeException e) {
LOG.warn("Failed to create the EPerson", e);
}
return this;
}

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.sql.SQLException;

@@ -53,6 +53,7 @@ public class EntityTypeBuilder extends AbstractBuilder<EntityType, EntityTypeSer
}
}
@Override
public EntityType build() {
try {

@@ -91,7 +92,7 @@ public class EntityTypeBuilder extends AbstractBuilder<EntityType, EntityTypeSer
this.entityType = entityTypeService.create(context, entityType);
} catch (SQLException | AuthorizeException e) {
e.printStackTrace();
log.warn("Failed to create the EntityType", e);
}
return this;
@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.sql.SQLException;

@@ -34,7 +34,7 @@ public class GroupBuilder extends AbstractDSpaceObjectBuilder<Group> {
@Override
public void cleanup() throws Exception {
try (Context c = new Context()) {
try (Context c = new Context()) {
c.turnOffAuthorisationSystem();
// Ensure object and any related objects are reloaded before checking to see what needs cleanup
group = c.reloadEntity(group);

@@ -42,7 +42,7 @@ public class GroupBuilder extends AbstractDSpaceObjectBuilder<Group> {
delete(c, group);
c.complete();
}
}
}
}
public static GroupBuilder createGroup(final Context context) {

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.sql.SQLException;

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.sql.SQLException;

@@ -64,17 +64,9 @@ public class MetadataFieldBuilder extends AbstractBuilder<MetadataField, Metadat
context.dispatchEvents();
indexingService.commit();
} catch (SearchServiceException e) {
log.error(e);
} catch (SQLException e) {
log.error(e);
} catch (AuthorizeException e) {
log.error(e);
;
} catch (NonUniqueMetadataException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} catch (SearchServiceException | SQLException | AuthorizeException
| NonUniqueMetadataException | IOException e) {
log.error("Failed to complete MetadataField", e);
}
return metadataField;
}

@@ -104,7 +96,7 @@ public class MetadataFieldBuilder extends AbstractBuilder<MetadataField, Metadat
MetadataField metadataField = metadataFieldService.find(c, id);
if (metadataField != null) {
try {
metadataFieldService.delete(c, metadataField);
metadataFieldService.delete(c, metadataField);
} catch (AuthorizeException e) {
throw new RuntimeException(e);
}

@@ -141,7 +133,7 @@ public class MetadataFieldBuilder extends AbstractBuilder<MetadataField, Metadat
metadataField = metadataFieldService
.create(context, schema, element, qualifier, scopeNote);
} catch (NonUniqueMetadataException e) {
e.printStackTrace();
log.error("Failed to create MetadataField", e);
}
return this;

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.sql.SQLException;

@@ -63,15 +63,10 @@ public class MetadataSchemaBuilder extends AbstractBuilder<MetadataSchema, Metad
context.dispatchEvents();
indexingService.commit();
} catch (SearchServiceException e) {
} catch (SearchServiceException | SQLException | AuthorizeException e) {
log.error(e);
} catch (SQLException e) {
log.error(e);
} catch (AuthorizeException e) {
log.error(e);
;
} catch (NonUniqueMetadataException e) {
e.printStackTrace();
log.error("Failed to complete MetadataSchema", e);
}
return metadataSchema;
}

@@ -101,7 +96,7 @@ public class MetadataSchemaBuilder extends AbstractBuilder<MetadataSchema, Metad
MetadataSchema metadataSchema = metadataSchemaService.find(c, id);
if (metadataSchema != null) {
try {
metadataSchemaService.delete(c, metadataSchema);
metadataSchemaService.delete(c, metadataSchema);
} catch (AuthorizeException e) {
throw new RuntimeException(e);
}

@@ -123,7 +118,7 @@ public class MetadataSchemaBuilder extends AbstractBuilder<MetadataSchema, Metad
try {
metadataSchema = metadataSchemaService.create(context, name, namespace);
} catch (NonUniqueMetadataException e) {
e.printStackTrace();
log.error("Failed to create MetadataSchema", e);
}
return this;

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.InputStream;

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.sql.SQLException;

@@ -57,6 +57,7 @@ public class ProcessBuilder extends AbstractBuilder<Process, ProcessService> {
}
}
@Override
public Process build() {
try {
processService.update(context, process);

@@ -68,6 +69,7 @@ public class ProcessBuilder extends AbstractBuilder<Process, ProcessService> {
return process;
}
@Override
protected ProcessService getService() {
return processService;
}
@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.sql.SQLException;

@@ -56,6 +56,7 @@ public class RelationshipBuilder extends AbstractBuilder<Relationship, Relations
}
}
@Override
public Relationship build() {
try {

@@ -117,7 +118,7 @@ public class RelationshipBuilder extends AbstractBuilder<Relationship, Relations
try {
relationship = relationshipService.create(context, leftItem, rightItem, relationshipType, 0, 0);
} catch (SQLException | AuthorizeException e) {
e.printStackTrace();
log.warn("Failed to create relationship", e);
}
return this;

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.sql.SQLException;
import java.util.List;

@@ -62,6 +62,7 @@ public class RelationshipTypeBuilder extends AbstractBuilder<RelationshipType, R
}
}
@Override
public RelationshipType build() {
try {

@@ -116,7 +117,7 @@ public class RelationshipTypeBuilder extends AbstractBuilder<RelationshipType, R
leftCardinalityMax, rightCardinalityMin, rightCardinalityMax);
} catch (SQLException | AuthorizeException e) {
e.printStackTrace();
log.error("Failed to create RelationshipType", e);
}
return this;

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.sql.SQLException;

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import org.dspace.content.Site;
import org.dspace.content.service.DSpaceObjectService;

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.io.InputStream;

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.io.InputStream;

@@ -5,32 +5,32 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder.util;
package org.dspace.builder.util;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.dspace.app.rest.builder.AbstractBuilder;
import org.dspace.app.rest.builder.BitstreamBuilder;
import org.dspace.app.rest.builder.BitstreamFormatBuilder;
import org.dspace.app.rest.builder.ClaimedTaskBuilder;
import org.dspace.app.rest.builder.CollectionBuilder;
import org.dspace.app.rest.builder.CommunityBuilder;
import org.dspace.app.rest.builder.EPersonBuilder;
import org.dspace.app.rest.builder.EntityTypeBuilder;
import org.dspace.app.rest.builder.GroupBuilder;
import org.dspace.app.rest.builder.ItemBuilder;
import org.dspace.app.rest.builder.MetadataFieldBuilder;
import org.dspace.app.rest.builder.MetadataSchemaBuilder;
import org.dspace.app.rest.builder.PoolTaskBuilder;
import org.dspace.app.rest.builder.ProcessBuilder;
import org.dspace.app.rest.builder.RelationshipBuilder;
import org.dspace.app.rest.builder.RelationshipTypeBuilder;
import org.dspace.app.rest.builder.SiteBuilder;
import org.dspace.app.rest.builder.WorkflowItemBuilder;
import org.dspace.app.rest.builder.WorkspaceItemBuilder;
import org.dspace.builder.AbstractBuilder;
import org.dspace.builder.BitstreamBuilder;
import org.dspace.builder.BitstreamFormatBuilder;
import org.dspace.builder.ClaimedTaskBuilder;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.EPersonBuilder;
import org.dspace.builder.EntityTypeBuilder;
import org.dspace.builder.GroupBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.builder.MetadataFieldBuilder;
import org.dspace.builder.MetadataSchemaBuilder;
import org.dspace.builder.PoolTaskBuilder;
import org.dspace.builder.ProcessBuilder;
import org.dspace.builder.RelationshipBuilder;
import org.dspace.builder.RelationshipTypeBuilder;
import org.dspace.builder.SiteBuilder;
import org.dspace.builder.WorkflowItemBuilder;
import org.dspace.builder.WorkspaceItemBuilder;
/**
* This class will ensure that all the builders that are registered will be cleaned up in the order as defined

@@ -39,7 +39,8 @@ import org.dspace.app.rest.builder.WorkspaceItemBuilder;
*/
public class AbstractBuilderCleanupUtil {
private LinkedHashMap<String, List<AbstractBuilder>> map = new LinkedHashMap<>();
private final LinkedHashMap<String, List<AbstractBuilder>> map
= new LinkedHashMap<>();
/**
* Constructor that will initialize the Map with a predefined order for deletion
@@ -8,23 +8,27 @@
package org.dspace.curate;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.HashMap;
import java.util.Map;
import org.dspace.AbstractUnitTest;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.SiteService;
import org.dspace.core.factory.CoreServiceFactory;
import org.dspace.ctask.general.NoOpCurationTask;
import org.dspace.services.ConfigurationService;
import org.junit.Test;
/**
*
* @author mhwood
*/
public class CuratorTest
extends AbstractUnitTest {
public class CuratorTest extends AbstractUnitTest {
private static final SiteService SITE_SERVICE = ContentServiceFactory.getInstance().getSiteService();
static final String RUN_PARAMETER_NAME = "runParameter";

@@ -32,20 +36,24 @@ public class CuratorTest
static final String TASK_PROPERTY_NAME = "taskProperty";
static final String TASK_PROPERTY_VALUE = "a property";
/** Value of a known runtime parameter, if any. */
/**
* Value of a known runtime parameter, if any.
*/
static String runParameter;
/** Value of a known task property, if any. */
/**
* Value of a known task property, if any.
*/
static String taskProperty;
/**
* Test of curate method, of class Curator.
* Currently this just tests task properties and run parameters.
*
* @throws java.lang.Exception passed through.
*/
@Test
public void testCurate_DSpaceObject()
throws Exception {
public void testCurate_DSpaceObject() throws Exception {
System.out.println("curate");
final String TASK_NAME = "dummyTask";

@@ -53,7 +61,7 @@ public class CuratorTest
// Configure the task to be run.
ConfigurationService cfg = kernelImpl.getConfigurationService();
cfg.setProperty("plugin.named.org.dspace.curate.CurationTask",
DummyTask.class.getName() + " = " + TASK_NAME);
DummyTask.class.getName() + " = " + TASK_NAME);
cfg.setProperty(TASK_NAME + '.' + TASK_PROPERTY_NAME, TASK_PROPERTY_VALUE);
// Get and configure a Curator.

@@ -72,12 +80,40 @@ public class CuratorTest
// Check the result.
System.out.format("Task %s result was '%s'%n",
TASK_NAME, instance.getResult(TASK_NAME));
TASK_NAME, instance.getResult(TASK_NAME));
System.out.format("Task %s status was %d%n",
TASK_NAME, instance.getStatus(TASK_NAME));
TASK_NAME, instance.getStatus(TASK_NAME));
assertEquals("Unexpected task status",
Curator.CURATE_SUCCESS, instance.getStatus(TASK_NAME));
Curator.CURATE_SUCCESS, instance.getStatus(TASK_NAME));
assertEquals("Wrong run parameter", RUN_PARAMETER_VALUE, runParameter);
assertEquals("Wrong task property", TASK_PROPERTY_VALUE, taskProperty);
}
@Test
public void testCurate_NoOpTask() throws Exception {
CoreServiceFactory.getInstance().getPluginService().clearNamedPluginClasses();
final String TASK_NAME = "noop";
// Configure the noop task to be run.
ConfigurationService cfg = kernelImpl.getConfigurationService();
cfg.setProperty("plugin.named.org.dspace.curate.CurationTask",
NoOpCurationTask.class.getName() + " = " + TASK_NAME);
// Get and configure a Curator.
Curator curator = new Curator();
StringBuilder reporterOutput = new StringBuilder();
curator.setReporter(reporterOutput); // Send any report to our StringBuilder.
curator.addTask(TASK_NAME);
Item item = mock(Item.class);
when(item.getType()).thenReturn(2);
when(item.getHandle()).thenReturn("testHandle");
curator.curate(context, item);
assertEquals(Curator.CURATE_SUCCESS, curator.getStatus(TASK_NAME));
assertEquals(reporterOutput.toString(), "No operation performed on testHandle");
}
}
@@ -7,19 +7,35 @@
*/
package org.dspace.discovery;
import org.dspace.solr.MockSolrServer;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.stereotype.Service;
/**
* Mock SOLR service for the Search Core
* Mock SOLR service for the Search Core. Manages an in-process Solr server
* with an in-memory "search" core.
*/
@Service
public class MockSolrSearchCore extends SolrSearchCore implements InitializingBean {
public class MockSolrSearchCore extends SolrSearchCore
implements InitializingBean, DisposableBean {
private MockSolrServer mockSolrServer;
@Override
public void afterPropertiesSet() throws Exception {
//We don't use SOLR in the tests of this module
solr = null;
mockSolrServer = new MockSolrServer("search");
solr = mockSolrServer.getSolrServer();
}
/**
* Reset the core for the next test. See {@link MockSolrServer#reset()}.
*/
public void reset() {
mockSolrServer.reset();
}
@Override
public void destroy() throws Exception {
mockSolrServer.destroy();
}
}

@@ -29,6 +29,7 @@ public class MockCCLicenseConnectorServiceImpl extends CCLicenseConnectorService
* @param language - the language
* @return a map of mocked licenses with the id and the license
*/
@Override
public Map<String, CCLicense> retrieveLicenses(String language) {
Map<String, CCLicense> ccLicenses = new HashMap<>();
CCLicense mockLicense1 = createMockLicense(1, new int[]{3, 2, 3});

@@ -89,6 +90,7 @@ public class MockCCLicenseConnectorServiceImpl extends CCLicenseConnectorService
* @param answerMap - the answers to the different field questions
* @return the CC License URI
*/
@Override
public String retrieveRightsByQuestion(final String licenseId,
final String language,
final Map<String, String> answerMap) {

@@ -105,6 +107,7 @@ public class MockCCLicenseConnectorServiceImpl extends CCLicenseConnectorService
* @return a mock license RDF document or null when the URI contains invalid
* @throws IOException
*/
@Override
public Document retrieveLicenseRDFDoc(String licenseURI) throws IOException {
if (!StringUtils.contains(licenseURI, "invalid")) {
InputStream cclicense = null;

@@ -19,7 +19,7 @@ import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
import org.apache.solr.core.CoreContainer;
import org.dspace.app.rest.test.AbstractDSpaceIntegrationTest;
import org.dspace.AbstractDSpaceIntegrationTest;
/**
* Factory of connections to an in-process embedded Solr service.

@@ -110,7 +110,7 @@ public class MockSolrServer {
server.deleteByQuery("*:*");
server.commit();
} catch (SolrServerException | IOException e) {
e.printStackTrace(System.err);
log.error("Failed to empty Solr index: {}", e.getMessage(), e);
}
loadedCores.put(coreName, server);
@@ -27,27 +27,29 @@ import com.maxmind.geoip2.record.MaxMind;
import com.maxmind.geoip2.record.Postal;
import com.maxmind.geoip2.record.RepresentedCountry;
import com.maxmind.geoip2.record.Traits;
import org.dspace.solr.MockSolrServer;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.stereotype.Service;
/**
* Mock service that uses an embedded SOLR server for the statistics core.
* <p>
* <strong>NOTE:</strong> this class is overridden by one <em>of the same name</em>
* defined in dspace-server-webapp and declared as a bean there.
* See {@code test/data/dspaceFolder/config/spring/api/solr-services.xml}. Some kind of classpath
* magic makes this work.
*/
@Service
public class MockSolrLoggerServiceImpl
extends SolrLoggerServiceImpl
implements InitializingBean {
implements InitializingBean, DisposableBean {
private MockSolrServer mockSolrServer;
public MockSolrLoggerServiceImpl() {
}
@Override
public void afterPropertiesSet() throws Exception {
//We don't use SOLR in the tests of this module
solr = null;
// Initialize our service with a Mock Solr statistics core
mockSolrServer = new MockSolrServer("statistics");
solr = mockSolrServer.getSolrServer();
// Mock GeoIP's DatabaseReader
DatabaseReader reader = mock(DatabaseReader.class);

@@ -58,14 +60,16 @@ public class MockSolrLoggerServiceImpl
}
/**
* A mock/fake GeoIP CityResponse, which will be used for *all* test statistical requests
* A mock/fake GeoIP CityResponse, which will be used for *all* test
* statistical requests.
*
* @return faked CityResponse
*/
private CityResponse mockCityResponse() {
List<String> cityNames = new ArrayList<String>(Collections.singleton("New York"));
List<String> cityNames = new ArrayList<>(Collections.singleton("New York"));
City city = new City(cityNames, 1, 1, new HashMap());
List<String> countryNames = new ArrayList<String>(Collections.singleton("United States"));
List<String> countryNames = new ArrayList<>(Collections.singleton("United States"));
Country country = new Country(countryNames, 1, 1, "US", new HashMap());
Location location = new Location(1, 1, 40.760498D, -73.9933D, 501, 1, "EST");

@@ -73,7 +77,17 @@ public class MockSolrLoggerServiceImpl
Postal postal = new Postal("10036", 1);
return new CityResponse(city, new Continent(), country, location, new MaxMind(), postal,
country, new RepresentedCountry(), new ArrayList<>(0),
new Traits());
country, new RepresentedCountry(), new ArrayList<>(0),
new Traits());
}
/** Reset the core for the next test. See {@link MockSolrServer#reset()}. */
public void reset() {
mockSolrServer.reset();
}
@Override
public void destroy() throws Exception {
mockSolrServer.destroy();
}
}

@@ -10,8 +10,10 @@ package org.dspace.xmlworkflow;
import static junit.framework.TestCase.assertEquals;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.sql.SQLException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.AbstractUnitTest;
import org.dspace.authorize.AuthorizeException;

@@ -35,9 +37,11 @@ import org.junit.Test;
*/
public class XmlWorkflowFactoryTest extends AbstractUnitTest {
private CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
private CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
private XmlWorkflowFactory xmlWorkflowFactory
private final CollectionService collectionService
= ContentServiceFactory.getInstance().getCollectionService();
private final CommunityService communityService
= ContentServiceFactory.getInstance().getCommunityService();
private final XmlWorkflowFactory xmlWorkflowFactory
= new DSpace().getServiceManager().getServiceByName("xmlWorkflowFactory",
XmlWorkflowFactoryImpl.class);
private Community owningCommunity;

@@ -47,7 +51,7 @@ public class XmlWorkflowFactoryTest extends AbstractUnitTest {
/**
* log4j category
*/
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(XmlWorkflowFactoryTest.class);
private static final Logger log = LogManager.getLogger(XmlWorkflowFactoryTest.class);
/**
* This method will be run before every test as per @Before. It will

@@ -94,7 +98,7 @@ public class XmlWorkflowFactoryTest extends AbstractUnitTest {
this.collectionService.delete(context, this.nonMappedCollection);
this.collectionService.delete(context, this.mappedCollection);
this.communityService.delete(context, this.owningCommunity);
} catch (Exception e) {
} catch (IOException | SQLException | AuthorizeException e) {
log.error("Error in destroy", e);
}

@@ -112,12 +116,12 @@ public class XmlWorkflowFactoryTest extends AbstractUnitTest {
@Test
public void workflowMapping_NonMappedCollection() throws WorkflowConfigurationException {
Workflow workflow = xmlWorkflowFactory.getWorkflow(this.nonMappedCollection);
assertEquals(workflow.getID(), "defaultWorkflow");
assertEquals("defaultWorkflow", workflow.getID());
}
@Test
public void workflowMapping_MappedCollection() throws WorkflowConfigurationException {
Workflow workflow = xmlWorkflowFactory.getWorkflow(this.mappedCollection);
assertEquals(workflow.getID(), "selectSingleReviewer");
assertEquals("selectSingleReviewer", workflow.getID());
}
}
@@ -37,7 +37,7 @@
<activation>
<activeByDefault>false</activeByDefault>
<property>
<name>maven.test.skip</name>
<name>skipTests</name>
<value>false</value>
</property>
</activation>

@@ -307,6 +313,13 @@
<artifactId>dspace-api</artifactId>
</dependency>
<dependency>
<groupId>org.dspace</groupId>
<artifactId>dspace-api</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.dspace</groupId>
<artifactId>dspace-services</artifactId>

@@ -532,13 +539,11 @@
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-smartcn</artifactId>
<version>${solr.client.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-stempel</artifactId>
<version>${solr.client.version}</version>
<scope>test</scope>
</dependency>
@@ -7,6 +7,8 @@
*/
package org.dspace.app.rest;
import static org.apache.commons.collections4.ListUtils.emptyIfNull;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;

@@ -100,51 +102,55 @@ public class DiscoveryRestController implements InitializingBean {
@RequestMapping(method = RequestMethod.GET, value = "/search/facets")
public FacetsResource getFacets(@RequestParam(name = "query", required = false) String query,
@RequestParam(name = "dsoType", required = false) String dsoType,
@RequestParam(name = "dsoType", required = false) List<String> dsoTypes,
@RequestParam(name = "scope", required = false) String dsoScope,
@RequestParam(name = "configuration", required = false) String configuration,
List<SearchFilter> searchFilters,
Pageable page) throws Exception {
dsoTypes = emptyIfNull(dsoTypes);
if (log.isTraceEnabled()) {
log.trace("Searching with scope: " + StringUtils.trimToEmpty(dsoScope)
+ ", configuration name: " + StringUtils.trimToEmpty(configuration)
+ ", dsoType: " + StringUtils.trimToEmpty(dsoType)
+ ", query: " + StringUtils.trimToEmpty(query)
+ ", filters: " + Objects.toString(searchFilters));
+ ", configuration name: " + StringUtils.trimToEmpty(configuration)
+ ", dsoTypes: " + String.join(", ", dsoTypes)
+ ", query: " + StringUtils.trimToEmpty(query)
+ ", filters: " + Objects.toString(searchFilters));
}
SearchResultsRest searchResultsRest = discoveryRestRepository
.getAllFacets(query, dsoType, dsoScope, configuration, searchFilters);
.getAllFacets(query, dsoTypes, dsoScope, configuration, searchFilters);
FacetsResource facetsResource = new FacetsResource(searchResultsRest, page);
halLinkService.addLinks(facetsResource, page);
return facetsResource;
}
@RequestMapping(method = RequestMethod.GET, value = "/search/objects")
public SearchResultsResource getSearchObjects(@RequestParam(name = "query", required = false) String query,
@RequestParam(name = "dsoType", required = false) String dsoType,
@RequestParam(name = "dsoType", required = false)
List<String> dsoTypes,
@RequestParam(name = "scope", required = false) String dsoScope,
@RequestParam(name = "configuration", required = false) String
configuration,
List<SearchFilter> searchFilters,
Pageable page) throws Exception {
dsoTypes = emptyIfNull(dsoTypes);
if (log.isTraceEnabled()) {
log.trace("Searching with scope: " + StringUtils.trimToEmpty(dsoScope)
+ ", configuration name: " + StringUtils.trimToEmpty(configuration)
+ ", dsoType: " + StringUtils.trimToEmpty(dsoType)
+ ", query: " + StringUtils.trimToEmpty(query)
+ ", filters: " + Objects.toString(searchFilters)
+ ", page: " + Objects.toString(page));
+ ", configuration name: " + StringUtils.trimToEmpty(configuration)
+ ", dsoTypes: " + String.join(", ", dsoTypes)
+ ", query: " + StringUtils.trimToEmpty(query)
+ ", filters: " + Objects.toString(searchFilters)
+ ", page: " + Objects.toString(page));
}
//Get the Search results in JSON format
SearchResultsRest searchResultsRest = discoveryRestRepository
.getSearchObjects(query, dsoType, dsoScope, configuration, searchFilters, page, utils.obtainProjection());
.getSearchObjects(query, dsoTypes, dsoScope, configuration, searchFilters, page, utils.obtainProjection());
//Convert the Search JSON results to paginated HAL resources
SearchResultsResource searchResultsResource = new SearchResultsResource(searchResultsRest, utils, page);

@@ -174,15 +180,18 @@ public class DiscoveryRestController implements InitializingBean {
public RepresentationModel getFacetValues(@PathVariable("name") String facetName,
@RequestParam(name = "prefix", required = false) String prefix,
@RequestParam(name = "query", required = false) String query,
@RequestParam(name = "dsoType", required = false) String dsoType,
@RequestParam(name = "dsoType", required = false) List<String> dsoTypes,
@RequestParam(name = "scope", required = false) String dsoScope,
@RequestParam(name = "configuration", required = false) String
configuration,
List<SearchFilter> searchFilters,
Pageable page) throws Exception {
dsoTypes = emptyIfNull(dsoTypes);
if (log.isTraceEnabled()) {
log.trace("Facetting on facet " + facetName + " with scope: " + StringUtils.trimToEmpty(dsoScope)
+ ", dsoType: " + StringUtils.trimToEmpty(dsoType)
+ ", dsoTypes: " + String.join(", ", dsoTypes)
+ ", prefix: " + StringUtils.trimToEmpty(prefix)
+ ", query: " + StringUtils.trimToEmpty(query)
+ ", filters: " + Objects.toString(searchFilters)

@@ -190,7 +199,7 @@ public class DiscoveryRestController implements InitializingBean {
}
FacetResultsRest facetResultsRest = discoveryRestRepository
.getFacetObjects(facetName, prefix, query, dsoType, dsoScope, configuration, searchFilters, page);
.getFacetObjects(facetName, prefix, query, dsoTypes, dsoScope, configuration, searchFilters, page);
FacetResultsResource facetResultsResource = converter.toResource(facetResultsRest);

@@ -624,7 +624,7 @@ public class RestResourceController implements InitializingBean {
HttpServletRequest request,
@PathVariable String apiCategory,
@PathVariable String model,
@RequestParam("file") MultipartFile uploadfile)
@RequestParam("file") List<MultipartFile> uploadfile)
throws SQLException, FileNotFoundException, IOException, AuthorizeException {
checkModelPluralForm(apiCategory, model);
@@ -11,7 +11,9 @@ import java.io.IOException;
import java.util.Arrays;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.rest.model.AuthnRest;
import org.dspace.core.Utils;
import org.dspace.services.ConfigurationService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -47,14 +49,29 @@ public class ShibbolethRestController implements InitializingBean {
.register(this, Arrays.asList(new Link("/api/" + AuthnRest.CATEGORY, "shibboleth")));
}
// LGTM.com thinks this method has an unvalidated URL redirect (https://lgtm.com/rules/4840088/) in `redirectUrl`,
// even though we are clearly validating the hostname of `redirectUrl` and test it in ShibbolethRestControllerIT
@SuppressWarnings("lgtm[java/unvalidated-url-redirection]")
@RequestMapping(method = RequestMethod.GET)
public void shibboleth(HttpServletResponse response,
@RequestParam(name = "redirectUrl", required = false) String redirectUrl) throws IOException {
if (redirectUrl == null) {
redirectUrl = configurationService.getProperty("dspace.ui.url");
}
log.info("Redirecting to " + redirectUrl);
response.sendRedirect(redirectUrl);
// Validate that the redirectURL matches either the server or UI hostname. It *cannot* be an arbitrary URL.
String redirectHostName = Utils.getHostName(redirectUrl);
String serverHostName = Utils.getHostName(configurationService.getProperty("dspace.server.url"));
String clientHostName = Utils.getHostName(configurationService.getProperty("dspace.ui.url"));
if (StringUtils.equalsAnyIgnoreCase(redirectHostName, serverHostName, clientHostName)) {
log.debug("Shibboleth redirecting to " + redirectUrl);
response.sendRedirect(redirectUrl);
} else {
log.error("Invalid Shibboleth redirectURL=" + redirectUrl +
". URL doesn't match hostname of server or UI!");
response.sendError(HttpServletResponse.SC_BAD_REQUEST,
"Invalid redirectURL! Must match server or ui hostname.");
}
}
}
@@ -35,13 +35,14 @@ public class DiscoverFacetResultsConverter {
@Autowired
private SearchFilterToAppliedFilterConverter searchFilterToAppliedFilterConverter;
public FacetResultsRest convert(Context context, String facetName, String prefix, String query, String dsoType,
String dsoScope, List<SearchFilter> searchFilters, DiscoverResult searchResult,
DiscoveryConfiguration configuration, Pageable page, Projection projection) {
public FacetResultsRest convert(Context context, String facetName, String prefix, String query,
List<String> dsoTypes, String dsoScope, List<SearchFilter> searchFilters,
DiscoverResult searchResult, DiscoveryConfiguration configuration, Pageable page,
Projection projection) {
FacetResultsRest facetResultsRest = new FacetResultsRest();
facetResultsRest.setProjection(projection);
setRequestInformation(context, facetName, prefix, query, dsoType, dsoScope, searchFilters, searchResult,
setRequestInformation(context, facetName, prefix, query, dsoTypes, dsoScope, searchFilters, searchResult,
configuration, facetResultsRest, page, projection);
addToFacetResultList(facetName, searchResult, facetResultsRest, configuration, page, projection);

@@ -72,14 +73,14 @@ public class DiscoverFacetResultsConverter {
return facetValueConverter.convert(value, projection);
}
private void setRequestInformation(Context context, String facetName, String prefix, String query, String dsoType,
String dsoScope, List<SearchFilter> searchFilters, DiscoverResult searchResult,
DiscoveryConfiguration configuration, FacetResultsRest facetResultsRest,
Pageable page, Projection projection) {
private void setRequestInformation(Context context, String facetName, String prefix, String query,
List<String> dsoTypes, String dsoScope, List<SearchFilter> searchFilters,
DiscoverResult searchResult, DiscoveryConfiguration configuration,
FacetResultsRest facetResultsRest, Pageable page, Projection projection) {
facetResultsRest.setQuery(query);
facetResultsRest.setPrefix(prefix);
facetResultsRest.setScope(dsoScope);
facetResultsRest.setDsoType(dsoType);
facetResultsRest.setDsoTypes(dsoTypes);
facetResultsRest.setFacetEntry(convertFacetEntry(facetName, searchResult, configuration, page, projection));

@@ -38,7 +38,7 @@ public class DiscoverFacetsConverter {
@Autowired
private SearchService searchService;
public SearchResultsRest convert(Context context, String query, String dsoType, String configurationName,
public SearchResultsRest convert(Context context, String query, List<String> dsoTypes, String configurationName,
String dsoScope, List<SearchFilter> searchFilters, final Pageable page,
DiscoveryConfiguration configuration, DiscoverResult searchResult,
Projection projection) {

@@ -46,7 +46,7 @@ public class DiscoverFacetsConverter {
SearchResultsRest searchResultsRest = new SearchResultsRest();
searchResultsRest.setProjection(projection);
setRequestInformation(context, query, dsoType, configurationName, dsoScope, searchFilters, page,
setRequestInformation(context, query, dsoTypes, configurationName, dsoScope, searchFilters, page,
searchResultsRest);
addFacetValues(context, searchResult, searchResultsRest, configuration, projection);

@@ -129,13 +129,13 @@ public class DiscoverFacetsConverter {
}
}
private void setRequestInformation(final Context context, final String query, final String dsoType,
private void setRequestInformation(final Context context, final String query, final List<String> dsoTypes,
final String configurationName, final String scope,
final List<SearchFilter> searchFilters, final Pageable page,
final SearchResultsRest resultsRest) {
resultsRest.setQuery(query);
resultsRest.setConfiguration(configurationName);
resultsRest.setDsoType(dsoType);
resultsRest.setDsoTypes(dsoTypes);
resultsRest.setSort(SearchResultsRest.Sorting.fromPage(page));
resultsRest.setScope(scope);

@@ -43,7 +43,7 @@ public class DiscoverResultConverter {
@Autowired
private SearchFilterToAppliedFilterConverter searchFilterToAppliedFilterConverter;
public SearchResultsRest convert(final Context context, final String query, final String dsoType,
public SearchResultsRest convert(final Context context, final String query, final List<String> dsoTypes,
final String configurationName, final String scope,
final List<SearchFilter> searchFilters, final Pageable page,
final DiscoverResult searchResult, final DiscoveryConfiguration configuration,

@@ -52,7 +52,7 @@ public class DiscoverResultConverter {
SearchResultsRest resultsRest = new SearchResultsRest();
resultsRest.setProjection(projection);
setRequestInformation(context, query, dsoType, configurationName, scope, searchFilters, page, resultsRest);
setRequestInformation(context, query, dsoTypes, configurationName, scope, searchFilters, page, resultsRest);
addSearchResults(searchResult, resultsRest, projection);

@@ -101,13 +101,13 @@ public class DiscoverResultConverter {
return null;
}
private void setRequestInformation(final Context context, final String query, final String dsoType,
private void setRequestInformation(final Context context, final String query, final List<String> dsoTypes,
final String configurationName, final String scope,
final List<SearchFilter> searchFilters, final Pageable page,
final SearchResultsRest resultsRest) {
resultsRest.setQuery(query);
resultsRest.setConfiguration(configurationName);
resultsRest.setDsoType(dsoType);
resultsRest.setDsoTypes(dsoTypes);
resultsRest.setScope(scope);
@@ -47,9 +47,9 @@ public class CollectionResourceWorkflowGroupHalLinkFactory
Map<String, Role> roles = WorkflowUtils.getCollectionRoles(collection);
UUID resourceUuid = UUID.fromString(halResource.getContent().getUuid());
for (Map.Entry<String, Role> entry : roles.entrySet()) {
list.add(buildLink("workflowGroups/" + entry.getKey(), getMethodOn()
list.add(buildLink("workflowGroups", getMethodOn()
.getWorkflowGroupForRole(resourceUuid, null, null,
entry.getKey())));
entry.getKey())).withName(entry.getKey()));
}
}

@@ -28,7 +28,7 @@ public abstract class DiscoveryRestHalLinkFactory<T> extends HalLinkFactory<T, D
public UriComponentsBuilder buildSearchBaseLink(final DiscoveryResultsRest data) {
try {
UriComponentsBuilder uriBuilder = uriBuilder(getMethodOn()
.getSearchObjects(data.getQuery(), data.getDsoType(),
.getSearchObjects(data.getQuery(), data.getDsoTypes(),
data.getScope(), data.getConfiguration(),
null, null));

@@ -43,7 +43,7 @@ public abstract class DiscoveryRestHalLinkFactory<T> extends HalLinkFactory<T, D
try {
UriComponentsBuilder uriBuilder = uriBuilder(
getMethodOn().getFacetValues(data.getFacetEntry().getName(), data.getPrefix(), data.getQuery(),
data.getDsoType(), data.getScope(), data.getConfiguration(), null, null));
data.getDsoTypes(), data.getScope(), data.getConfiguration(), null, null));
return addFilterParams(uriBuilder, data);
} catch (Exception ex) {

@@ -54,7 +54,7 @@ public abstract class DiscoveryRestHalLinkFactory<T> extends HalLinkFactory<T, D
protected UriComponentsBuilder buildSearchFacetsBaseLink(final SearchResultsRest data) {
try {
UriComponentsBuilder uriBuilder = uriBuilder(getMethodOn().getFacets(data.getQuery(), data.getDsoType(),
UriComponentsBuilder uriBuilder = uriBuilder(getMethodOn().getFacets(data.getQuery(), data.getDsoTypes(),
data.getScope(), data.getConfiguration(), null, null));
uriBuilder = addSortingParms(uriBuilder, data);

@@ -8,6 +8,7 @@
package org.dspace.app.rest.link.search;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.BooleanUtils;

@@ -39,7 +40,7 @@ public class SearchFacetEntryHalLinkFactory extends DiscoveryRestHalLinkFactory<
DiscoveryResultsRest searchData = halResource.getSearchData();
String query = searchData == null ? null : searchData.getQuery();
String dsoType = searchData == null ? null : searchData.getDsoType();
List<String> dsoType = searchData == null ? null : searchData.getDsoTypes();
String scope = searchData == null ? null : searchData.getScope();
String configuration = searchData == null ? null : searchData.getConfiguration();

@@ -27,7 +27,7 @@ public abstract class DiscoveryResultsRest extends BaseObjectRest<String> {
private List<SearchResultsRest.AppliedFilter> appliedFilters;
private SearchResultsRest.Sorting sort;
@JsonIgnore
private String dsoType;
private List<String> dsoTypes;
@JsonIgnore
private List<SearchFilter> searchFilters;
private String configuration;

@@ -52,12 +52,12 @@ public abstract class DiscoveryResultsRest extends BaseObjectRest<String> {
this.query = query;
}
public String getDsoType() {
return dsoType;
public List<String> getDsoTypes() {
return dsoTypes;
}
public void setDsoType(final String dsoType) {
this.dsoType = dsoType;
public void setDsoTypes(final List<String> dsoTypes) {
this.dsoTypes = dsoTypes;
}
public String getScope() {
@@ -27,7 +27,6 @@ public class RelationshipRest extends BaseObjectRest<Integer> {
@JsonIgnore
private UUID rightId;
private int relationshipTypeId;
private RelationshipTypeRest relationshipType;
private int leftPlace;
private int rightPlace;

@@ -90,14 +89,6 @@ public class RelationshipRest extends BaseObjectRest<Integer> {
this.rightPlace = rightPlace;
}
public int getRelationshipTypeId() {
return relationshipTypeId;
}
public void setRelationshipTypeId(int relationshipTypeId) {
this.relationshipTypeId = relationshipTypeId;
}
public String getRightwardValue() {
return rightwardValue;
}

@@ -8,11 +8,13 @@
package org.dspace.app.rest.model.hateoas;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonUnwrapped;
import org.apache.commons.lang3.StringUtils;
import org.springframework.hateoas.EntityModel;
import org.springframework.hateoas.Link;

@@ -49,6 +51,15 @@ public abstract class HALResource<T> extends EntityModel<T> {
public EntityModel<T> add(Link link) {
if (!hasLink(link.getRel())) {
return super.add(link);
} else {
String name = link.getName();
if (StringUtils.isNotBlank(name)) {
List<Link> list = this.getLinks(link.getRel());
// If a link of this name doesn't already exist in the list, add it
if (!list.stream().anyMatch((l -> StringUtils.equalsIgnoreCase(l.getName(), name)))) {
super.add(link);
}
}
}
return this;
}
@@ -7,6 +7,10 @@
*/
package org.dspace.app.rest.repository;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

@@ -145,9 +149,11 @@ public class AuthorizationRestRepository extends DSpaceRestRepository<Authorizat
@PreAuthorize("#epersonUuid==null || hasPermission(#epersonUuid, 'EPERSON', 'READ')")
@SearchRestMethod(name = "object")
public Page<AuthorizationRest> findByObject(@Parameter(value = "uri", required = true) String uri,
@Parameter(value = "eperson") UUID epersonUuid,
@Parameter(value = "eperson") UUID epersonUuid, @Parameter(value = "feature") String featureName,
Pageable pageable) throws AuthorizeException, SQLException {
Context context = obtainContext();
BaseObjectRest obj = utils.getBaseObjectRestFromUri(context, uri);
if (obj == null) {
return null;

@@ -162,11 +168,16 @@ public class AuthorizationRestRepository extends DSpaceRestRepository<Authorizat
context.switchContextUser(user);
}
List<AuthorizationFeature> features = authorizationFeatureService.findByResourceType(obj.getUniqueType());
List<Authorization> authorizations = new ArrayList<Authorization>();
for (AuthorizationFeature f : features) {
if (authorizationFeatureService.isAuthorized(context, f, obj)) {
authorizations.add(new Authorization(user, f, obj));
List<Authorization> authorizations;
if (isNotBlank(featureName)) {
authorizations = findByObjectAndFeature(context, user, obj, featureName);
} else {
List<AuthorizationFeature> features = authorizationFeatureService.findByResourceType(obj.getUniqueType());
authorizations = new ArrayList<>();
for (AuthorizationFeature f : features) {
if (authorizationFeatureService.isAuthorized(context, f, obj)) {
authorizations.add(new Authorization(user, f, obj));
}
}
}

@@ -177,57 +188,17 @@ public class AuthorizationRestRepository extends DSpaceRestRepository<Authorizat
return converter.toRestPage(authorizations, pageable, utils.obtainProjection());
}
/**
* It returns the authorization related to the requested feature if granted to the specified eperson or to the
* anonymous user. Only administrators and the user identified by the epersonUuid parameter can access this method
*
* @param uri
* the uri of the object to check the authorization against
* @param epersonUuid
* the eperson uuid to use in the authorization evaluation
* @param featureName
* limit the authorization check to only the feature identified via its name
* @param pageable
* the pagination options
* @return the list of matching authorization available for the requested user and object, filtered by feature if
* provided
* @throws AuthorizeException
* @throws SQLException
*/
@PreAuthorize("#epersonUuid==null || hasPermission(#epersonUuid, 'EPERSON', 'READ')")
@SearchRestMethod(name = "objectAndFeature")
public AuthorizationRest findByObjectAndFeature(@Parameter(value = "uri", required = true) String uri,
@Parameter(value = "eperson") UUID epersonUuid,
@Parameter(value = "feature", required = true) String featureName,
Pageable pageable) throws AuthorizeException, SQLException {
Context context = obtainContext();
BaseObjectRest obj = utils.getBaseObjectRestFromUri(context, uri);
if (obj == null) {
return null;
private List<Authorization> findByObjectAndFeature(
Context context, EPerson user, BaseObjectRest obj, String featureName
) throws SQLException {
AuthorizationFeature feature = authorizationFeatureService.find(featureName);
if (!authorizationFeatureService.isAuthorized(context, feature, obj)) {
return emptyList();
}
EPerson currUser = context.getCurrentUser();
// get the user specified in the requested parameters, can be null for anonymous
EPerson user = getUserFromRequestParameter(context, epersonUuid);
if (currUser != user) {
// Temporarily change the Context's current user in order to retrieve
// authorizations based on that user
context.switchContextUser(user);
}
AuthorizationFeature feature = authorizationFeatureService.find(featureName);
AuthorizationRest authorizationRest = null;
if (authorizationFeatureService.isAuthorized(context, feature, obj)) {
Authorization authz = new Authorization();
authz.setEperson(user);
authz.setFeature(feature);
authz.setObject(obj);
authorizationRest = converter.toRest(authz, utils.obtainProjection());
}
if (currUser != user) {
// restore the real current user
context.restoreContextUser();
}
return authorizationRest;
return singletonList(new Authorization(user, feature, obj));
}
/**
@@ -463,7 +463,7 @@ public abstract class DSpaceRestRepository<T extends RestAddressableModel, ID ex
* @throws IOException
* @throws AuthorizeException
*/
public Iterable<T> upload(HttpServletRequest request, MultipartFile uploadfile)
public Iterable<T> upload(HttpServletRequest request, List<MultipartFile> uploadfile)
throws SQLException, FileNotFoundException, IOException, AuthorizeException {
Context context = obtainContext();
Iterable<T> entity = upload(context, request, uploadfile);

@@ -486,7 +486,7 @@ public abstract class DSpaceRestRepository<T extends RestAddressableModel, ID ex
* @throws RepositoryMethodNotImplementedException
*/
protected Iterable<T> upload(Context context, HttpServletRequest request,
MultipartFile uploadfile)
List<MultipartFile> uploadfile)
throws SQLException, FileNotFoundException, IOException, AuthorizeException {
throw new RepositoryMethodNotImplementedException("No implementation found; Method not allowed!", "");
}
@@ -89,7 +89,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
return discoverConfigurationConverter.convert(discoveryConfiguration, utils.obtainProjection());
}

public SearchResultsRest getSearchObjects(final String query, final String dsoType, final String dsoScope,
public SearchResultsRest getSearchObjects(final String query, final List<String> dsoTypes, final String dsoScope,
final String configuration,
final List<SearchFilter> searchFilters, final Pageable page,
final Projection projection) {
@@ -103,7 +103,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {

try {
discoverQuery = queryBuilder
.buildQuery(context, scopeObject, discoveryConfiguration, query, searchFilters, dsoType, page);
.buildQuery(context, scopeObject, discoveryConfiguration, query, searchFilters, dsoTypes, page);
searchResult = searchService.search(context, scopeObject, discoverQuery);

} catch (SearchServiceException e) {
@@ -112,7 +112,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
}

return discoverResultConverter
.convert(context, query, dsoType, configuration, dsoScope, searchFilters, page, searchResult,
.convert(context, query, dsoTypes, configuration, dsoScope, searchFilters, page, searchResult,
discoveryConfiguration, projection);
}

@@ -130,7 +130,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
return discoverSearchSupportConverter.convert();
}

public FacetResultsRest getFacetObjects(String facetName, String prefix, String query, String dsoType,
public FacetResultsRest getFacetObjects(String facetName, String prefix, String query, List<String> dsoTypes,
String dsoScope, final String configuration, List<SearchFilter> searchFilters, Pageable page) {

Context context = obtainContext();
@@ -143,7 +143,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
DiscoverQuery discoverQuery = null;
try {
discoverQuery = queryBuilder.buildFacetQuery(context, scopeObject, discoveryConfiguration, prefix, query,
searchFilters, dsoType, page, facetName);
searchFilters, dsoTypes, page, facetName);
searchResult = searchService.search(context, scopeObject, discoverQuery);

} catch (SearchServiceException e) {
@@ -152,12 +152,12 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
}

FacetResultsRest facetResultsRest = discoverFacetResultsConverter.convert(context, facetName, prefix, query,
dsoType, dsoScope, searchFilters, searchResult, discoveryConfiguration, page,
dsoTypes, dsoScope, searchFilters, searchResult, discoveryConfiguration, page,
utils.obtainProjection());
return facetResultsRest;
}

public SearchResultsRest getAllFacets(String query, String dsoType, String dsoScope, String configuration,
public SearchResultsRest getAllFacets(String query, List<String> dsoTypes, String dsoScope, String configuration,
List<SearchFilter> searchFilters) {

Context context = obtainContext();
@@ -171,14 +171,14 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {

try {
discoverQuery = queryBuilder
.buildQuery(context, scopeObject, discoveryConfiguration, query, searchFilters, dsoType, page);
.buildQuery(context, scopeObject, discoveryConfiguration, query, searchFilters, dsoTypes, page);
searchResult = searchService.search(context, scopeObject, discoverQuery);

} catch (SearchServiceException e) {
log.error("Error while searching with Discovery", e);
}

SearchResultsRest searchResultsRest = discoverFacetsConverter.convert(context, query, dsoType,
SearchResultsRest searchResultsRest = discoverFacetsConverter.convert(context, query, dsoTypes,
configuration, dsoScope, searchFilters, page, discoveryConfiguration, searchResult,
utils.obtainProjection());

@@ -16,10 +16,6 @@ import java.util.List;
import java.util.UUID;
import javax.servlet.http.HttpServletRequest;

import gr.ekt.bte.core.TransformationEngine;
import gr.ekt.bte.core.TransformationSpec;
import gr.ekt.bte.exceptions.BadTransformationSpec;
import gr.ekt.bte.exceptions.MalformedSourceException;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.Parameter;
@@ -45,6 +41,7 @@ import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.service.BitstreamFormatService;
import org.dspace.content.service.BitstreamService;
@@ -56,14 +53,12 @@ import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.EPersonServiceImpl;
import org.dspace.event.Event;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.exception.FileMultipleOccurencesException;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.dspace.importer.external.service.ImportService;
import org.dspace.services.ConfigurationService;
import org.dspace.submit.AbstractProcessingStep;
import org.dspace.submit.lookup.DSpaceWorkspaceItemOutputGenerator;
import org.dspace.submit.lookup.MultipleSubmissionLookupDataLoader;
import org.dspace.submit.lookup.SubmissionItemDataLoader;
import org.dspace.submit.lookup.SubmissionLookupOutputGenerator;
import org.dspace.submit.lookup.SubmissionLookupService;
import org.dspace.submit.util.ItemSubmissionLookupDTO;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
@@ -73,10 +68,12 @@ import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Component;
import org.springframework.web.multipart.MultipartFile;

/**
* This is the repository responsible to manage WorkspaceItem Rest object
*
* @author Andrea Bollini (andrea.bollini at 4science.it)
* @author Pasquale Cavallo (pasquale.cavallo at 4science.it)
*/
@Component(WorkspaceItemRest.CATEGORY + "." + WorkspaceItemRest.NAME)
public class WorkspaceItemRestRepository extends DSpaceRestRepository<WorkspaceItemRest, Integer>
@@ -110,15 +107,15 @@ public class WorkspaceItemRestRepository extends DSpaceRestRepository<WorkspaceI
@Autowired
EPersonServiceImpl epersonService;

@Autowired
SubmissionLookupService submissionLookupService;

@Autowired
CollectionService collectionService;

@Autowired
AuthorizeService authorizeService;

@Autowired
ImportService importService;

@Autowired
private UriListHandlerService uriListHandlerService;

@@ -360,147 +357,87 @@ public class WorkspaceItemRestRepository extends DSpaceRestRepository<WorkspaceI

@Override
public Iterable<WorkspaceItemRest> upload(Context context, HttpServletRequest request,
MultipartFile uploadfile)
List<MultipartFile> uploadfiles)
throws SQLException, FileNotFoundException, IOException, AuthorizeException {
File file = Utils.getFile(uploadfile, "upload-loader", "filedataloader");
List<WorkspaceItemRest> results = new ArrayList<>();

String uuid = request.getParameter("owningCollection");
if (StringUtils.isBlank(uuid)) {
uuid = configurationService.getProperty("submission.default.collection");
}
Collection collection = null;
if (StringUtils.isNotBlank(uuid)) {
collection = collectionService.find(context, UUID.fromString(uuid));
} else {
collection = collectionService.findAuthorizedOptimized(context, Constants.ADD).get(0);
}

SubmissionConfig submissionConfig =
submissionConfigReader.getSubmissionConfigByCollection(collection.getHandle());
List<WorkspaceItem> result = null;
List<ImportRecord> records = new ArrayList<>();
try {
String uuid = request.getParameter("collection");
if (StringUtils.isBlank(uuid)) {
uuid = configurationService.getProperty("submission.default.collection");
}

Collection collection = null;
if (StringUtils.isNotBlank(uuid)) {
collection = collectionService.find(context, UUID.fromString(uuid));
} else {
collection = collectionService.findAuthorizedOptimized(context, Constants.ADD).get(0);
}

SubmissionConfig submissionConfig =
submissionConfigReader.getSubmissionConfigByCollection(collection.getHandle());

List<ItemSubmissionLookupDTO> tmpResult = new ArrayList<ItemSubmissionLookupDTO>();

TransformationEngine transformationEngine1 = submissionLookupService.getPhase1TransformationEngine();
TransformationSpec spec = new TransformationSpec();
// FIXME this is mostly due to the need to test. The BTE framework has an assert statement that check if the
// number of found record is less than the requested and treat 0 as is, instead, the implementation assume
// 0=unlimited this lead to test failure.
// It is unclear if BTE really respect values other than 0/MAX allowing us to put a protection against heavy
// load
spec.setNumberOfRecords(Integer.MAX_VALUE);
if (transformationEngine1 != null) {
MultipleSubmissionLookupDataLoader dataLoader =
(MultipleSubmissionLookupDataLoader) transformationEngine1.getDataLoader();

List<String> fileDataLoaders = submissionLookupService.getFileProviders();
for (String fileDataLoader : fileDataLoaders) {
dataLoader.setFile(file.getAbsolutePath(), fileDataLoader);

try {
SubmissionLookupOutputGenerator outputGenerator =
(SubmissionLookupOutputGenerator) transformationEngine1.getOutputGenerator();
outputGenerator.setDtoList(new ArrayList<ItemSubmissionLookupDTO>());
log.debug("BTE transformation is about to start!");
transformationEngine1.transform(spec);
log.debug("BTE transformation finished!");
tmpResult.addAll(outputGenerator.getDtoList());
if (!tmpResult.isEmpty()) {
//exit with the results founded on the first data provided
break;
}
} catch (BadTransformationSpec e1) {
log.error(e1.getMessage(), e1);
} catch (MalformedSourceException e1) {
log.error(e1.getMessage(), e1);
for (MultipartFile mpFile : uploadfiles) {
File file = Utils.getFile(mpFile, "upload-loader", "filedataloader");
try {
ImportRecord record = importService.getRecord(file, mpFile.getOriginalFilename());
if (record != null) {
records.add(record);
break;
}
} finally {
file.delete();
}
}
} catch (FileMultipleOccurencesException e) {
throw new UnprocessableEntityException("Too many entries in file");
} catch (Exception e) {
log.error("Error importing metadata", e);
}
WorkspaceItem source = submissionService.
createWorkspaceItem(context, getRequestService().getCurrentRequest());
merge(context, records, source);
result = new ArrayList<>();
result.add(source);

List<WorkspaceItem> result = null;

//try to ingest workspaceitems
if (!tmpResult.isEmpty()) {
TransformationEngine transformationEngine2 = submissionLookupService.getPhase2TransformationEngine();
if (transformationEngine2 != null) {
SubmissionItemDataLoader dataLoader =
(SubmissionItemDataLoader) transformationEngine2.getDataLoader();
dataLoader.setDtoList(tmpResult);
// dataLoader.setProviders()

DSpaceWorkspaceItemOutputGenerator outputGenerator =
(DSpaceWorkspaceItemOutputGenerator) transformationEngine2.getOutputGenerator();
outputGenerator.setCollection(collection);
outputGenerator.setContext(context);
outputGenerator.setFormName(submissionConfig.getSubmissionName());
outputGenerator.setDto(tmpResult.get(0));

try {
transformationEngine2.transform(spec);
result = outputGenerator.getWitems();
} catch (BadTransformationSpec e1) {
e1.printStackTrace();
} catch (MalformedSourceException e1) {
e1.printStackTrace();
}
}
}

//we have to create the workspaceitem to push the file also if nothing found before
if (result == null) {
WorkspaceItem source =
submissionService.createWorkspaceItem(context, getRequestService().getCurrentRequest());
result = new ArrayList<>();
result.add(source);
}

//perform upload of bitstream if there is exact one result and convert workspaceitem to entity rest
if (result != null && !result.isEmpty()) {
for (WorkspaceItem wi : result) {

List<ErrorRest> errors = new ArrayList<ErrorRest>();

//load bitstream into bundle ORIGINAL only if there is one result (approximately this is the
// right behaviour for pdf file but not for other bibliographic format e.g. bibtex)
if (result.size() == 1) {

for (int i = 0; i < submissionConfig.getNumberOfSteps(); i++) {
SubmissionStepConfig stepConfig = submissionConfig.getStep(i);

ClassLoader loader = this.getClass().getClassLoader();
Class stepClass;
try {
stepClass = loader.loadClass(stepConfig.getProcessingClassName());

Object stepInstance = stepClass.newInstance();
if (UploadableStep.class.isAssignableFrom(stepClass)) {
UploadableStep uploadableStep = (UploadableStep) stepInstance;
ErrorRest err = uploadableStep.upload(context, submissionService, stepConfig, wi,
uploadfile);
//perform upload of bitstream if there is exact one result and convert workspaceitem to entity rest
if (!result.isEmpty()) {
for (WorkspaceItem wi : result) {
List<ErrorRest> errors = new ArrayList<ErrorRest>();
wi.setMultipleFiles(uploadfiles.size() > 1);
//load bitstream into bundle ORIGINAL only if there is one result (approximately this is the
// right behaviour for pdf file but not for other bibliographic format e.g. bibtex)
if (result.size() == 1) {
for (int i = 0; i < submissionConfig.getNumberOfSteps(); i++) {
SubmissionStepConfig stepConfig = submissionConfig.getStep(i);
ClassLoader loader = this.getClass().getClassLoader();
Class stepClass;
try {
stepClass = loader.loadClass(stepConfig.getProcessingClassName());
Object stepInstance = stepClass.newInstance();
if (UploadableStep.class.isAssignableFrom(stepClass)) {
UploadableStep uploadableStep = (UploadableStep) stepInstance;
for (MultipartFile mpFile : uploadfiles) {
ErrorRest err = uploadableStep.upload(context,
submissionService, stepConfig, wi, mpFile);
if (err != null) {
errors.add(err);
}
}

} catch (Exception e) {
log.error(e.getMessage(), e);
}
} catch (Exception e) {
log.error(e.getMessage(), e);
}
}
WorkspaceItemRest wsi = converter.toRest(wi, utils.obtainProjection());
if (result.size() == 1) {
if (!errors.isEmpty()) {
wsi.getErrors().addAll(errors);
}
}
results.add(wsi);
}
WorkspaceItemRest wsi = converter.toRest(wi, utils.obtainProjection());
if (result.size() == 1) {
if (!errors.isEmpty()) {
wsi.getErrors().addAll(errors);
}
}
results.add(wsi);
}
} finally {
file.delete();
}
return results;
}
@@ -551,4 +488,24 @@ public class WorkspaceItemRestRepository extends DSpaceRestRepository<WorkspaceI
public Class<Integer> getPKClass() {
return Integer.class;
}

private void merge(Context context, List<ImportRecord> records, WorkspaceItem item) throws SQLException {
for (MetadataValue metadataValue : itemService.getMetadata(
item.getItem(), Item.ANY, Item.ANY, Item.ANY, Item.ANY)) {
itemService.clearMetadata(context, item.getItem(),
metadataValue.getMetadataField().getMetadataSchema().getNamespace(),
metadataValue.getMetadataField().getElement(),
metadataValue.getMetadataField().getQualifier(),
metadataValue.getLanguage());
}
for (ImportRecord record : records) {
if (record != null && record.getValueList() != null) {
for (MetadatumDTO metadataValue : record.getValueList()) {
itemService.addMetadata(context, item.getItem(), metadataValue.getSchema(),
metadataValue.getElement(), metadataValue.getQualifier(), null,
metadataValue.getValue());
}
}
}
}
}

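Editor's sketch (not part of the commit): the new upload signature accepts a list of files, so a client can submit several bibliographic files in one multipart request. The endpoint path, the "file" part name, and the MockMvc helpers below are assumptions based on common DSpace 7 integration-test usage, not taken from this diff.

import org.springframework.mock.web.MockMultipartFile;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

// two hypothetical payloads; real tests would load fixture files instead
MockMultipartFile bibtexFile = new MockMultipartFile("file", "entries.bib",
        "application/x-bibtex", "@article{a, title={t}}".getBytes());
MockMultipartFile pdfFile = new MockMultipartFile("file", "article.pdf",
        "application/pdf", new byte[] {0x25, 0x50, 0x44, 0x46});

// both parts travel in the same request and reach upload() as one List<MultipartFile>
getClient(authToken).perform(multipart("/api/submission/workspaceitems")
        .file(bibtexFile)
        .file(pdfFile))
        .andExpect(status().isOk());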
@@ -7,6 +7,10 @@
*/
package org.dspace.app.rest.utils;

import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
@@ -65,14 +69,47 @@ public class DiscoverQueryBuilder implements InitializingBean {
pageSizeLimit = configurationService.getIntProperty("rest.search.max.results", 100);
}

/**
* Build a discovery query
*
* @param context the DSpace context
* @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query
* @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query
* @param dsoType only include search results with this type
* @param page the pageable for this discovery query
*/
public DiscoverQuery buildQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration,
String query, List<SearchFilter> searchFilters,
String dsoType, Pageable page)
throws DSpaceBadRequestException {

List<String> dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList();

return buildQuery(context, scope, discoveryConfiguration, query, searchFilters, dsoTypes, page);
}

/**
* Build a discovery query
*
* @param context the DSpace context
* @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query
* @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query
* @param dsoTypes only include search results with one of these types
* @param page the pageable for this discovery query
*/
public DiscoverQuery buildQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration,
String query, List<SearchFilter> searchFilters,
List<String> dsoTypes, Pageable page)
throws DSpaceBadRequestException {

DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters,
dsoType);
dsoTypes);

//When all search criteria are set, configure facet results
addFaceting(context, scope, queryArgs, discoveryConfiguration);
@@ -98,14 +135,52 @@ public class DiscoverQueryBuilder implements InitializingBean {
}
}

/**
* Create a discovery facet query.
*
* @param context the DSpace context
* @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query
* @param prefix limit the facets results to those starting with the given prefix.
* @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query
* @param dsoType only include search results with this type
* @param page the pageable for this discovery query
* @param facetName the facet field
*/
public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration,
String prefix, String query, List<SearchFilter> searchFilters,
String dsoType, Pageable page, String facetName)
throws DSpaceBadRequestException {

List<String> dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList();

return buildFacetQuery(
context, scope, discoveryConfiguration, prefix, query, searchFilters, dsoTypes, page, facetName);
}

/**
* Create a discovery facet query.
*
* @param context the DSpace context
* @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query
* @param prefix limit the facets results to those starting with the given prefix.
* @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query
* @param dsoTypes only include search results with one of these types
* @param page the pageable for this discovery query
* @param facetName the facet field
*/
public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration,
String prefix, String query, List<SearchFilter> searchFilters,
List<String> dsoTypes, Pageable page, String facetName)
throws DSpaceBadRequestException {

DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters,
dsoType);
dsoTypes);

//When all search criteria are set, configure facet results
addFacetingForFacets(context, scope, prefix, queryArgs, discoveryConfiguration, facetName, page);
@@ -170,7 +245,7 @@ public class DiscoverQueryBuilder implements InitializingBean {

private DiscoverQuery buildCommonDiscoverQuery(Context context, DiscoveryConfiguration discoveryConfiguration,
String query,
List<SearchFilter> searchFilters, String dsoType)
List<SearchFilter> searchFilters, List<String> dsoTypes)
throws DSpaceBadRequestException {
DiscoverQuery queryArgs = buildBaseQueryForConfiguration(discoveryConfiguration);

@@ -182,10 +257,13 @@ public class DiscoverQueryBuilder implements InitializingBean {
queryArgs.setQuery(query);
}

//Limit results to DSO type
if (StringUtils.isNotBlank(dsoType)) {
queryArgs.setDSpaceObjectFilter(getDsoType(dsoType));
//Limit results to DSO types
if (isNotEmpty(dsoTypes)) {
dsoTypes.stream()
.map(this::getDsoType)
.forEach(queryArgs::addDSpaceObjectFilter);
}

return queryArgs;
}

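Editor's sketch (not part of the commit): with the list-based overload added above, a caller can restrict one discovery query to several DSpace object types at once, while the old single-type signature simply wraps its argument in a singleton list and delegates. The literal type names and the PageRequest below are illustrative assumptions.

// assumed caller-side usage of the new overload
List<String> dsoTypes = Arrays.asList("Item", "Collection"); // placeholder type names
DiscoverQuery discoverQuery = queryBuilder.buildQuery(context, scope, discoveryConfiguration,
        "climate", searchFilters, dsoTypes, PageRequest.of(0, 20));
// each type is mapped through getDsoType(...) and added as a separate object filter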
@@ -19,15 +19,22 @@

<context:annotation-config/> <!-- allows us to use spring annotations in beans -->

<bean class="org.dspace.discovery.SolrServiceImpl" id="org.dspace.discovery.SearchService"/>
<bean class="org.dspace.discovery.SolrServiceImpl"
id="org.dspace.discovery.SearchService"/>

<alias name="org.dspace.discovery.SearchService" alias="org.dspace.discovery.IndexingService"/>
<alias name="org.dspace.discovery.SearchService"
alias="org.dspace.discovery.IndexingService"/>

<bean class="org.dspace.discovery.MockSolrSearchCore" autowire-candidate="true"/>
<bean class="org.dspace.discovery.MockSolrSearchCore"
autowire-candidate="true"/>

<!--<bean class="org.dspace.discovery.SolrServiceIndexOutputPlugin" id="solrServiceIndexOutputPlugin"/>-->
<!--<bean class="org.dspace.discovery.SolrServiceIndexOutputPlugin"
id="solrServiceIndexOutputPlugin"/>-->

<!-- Statistics services are both lazy loaded (by name), as you are likely just using ONE of them and not both -->
<bean id="solrLoggerService" class="org.dspace.statistics.MockSolrLoggerServiceImpl" lazy-init="true"/>
<!-- Statistics services are both lazy loaded (by name), as you are likely
just using ONE of them and not both -->
<bean id="solrLoggerService"
class="org.dspace.statistics.MockSolrLoggerServiceImpl"
lazy-init="true"/>

</beans>

@@ -29,9 +29,9 @@ import com.lyncode.xoai.dataprovider.services.impl.BaseDateProvider;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.Configuration;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.ContextConfiguration;
import org.apache.commons.lang3.time.DateUtils;
import org.dspace.app.rest.builder.CollectionBuilder;
import org.dspace.app.rest.builder.CommunityBuilder;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.content.Community;
import org.dspace.services.ConfigurationService;
import org.dspace.xoai.services.api.EarliestDateResolver;
@@ -76,7 +76,7 @@ public class OAIpmhIT extends AbstractControllerIntegrationTest {
private EarliestDateResolver earliestDateResolver;

// XOAI's BaseDateProvider (used for date-based testing below)
private static BaseDateProvider baseDateProvider = new BaseDateProvider();
private static final BaseDateProvider baseDateProvider = new BaseDateProvider();

// Spy on the current XOAIManagerResolver bean, to allow us to change behavior of XOAIManager in tests
// See also: createMockXOAIManager() method
@@ -278,6 +278,6 @@ public class OAIpmhIT extends AbstractControllerIntegrationTest {
* @throws ConfigurationException
*/
private XOAIManager createMockXOAIManager(Configuration xoaiConfig) throws ConfigurationException {
return new XOAIManager(filterResolver, resourceResolver, xoaiConfig);
return new XOAIManager(filterResolver, resourceResolver, xoaiConfig);
}
}

@@ -12,10 +12,10 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.xpath;

import org.dspace.app.rest.builder.CollectionBuilder;
import org.dspace.app.rest.builder.CommunityBuilder;
import org.dspace.app.rest.builder.ItemBuilder;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
@@ -210,7 +210,7 @@ public class OpenSearchControllerIT extends AbstractControllerIntegrationTest {
.andExpect(xpath("OpenSearchDescription/LongName").string("DSpace at My University"))
.andExpect(xpath("OpenSearchDescription/Description")
.string("DSpace at My University DSpace repository")
)
)
;
/* Expected response for the service document is:
<?xml version="1.0" encoding="UTF-8"?>

@@ -14,8 +14,8 @@ import static org.mockito.Mockito.doReturn;

import java.net.URI;

import org.dspace.app.rest.builder.CommunityBuilder;
import org.dspace.app.rest.test.AbstractWebClientIntegrationTest;
import org.dspace.builder.CommunityBuilder;
import org.dspace.content.Community;
import org.dspace.content.service.SiteService;
import org.dspace.rdf.RDFUtil;

@@ -10,11 +10,11 @@ package org.dspace.app.rest;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

import org.dspace.app.rest.builder.CollectionBuilder;
import org.dspace.app.rest.builder.CommunityBuilder;
import org.dspace.app.rest.builder.GroupBuilder;
import org.dspace.app.rest.builder.ItemBuilder;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.GroupBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;

@@ -30,17 +30,17 @@ import javax.servlet.http.Cookie;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.codec.CharEncoding;
import org.apache.commons.io.IOUtils;
import org.dspace.app.rest.builder.BitstreamBuilder;
import org.dspace.app.rest.builder.BundleBuilder;
import org.dspace.app.rest.builder.CollectionBuilder;
import org.dspace.app.rest.builder.CommunityBuilder;
import org.dspace.app.rest.builder.EPersonBuilder;
import org.dspace.app.rest.builder.GroupBuilder;
import org.dspace.app.rest.builder.ItemBuilder;
import org.dspace.app.rest.matcher.AuthenticationStatusMatcher;
import org.dspace.app.rest.matcher.EPersonMatcher;
import org.dspace.app.rest.matcher.HalMatcher;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.builder.BitstreamBuilder;
import org.dspace.builder.BundleBuilder;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.EPersonBuilder;
import org.dspace.builder.GroupBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
@@ -70,12 +70,14 @@ public class AuthenticationRestControllerIT extends AbstractControllerIntegratio

public static final String[] PASS_ONLY = {"org.dspace.authenticate.PasswordAuthentication"};
public static final String[] SHIB_ONLY = {"org.dspace.authenticate.ShibAuthentication"};
public static final String[] SHIB_AND_PASS =
{"org.dspace.authenticate.ShibAuthentication",
"org.dspace.authenticate.PasswordAuthentication"};
public static final String[] SHIB_AND_IP =
{"org.dspace.authenticate.IPAuthentication",
"org.dspace.authenticate.ShibAuthentication"};
public static final String[] SHIB_AND_PASS = {
"org.dspace.authenticate.ShibAuthentication",
"org.dspace.authenticate.PasswordAuthentication"
};
public static final String[] SHIB_AND_IP = {
"org.dspace.authenticate.IPAuthentication",
"org.dspace.authenticate.ShibAuthentication"
};

@Before
public void setup() throws Exception {
@@ -406,7 +408,7 @@ public class AuthenticationRestControllerIT extends AbstractControllerIntegratio

@Test
public void testLoginGetRequest() throws Exception {
getClient().perform(get("/api/authn/login")
getClient().perform(get("/api/authn/login")
.param("user", eperson.getEmail())
.param("password", password))
.andExpect(status().isMethodNotAllowed());
@@ -721,8 +723,8 @@ public class AuthenticationRestControllerIT extends AbstractControllerIntegratio

//Check if WWW-Authenticate header contains only password
getClient().perform(get("/api/authn/status").header("Referer", "http://my.uni.edu"))
.andExpect(status().isOk())
.andExpect(header().string("WWW-Authenticate",
.andExpect(status().isOk())
.andExpect(header().string("WWW-Authenticate",
"password realm=\"DSpace REST API\""));

//Check if a shibboleth authentication fails
@@ -730,7 +732,6 @@ public class AuthenticationRestControllerIT extends AbstractControllerIntegratio
.requestAttr("SHIB-MAIL", eperson.getEmail())
.requestAttr("SHIB-SCOPED-AFFILIATION", "faculty;staff"))
.andExpect(status().isUnauthorized());

}

@Test

@@ -16,6 +16,7 @@ import java.util.List;
import java.util.Set;

import org.apache.commons.lang3.ArrayUtils;
import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.app.rest.authorization.AlwaysFalseFeature;
import org.dspace.app.rest.authorization.AlwaysThrowExceptionFeature;
import org.dspace.app.rest.authorization.AlwaysTrueFeature;
@@ -26,7 +27,6 @@ import org.dspace.app.rest.converter.SiteConverter;
import org.dspace.app.rest.model.CollectionRest;
import org.dspace.app.rest.model.SiteRest;
import org.dspace.app.rest.projection.DefaultProjection;
import org.dspace.app.rest.test.AbstractIntegrationTestWithDatabase;
import org.dspace.app.rest.utils.DSpaceConfigurationInitializer;
import org.dspace.app.rest.utils.DSpaceKernelInitializer;
import org.dspace.content.Site;
@@ -77,7 +77,7 @@ public class AuthorizationFeatureServiceIT extends AbstractIntegrationTestWithDa
assertThat("We have at least our 7 mock features for testing",
authzFeatureServiceFindAll.size(), greaterThanOrEqualTo(7));

Set<String> featureNames = new HashSet<String>();
Set<String> featureNames = new HashSet<>();
for (AuthorizationFeature f : authzFeatureServiceFindAll) {
featureNames.add(f.getName());
}

@@ -7,8 +7,12 @@
*/
package org.dspace.app.rest;

import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@@ -28,9 +32,6 @@ import org.dspace.app.rest.authorization.TrueForAdminsFeature;
import org.dspace.app.rest.authorization.TrueForLoggedUsersFeature;
import org.dspace.app.rest.authorization.TrueForTestUsersFeature;
import org.dspace.app.rest.authorization.TrueForUsersInGroupTestFeature;
import org.dspace.app.rest.builder.CommunityBuilder;
import org.dspace.app.rest.builder.EPersonBuilder;
import org.dspace.app.rest.builder.GroupBuilder;
import org.dspace.app.rest.converter.CommunityConverter;
import org.dspace.app.rest.converter.EPersonConverter;
import org.dspace.app.rest.converter.SiteConverter;
@@ -43,6 +44,9 @@ import org.dspace.app.rest.model.SiteRest;
import org.dspace.app.rest.projection.DefaultProjection;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.app.rest.utils.Utils;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.EPersonBuilder;
import org.dspace.builder.GroupBuilder;
import org.dspace.content.Community;
import org.dspace.content.Site;
import org.dspace.content.factory.ContentServiceFactory;

@@ -982,94 +986,153 @@ public class AuthorizationRestRepositoryIT extends AbstractControllerIntegration
String adminToken = getAuthToken(admin.getEmail(), password);

// verify that it works for administrators - with eperson parameter
getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
.param("uri", comUri)
.param("projection", "level")
.param("embedLevelDepth", "1")
.param("feature", alwaysTrue.getName())
.param("eperson", admin.getID().toString()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("authorization")))
.andExpect(jsonPath("$._embedded.feature.id", is(alwaysTrue.getName())))
.andExpect(jsonPath("$.id", Matchers.is(admin.getID().toString() + "_" + alwaysTrue.getName() + "_"
+ comRest.getUniqueType() + "_" + comRest.getId())));
.andExpect(jsonPath("$.page.totalElements", is(1)))
.andExpect(jsonPath("$._embedded.authorizations", contains(
allOf(
hasJsonPath("$.id", is(admin.getID().toString() + "_" + alwaysTrue.getName() + "_"
+ comRest.getUniqueType() + "_" + comRest.getId())),
hasJsonPath("$.type", is("authorization")),
hasJsonPath("$._embedded.feature.id", is(alwaysTrue.getName())),
hasJsonPath("$._embedded.eperson.id", is(admin.getID().toString()))
)
)));

// verify that it works for administrators - without eperson parameter
getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
.param("uri", comUri)
.param("projection", "level")
.param("embedLevelDepth", "1")
.param("feature", alwaysTrue.getName()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("authorization")))
.andExpect(jsonPath("$._embedded.feature.id", is(alwaysTrue.getName())))
.andExpect(jsonPath("$.id", Matchers.is(admin.getID().toString() + "_" + alwaysTrue.getName() + "_"
+ comRest.getUniqueType() + "_" + comRest.getId())));
.andExpect(jsonPath("$.page.totalElements", is(1)))
.andExpect(jsonPath("$._embedded.authorizations", contains(
allOf(
hasJsonPath("$.id", is(
admin.getID().toString() + "_"
+ alwaysTrue.getName() + "_"
+ comRest.getUniqueType() + "_" + comRest.getId()
)),
hasJsonPath("$.type", is("authorization")),
hasJsonPath("$._embedded.feature.id", is(alwaysTrue.getName())),
hasJsonPath("$._embedded.eperson.id", is(admin.getID().toString()))
)
)));

String epersonToken = getAuthToken(eperson.getEmail(), password);

// verify that it works for normal loggedin users - with eperson parameter
getClient(epersonToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
getClient(epersonToken).perform(get("/api/authz/authorizations/search/object")
.param("uri", comUri)
.param("projection", "level")
.param("embedLevelDepth", "1")
.param("feature", alwaysTrue.getName())
.param("eperson", eperson.getID().toString()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("authorization")))
.andExpect(jsonPath("$._embedded.feature.id", is(alwaysTrue.getName())))
.andExpect(jsonPath("$.id", Matchers.is(eperson.getID().toString() + "_" + alwaysTrue.getName() + "_"
+ comRest.getUniqueType() + "_" + comRest.getId())));
.andExpect(jsonPath("$.page.totalElements", is(1)))
.andExpect(jsonPath("$._embedded.authorizations", contains(
allOf(
hasJsonPath("$.id", is(
eperson.getID().toString() + "_"
+ alwaysTrue.getName() + "_"
+ comRest.getUniqueType() + "_" + comRest.getId()
)),
hasJsonPath("$.type", is("authorization")),
hasJsonPath("$._embedded.feature.id", is(alwaysTrue.getName())),
hasJsonPath("$._embedded.eperson.id", is(eperson.getID().toString()))
)
)));

// verify that it works for normal loggedin users - without eperson parameter
getClient(epersonToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
getClient(epersonToken).perform(get("/api/authz/authorizations/search/object")
.param("uri", comUri)
.param("projection", "level")
.param("embedLevelDepth", "1")
.param("feature", alwaysTrue.getName()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("authorization")))
.andExpect(jsonPath("$._embedded.feature.id", is(alwaysTrue.getName())))
.andExpect(jsonPath("$.id", Matchers.is(eperson.getID().toString() + "_" + alwaysTrue.getName() + "_"
+ comRest.getUniqueType() + "_" + comRest.getId())));
.andExpect(status().isOk())
.andExpect(jsonPath("$.page.totalElements", is(1)))
.andExpect(jsonPath("$._embedded.authorizations", contains(
allOf(
hasJsonPath("$.id", is(
eperson.getID().toString() + "_"
+ alwaysTrue.getName() + "_"
+ comRest.getUniqueType() + "_" + comRest.getId()
)),
hasJsonPath("$.type", is("authorization")),
hasJsonPath("$._embedded.feature.id", is(alwaysTrue.getName())),
hasJsonPath("$._embedded.eperson.id", is(eperson.getID().toString()))
)
)));

// verify that it works for administators inspecting other users - by using the eperson parameter
getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
.param("uri", comUri)
.param("projection", "level")
.param("embedLevelDepth", "1")
.param("feature", alwaysTrue.getName())
.param("eperson", eperson.getID().toString()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("authorization")))
.andExpect(jsonPath("$._embedded.feature.id", is(alwaysTrue.getName())))
.andExpect(jsonPath("$.id", Matchers.is(eperson.getID().toString() + "_" + alwaysTrue.getName() + "_"
+ comRest.getUniqueType() + "_" + comRest.getId())));
.andExpect(jsonPath("$.page.totalElements", is(1)))
.andExpect(jsonPath("$._embedded.authorizations", contains(
allOf(
hasJsonPath("$.id", is(
eperson.getID().toString() + "_"
+ alwaysTrue.getName() + "_"
+ comRest.getUniqueType() + "_" + comRest.getId()
)),
hasJsonPath("$.type", is("authorization")),
hasJsonPath("$._embedded.feature.id", is(alwaysTrue.getName())),
hasJsonPath("$._embedded.eperson.id", is(eperson.getID().toString()))
)
)));

// verify that it works for administators inspecting other users - by assuming login
getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
.param("uri", comUri)
.param("projection", "level")
.param("embedLevelDepth", "1")
.param("feature", alwaysTrue.getName())
.header("X-On-Behalf-Of", eperson.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("authorization")))
.andExpect(jsonPath("$._embedded.feature.id", is(alwaysTrue.getName())))
.andExpect(jsonPath("$.id", Matchers.is(eperson.getID().toString() + "_" + alwaysTrue.getName() + "_"
+ comRest.getUniqueType() + "_" + comRest.getId())));
.andExpect(jsonPath("$.page.totalElements", is(1)))
.andExpect(jsonPath("$._embedded.authorizations", contains(
allOf(
hasJsonPath("$.id", is(
eperson.getID().toString() + "_"
+ alwaysTrue.getName() + "_"
+ comRest.getUniqueType() + "_" + comRest.getId()
)),
hasJsonPath("$.type", is("authorization")),
hasJsonPath("$._embedded.feature.id", is(alwaysTrue.getName())),
hasJsonPath("$._embedded.eperson.id", is(eperson.getID().toString()))
)
)));

// verify that it works for anonymous users
getClient().perform(get("/api/authz/authorizations/search/objectAndFeature")
getClient().perform(get("/api/authz/authorizations/search/object")
.param("uri", comUri)
.param("projection", "level")
.param("embedLevelDepth", "1")
.param("feature", alwaysTrue.getName()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("authorization")))
.andExpect(jsonPath("$._embedded.feature.id", is(alwaysTrue.getName())))
.andExpect(jsonPath("$.id",Matchers.is(alwaysTrue.getName() + "_"
+ comRest.getUniqueType() + "_" + comRest.getId())));
.andExpect(status().isOk())
.andExpect(jsonPath("$.page.totalElements", is(1)))
.andExpect(jsonPath("$._embedded.authorizations", contains(
allOf(
hasJsonPath("$.id", is(
alwaysTrue.getName() + "_"
+ comRest.getUniqueType() + "_" + comRest.getId()
)),
hasJsonPath("$.type", is("authorization")),
hasJsonPath("$._embedded.feature.id", is(alwaysTrue.getName())),
hasJsonPath("$._embedded.eperson", nullValue())
)
)));
}

@Test
@@ -1086,52 +1149,52 @@ public class AuthorizationRestRepositoryIT extends AbstractControllerIntegration
String adminToken = getAuthToken(admin.getEmail(), password);

// verify that it works for administrators - with eperson parameter
getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
.param("uri", siteUri)
.param("feature", alwaysFalse.getName())
.param("eperson", admin.getID().toString()))
.andExpect(status().isNoContent());
.andExpect(jsonPath("$.page.totalElements", is(0)));

// verify that it works for administrators - without eperson parameter
getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
.param("uri", siteUri)
.param("feature", alwaysFalse.getName()))
.andExpect(status().isNoContent());
.andExpect(jsonPath("$.page.totalElements", is(0)));

String epersonToken = getAuthToken(eperson.getEmail(), password);

// verify that it works for normal loggedin users - with eperson parameter
getClient(epersonToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
getClient(epersonToken).perform(get("/api/authz/authorizations/search/object")
.param("uri", siteUri)
.param("feature", trueForAdmins.getName())
.param("eperson", eperson.getID().toString()))
.andExpect(status().isNoContent());
.andExpect(jsonPath("$.page.totalElements", is(0)));

// verify that it works for normal loggedin users - without eperson parameter
getClient(epersonToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
getClient(epersonToken).perform(get("/api/authz/authorizations/search/object")
.param("uri", siteUri)
.param("feature", trueForAdmins.getName()))
.andExpect(status().isNoContent());
.andExpect(jsonPath("$.page.totalElements", is(0)));

// verify that it works for administators inspecting other users - by using the eperson parameter
getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
.param("uri", siteUri)
.param("feature", trueForAdmins.getName())
.param("eperson", eperson.getID().toString()))
.andExpect(status().isNoContent());
.andExpect(jsonPath("$.page.totalElements", is(0)));

// verify that it works for administators inspecting other users - by assuming login
getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
.param("uri", siteUri)
.param("feature", trueForAdmins.getName())
.header("X-On-Behalf-Of", eperson.getID()))
.andExpect(status().isNoContent());
.andExpect(jsonPath("$.page.totalElements", is(0)));

// verify that it works for anonymous users
getClient().perform(get("/api/authz/authorizations/search/objectAndFeature")
getClient().perform(get("/api/authz/authorizations/search/object")
.param("uri", siteUri)
.param("feature", trueForLoggedUsers.getName()))
.andExpect(status().isNoContent());
.andExpect(jsonPath("$.page.totalElements", is(0)));
}

@Test
@@ -1153,98 +1216,99 @@ public class AuthorizationRestRepositoryIT extends AbstractControllerIntegration
|
||||
String adminToken = getAuthToken(admin.getEmail(), password);
|
||||
|
||||
// verify that it works for administrators, no result - with eperson parameter
|
||||
getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
|
||||
getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
|
||||
.param("uri", wrongSiteUri)
|
||||
.param("feature", alwaysTrue.getName())
|
||||
.param("eperson", admin.getID().toString()))
|
||||
.andExpect(status().isNoContent());
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.page.totalElements", is(0)));
|
||||
|
||||
getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
|
||||
getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
|
||||
.param("uri", siteUri)
|
||||
.param("feature", "not-existing-feature")
|
||||
.param("eperson", admin.getID().toString()))
|
||||
.andExpect(status().isNoContent());
|
||||
.andExpect(jsonPath("$.page.totalElements", is(0)));
|
||||
|
||||
// verify that it works for administrators, no result - without eperson parameter
|
||||
getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
|
||||
getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
|
||||
.param("uri", wrongSiteUri)
|
||||
.param("feature", alwaysTrue.getName()))
|
||||
.andExpect(status().isNoContent());
|
||||
.andExpect(jsonPath("$.page.totalElements", is(0)));
|
||||
|
||||
getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
|
||||
getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
|
||||
.param("uri", siteUri)
|
||||
.param("feature", "not-existing-feature"))
|
||||
.andExpect(status().isNoContent());
|
||||
.andExpect(jsonPath("$.page.totalElements", is(0)));
|
||||
|
||||
String epersonToken = getAuthToken(eperson.getEmail(), password);
|
||||
|
||||
// verify that it works for normal loggedin users - with eperson parameter
|
||||
getClient(epersonToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
|
||||
getClient(epersonToken).perform(get("/api/authz/authorizations/search/object")
|
||||
.param("uri", wrongSiteUri)
|
||||
.param("feature", alwaysTrue.getName())
|
||||
.param("eperson", eperson.getID().toString()))
|
||||
.andExpect(status().isNoContent());
|
||||
                .andExpect(jsonPath("$.page.totalElements", is(0)));

        getClient(epersonToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(epersonToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", "not-existing-feature")
                .param("eperson", eperson.getID().toString()))
                .andExpect(status().isNoContent());
                .andExpect(jsonPath("$.page.totalElements", is(0)));

        // verify that it works for normal loggedin users - without eperson parameter
        getClient(epersonToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(epersonToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", wrongSiteUri)
                .param("feature", alwaysTrue.getName()))
                .andExpect(status().isNoContent());
                .andExpect(jsonPath("$.page.totalElements", is(0)));

        getClient(epersonToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(epersonToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", "not-existing-feature"))
                .andExpect(status().isNoContent());
                .andExpect(jsonPath("$.page.totalElements", is(0)));

        // verify that it works for administrators inspecting other users - by using the eperson parameter
        getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", wrongSiteUri)
                .param("feature", alwaysTrue.getName())
                .param("eperson", eperson.getID().toString()))
                .andExpect(status().isNoContent());
                .andExpect(jsonPath("$.page.totalElements", is(0)));

        getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", "not-existing-feature")
                .param("eperson", eperson.getID().toString()))
                .andExpect(status().isNoContent());
                .andExpect(jsonPath("$.page.totalElements", is(0)));

        // verify that it works for administrators inspecting other users - by assuming login
        getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", wrongSiteUri)
                .param("feature", alwaysTrue.getName())
                .header("X-On-Behalf-Of", eperson.getID()))
                .andExpect(status().isNoContent());
                .andExpect(jsonPath("$.page.totalElements", is(0)));

        getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", "not-existing-feature")
                .header("X-On-Behalf-Of", eperson.getID()))
                .andExpect(status().isNoContent());
                .andExpect(jsonPath("$.page.totalElements", is(0)));

        // verify that it works for anonymous users
        getClient().perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient().perform(get("/api/authz/authorizations/search/object")
                .param("uri", wrongSiteUri)
                .param("feature", alwaysTrue.getName()))
                .andExpect(status().isNoContent());
                .andExpect(jsonPath("$.page.totalElements", is(0)));

        getClient().perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient().perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", "not-existing-feature"))
                .andExpect(status().isNoContent());
                .andExpect(jsonPath("$.page.totalElements", is(0)));
    }
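
Editor's note: the hunks above fold the old `/api/authz/authorizations/search/objectAndFeature` search into the consolidated `/api/authz/authorizations/search/object` endpoint, which now also takes a `feature` parameter, and they replace the bare 204 No Content expectation with an empty result page. A minimal MockMvc sketch of the two outcomes, assuming the fixtures already defined in this test class (`siteUri`, `alwaysTrue`, `epersonToken`, `getClient`) and the same static imports the class already uses; the method name and the explicit 200 OK assertion are illustrative additions, not part of this patch:

```java
@Test
public void findByObjectWithFeatureSketch() throws Exception {
    // Feature the eperson holds on the Site: the search succeeds,
    // mirroring the status().isOk() expectations used later in this class.
    getClient(epersonToken).perform(get("/api/authz/authorizations/search/object")
            .param("uri", siteUri)
            .param("feature", alwaysTrue.getName()))
            .andExpect(status().isOk());

    // Feature that cannot match: no longer 204 No Content, but an empty page.
    getClient(epersonToken).perform(get("/api/authz/authorizations/search/object")
            .param("uri", siteUri)
            .param("feature", "not-existing-feature"))
            .andExpect(jsonPath("$.page.totalElements", is(0)));
}
```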

    @Test
    /**
     * Verify that findByObject returns the 400 Bad Request response for invalid or missing URI or feature (required
     * parameters)
     *
     *
     * @throws Exception
     */
    public void findByObjectAndFeatureBadRequestTest() throws Exception {
@@ -1266,34 +1330,34 @@ public class AuthorizationRestRepositoryIT extends AbstractControllerIntegration
        log.debug("findByObjectAndFeatureBadRequestTest - Testing the URI: " + invalidUri);

        // verify that it works for administrators with an invalid or missing uri - with eperson parameter
        getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", invalidUri)
                .param("feature", alwaysTrue.getName())
                .param("eperson", admin.getID().toString()))
                .andExpect(status().isBadRequest());

        // verify that it works for administrators with an invalid or missing uri - without eperson parameter
        getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", invalidUri)
                .param("feature", alwaysTrue.getName()))
                .andExpect(status().isBadRequest());

        // verify that it works for normal loggedin users with an invalid or missing uri - with eperson parameter
        getClient(epersonToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(epersonToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", invalidUri)
                .param("feature", alwaysTrue.getName())
                .param("eperson", eperson.getID().toString()))
                .andExpect(status().isBadRequest());

        // verify that it works for normal loggedin users with an invalid or missing uri - without eperson parameter
        getClient(epersonToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(epersonToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", invalidUri)
                .param("feature", alwaysTrue.getName()))
                .andExpect(status().isBadRequest());

        // verify that it works for administrators inspecting other users with an invalid or missing uri - by
        // using the eperson parameter
        getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", invalidUri)
                .param("feature", alwaysTrue.getName())
                .param("eperson", eperson.getID().toString()))
@@ -1301,51 +1365,18 @@ public class AuthorizationRestRepositoryIT extends AbstractControllerIntegration

        // verify that it works for administrators inspecting other users with an invalid or missing uri - by
        // assuming login
        getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", invalidUri)
                .param("feature", alwaysTrue.getName())
                .header("X-On-Behalf-Of", eperson.getID()))
                .andExpect(status().isBadRequest());

        // verify that it works for anonymous users with an invalid or missing uri
        getClient().perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient().perform(get("/api/authz/authorizations/search/object")
                .param("uri", invalidUri)
                .param("feature", alwaysTrue.getName()))
                .andExpect(status().isBadRequest());
    }

        getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
                .param("eperson", admin.getID().toString()))
                .andExpect(status().isBadRequest());
        getClient(epersonToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
                .param("eperson", eperson.getID().toString()))
                .andExpect(status().isBadRequest());
        getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
                .param("eperson", eperson.getID().toString()))
                .andExpect(status().isBadRequest());
        getClient().perform(get("/api/authz/authorizations/search/objectAndFeature"))
                .andExpect(status().isBadRequest());
        getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature"))
                .andExpect(status().isBadRequest());

        getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
                .param("uri", siteUri)
                .param("eperson", admin.getID().toString()))
                .andExpect(status().isBadRequest());
        getClient(epersonToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
                .param("uri", siteUri)
                .param("eperson", eperson.getID().toString()))
                .andExpect(status().isBadRequest());
        getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
                .param("uri", siteUri)
                .param("eperson", eperson.getID().toString()))
                .andExpect(status().isBadRequest());
        getClient().perform(get("/api/authz/authorizations/search/objectAndFeature")
                .param("uri", siteUri))
                .andExpect(status().isBadRequest());
        getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
                .param("uri", siteUri.toString()))
                .andExpect(status().isBadRequest());
    }

    @Test
@@ -1363,28 +1394,28 @@ public class AuthorizationRestRepositoryIT extends AbstractControllerIntegration
        configurationService.setProperty("org.dspace.app.rest.authorization.AlwaysThrowExceptionFeature.turnoff", true);

        // verify that it works for an anonymous user inspecting an admin user - by using the eperson parameter
        getClient().perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient().perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", alwaysTrue.getName())
                .param("eperson", admin.getID().toString()))
                .andExpect(status().isUnauthorized());

        // verify that it works for an anonymous user inspecting an admin user - by assuming login
        getClient().perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient().perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", alwaysTrue.getName())
                .header("X-On-Behalf-Of", admin.getID()))
                .andExpect(status().isUnauthorized());

        // verify that it works for an anonymous user inspecting a normal user - by using the eperson parameter
        getClient().perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient().perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", alwaysTrue.getName())
                .param("eperson", eperson.getID().toString()))
                .andExpect(status().isUnauthorized());

        // verify that it works for an anonymous user inspecting a normal user - by assuming login
        getClient().perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient().perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", alwaysTrue.getName())
                .header("X-On-Behalf-Of", eperson.getID()))
@@ -1411,28 +1442,28 @@ public class AuthorizationRestRepositoryIT extends AbstractControllerIntegration
        String anotherToken = getAuthToken(anotherEperson.getEmail(), password);

        // verify that he cannot search the admin authorizations - by using the eperson parameter
        getClient(anotherToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(anotherToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", alwaysTrue.getName())
                .param("eperson", admin.getID().toString()))
                .andExpect(status().isForbidden());

        // verify that he cannot search the admin authorizations - by assuming login
        getClient(anotherToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(anotherToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", alwaysTrue.getName())
                .header("X-On-Behalf-Of", admin.getID()))
                .andExpect(status().isForbidden());

        // verify that he cannot search the authorizations of another "normal" eperson - by using the eperson parameter
        getClient(anotherToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(anotherToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", alwaysTrue.getName())
                .param("eperson", eperson.getID().toString()))
                .andExpect(status().isForbidden());

        // verify that he cannot search the authorizations of another "normal" eperson - by assuming login
        getClient(anotherToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(anotherToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", alwaysTrue.getName())
                .header("X-On-Behalf-Of", eperson.getID()))
@@ -1452,14 +1483,14 @@ public class AuthorizationRestRepositoryIT extends AbstractControllerIntegration
        String adminToken = getAuthToken(admin.getEmail(), password);

        // verify that it works for administrators - with eperson parameter
        getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", alwaysException.getName())
                .param("eperson", admin.getID().toString()))
                .andExpect(status().isInternalServerError());

        // verify that it works for administrators - without eperson parameter
        getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", alwaysException.getName()))
                .andExpect(status().isInternalServerError());
@@ -1467,20 +1498,20 @@ public class AuthorizationRestRepositoryIT extends AbstractControllerIntegration
        String epersonToken = getAuthToken(eperson.getEmail(), password);

        // verify that it works for normal loggedin users - with eperson parameter
        getClient(epersonToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(epersonToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", alwaysException.getName())
                .param("eperson", eperson.getID().toString()))
                .andExpect(status().isInternalServerError());

        // verify that it works for normal loggedin users - without eperson parameter
        getClient(epersonToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(epersonToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", alwaysException.getName()))
                .andExpect(status().isInternalServerError());

        // verify that it works for anonymous users
        getClient().perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient().perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", alwaysException.getName()))
                .andExpect(status().isInternalServerError());
@@ -1520,31 +1551,31 @@ public class AuthorizationRestRepositoryIT extends AbstractControllerIntegration
        // check both via direct access and via a search method
        getClient(adminToken).perform(get("/api/authz/authorizations/" + authAdminSite.getID()))
                .andExpect(status().isNotFound());
        getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", trueForUsersInGroupTest.getName())
                .param("eperson", admin.getID().toString()))
                .andExpect(status().isNoContent());
                .andExpect(jsonPath("$.page.totalElements", is(0)));
        // nor the normal user, both directly and if checked by the admin
        getClient(adminToken).perform(get("/api/authz/authorizations/" + authNormalUserSite.getID()))
                .andExpect(status().isNotFound());
        getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", trueForUsersInGroupTest.getName())
                .param("eperson", normalUser.getID().toString()))
                .andExpect(status().isNoContent());
                .andExpect(jsonPath("$.page.totalElements", is(0)));
        getClient(normalUserToken).perform(get("/api/authz/authorizations/" + authNormalUserSite.getID()))
                .andExpect(status().isNotFound());
        getClient(normalUserToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(normalUserToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", trueForUsersInGroupTest.getName())
                .param("eperson", normalUser.getID().toString()))
                .andExpect(status().isNoContent());
                .andExpect(jsonPath("$.page.totalElements", is(0)));

        // instead the member user has
        getClient(adminToken).perform(get("/api/authz/authorizations/" + authMemberSite.getID()))
                .andExpect(status().isOk());
        getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", trueForUsersInGroupTest.getName())
                .param("eperson", memberOfTestGroup.getID().toString()))
@@ -1552,7 +1583,7 @@ public class AuthorizationRestRepositoryIT extends AbstractControllerIntegration
        // so it can also check the permission itself
        getClient(memberToken).perform(get("/api/authz/authorizations/" + authMemberSite.getID()))
                .andExpect(status().isOk());
        getClient(memberToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(memberToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", trueForUsersInGroupTest.getName())
                .param("eperson", memberOfTestGroup.getID().toString()))
@@ -1568,7 +1599,7 @@ public class AuthorizationRestRepositoryIT extends AbstractControllerIntegration
        // our admin now should have the authorization
        getClient(adminToken).perform(get("/api/authz/authorizations/" + authAdminSite.getID()))
                .andExpect(status().isOk());
        getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", trueForUsersInGroupTest.getName())
                .param("eperson", admin.getID().toString()))
@@ -1576,15 +1607,15 @@ public class AuthorizationRestRepositoryIT extends AbstractControllerIntegration
        // our normal user when checked via the admin should still not have the authorization
        getClient(adminToken).perform(get("/api/authz/authorizations/" + authNormalUserSite.getID()))
                .andExpect(status().isNotFound());
        getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", trueForUsersInGroupTest.getName())
                .param("eperson", normalUser.getID().toString()))
                .andExpect(status().isNoContent());
                .andExpect(jsonPath("$.page.totalElements", is(0)));
        // but he should have the authorization if loggedin directly
        getClient(normalUserToken).perform(get("/api/authz/authorizations/" + authNormalUserSite.getID()))
                .andExpect(status().isOk());
        getClient(normalUserToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(normalUserToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", trueForUsersInGroupTest.getName())
                .param("eperson", normalUser.getID().toString()))
@@ -1592,14 +1623,14 @@ public class AuthorizationRestRepositoryIT extends AbstractControllerIntegration
        // for our direct member user we don't expect differences
        getClient(adminToken).perform(get("/api/authz/authorizations/" + authMemberSite.getID()))
                .andExpect(status().isOk());
        getClient(adminToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", trueForUsersInGroupTest.getName())
                .param("eperson", memberOfTestGroup.getID().toString()))
                .andExpect(status().isOk());
        getClient(memberToken).perform(get("/api/authz/authorizations/" + authMemberSite.getID()))
                .andExpect(status().isOk());
        getClient(memberToken).perform(get("/api/authz/authorizations/search/objectAndFeature")
        getClient(memberToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", siteUri)
                .param("feature", trueForUsersInGroupTest.getName())
                .param("eperson", memberOfTestGroup.getID().toString()))

@@ -22,15 +22,15 @@ import org.apache.commons.codec.CharEncoding;
import org.apache.commons.io.IOUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.builder.BitstreamBuilder;
import org.dspace.app.rest.builder.BundleBuilder;
import org.dspace.app.rest.builder.CollectionBuilder;
import org.dspace.app.rest.builder.CommunityBuilder;
import org.dspace.app.rest.builder.EPersonBuilder;
import org.dspace.app.rest.builder.ItemBuilder;
import org.dspace.app.rest.builder.ResourcePolicyBuilder;
import org.dspace.app.rest.matcher.BundleMatcher;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.builder.BitstreamBuilder;
import org.dspace.builder.BundleBuilder;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.EPersonBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.builder.ResourcePolicyBuilder;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;

@@ -25,14 +25,14 @@ import java.util.Random;
import java.util.concurrent.atomic.AtomicReference;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.dspace.app.rest.builder.BitstreamFormatBuilder;
import org.dspace.app.rest.builder.EPersonBuilder;
import org.dspace.app.rest.converter.BitstreamFormatConverter;
import org.dspace.app.rest.matcher.BitstreamFormatMatcher;
import org.dspace.app.rest.matcher.HalMatcher;
import org.dspace.app.rest.model.BitstreamFormatRest;
import org.dspace.app.rest.projection.Projection;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.builder.BitstreamFormatBuilder;
import org.dspace.builder.EPersonBuilder;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.service.BitstreamFormatService;
import org.dspace.core.I18nUtil;

@@ -11,13 +11,13 @@ import static java.util.UUID.randomUUID;
import static org.apache.commons.codec.CharEncoding.UTF_8;
import static org.apache.commons.collections.CollectionUtils.isEmpty;
import static org.apache.commons.io.IOUtils.toInputStream;
import static org.dspace.app.rest.builder.BitstreamBuilder.createBitstream;
import static org.dspace.app.rest.builder.BitstreamFormatBuilder.createBitstreamFormat;
import static org.dspace.app.rest.builder.CollectionBuilder.createCollection;
import static org.dspace.app.rest.builder.CommunityBuilder.createCommunity;
import static org.dspace.app.rest.builder.ItemBuilder.createItem;
import static org.dspace.app.rest.builder.ResourcePolicyBuilder.createResourcePolicy;
import static org.dspace.app.rest.matcher.BitstreamFormatMatcher.matchBitstreamFormat;
import static org.dspace.builder.BitstreamBuilder.createBitstream;
import static org.dspace.builder.BitstreamFormatBuilder.createBitstreamFormat;
import static org.dspace.builder.CollectionBuilder.createCollection;
import static org.dspace.builder.CommunityBuilder.createCommunity;
import static org.dspace.builder.ItemBuilder.createItem;
import static org.dspace.builder.ResourcePolicyBuilder.createResourcePolicy;
import static org.dspace.content.BitstreamFormat.KNOWN;
import static org.dspace.content.BitstreamFormat.SUPPORTED;
import static org.dspace.core.Constants.READ;
@@ -53,14 +53,14 @@ import org.apache.commons.lang3.StringUtils;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.text.PDFTextStripper;
import org.apache.solr.client.solrj.SolrServerException;
import org.dspace.app.rest.builder.BitstreamBuilder;
import org.dspace.app.rest.builder.CollectionBuilder;
import org.dspace.app.rest.builder.CommunityBuilder;
import org.dspace.app.rest.builder.EPersonBuilder;
import org.dspace.app.rest.builder.GroupBuilder;
import org.dspace.app.rest.builder.ItemBuilder;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.builder.BitstreamBuilder;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.EPersonBuilder;
import org.dspace.builder.GroupBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Collection;

@@ -23,12 +23,6 @@ import java.util.UUID;

import org.apache.commons.codec.CharEncoding;
import org.apache.commons.io.IOUtils;
import org.dspace.app.rest.builder.BitstreamBuilder;
import org.dspace.app.rest.builder.BundleBuilder;
import org.dspace.app.rest.builder.CollectionBuilder;
import org.dspace.app.rest.builder.CommunityBuilder;
import org.dspace.app.rest.builder.ItemBuilder;
import org.dspace.app.rest.builder.ResourcePolicyBuilder;
import org.dspace.app.rest.matcher.BitstreamFormatMatcher;
import org.dspace.app.rest.matcher.BitstreamMatcher;
import org.dspace.app.rest.matcher.BundleMatcher;
@@ -36,6 +30,12 @@ import org.dspace.app.rest.matcher.HalMatcher;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.app.rest.test.MetadataPatchSuite;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.builder.BitstreamBuilder;
import org.dspace.builder.BundleBuilder;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.builder.ResourcePolicyBuilder;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;

@@ -19,14 +19,14 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

import org.dspace.app.rest.builder.CollectionBuilder;
import org.dspace.app.rest.builder.CommunityBuilder;
import org.dspace.app.rest.builder.GroupBuilder;
import org.dspace.app.rest.builder.ItemBuilder;
import org.dspace.app.rest.matcher.BrowseEntryResourceMatcher;
import org.dspace.app.rest.matcher.BrowseIndexMatcher;
import org.dspace.app.rest.matcher.ItemMatcher;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.GroupBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;

@@ -27,13 +27,6 @@ import javax.ws.rs.core.MediaType;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.codec.CharEncoding;
import org.apache.commons.io.IOUtils;
import org.dspace.app.rest.builder.BitstreamBuilder;
import org.dspace.app.rest.builder.BundleBuilder;
import org.dspace.app.rest.builder.CollectionBuilder;
import org.dspace.app.rest.builder.CommunityBuilder;
import org.dspace.app.rest.builder.EPersonBuilder;
import org.dspace.app.rest.builder.ItemBuilder;
import org.dspace.app.rest.builder.ResourcePolicyBuilder;
import org.dspace.app.rest.matcher.BitstreamMatcher;
import org.dspace.app.rest.matcher.BundleMatcher;
import org.dspace.app.rest.matcher.HalMatcher;
@@ -47,6 +40,13 @@ import org.dspace.app.rest.model.patch.Operation;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.builder.BitstreamBuilder;
import org.dspace.builder.BundleBuilder;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.EPersonBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.builder.ResourcePolicyBuilder;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
@@ -76,6 +76,7 @@ public class BundleRestRepositoryIT extends AbstractControllerIntegrationTest {
    private Bitstream bitstream2;

    @Before
    @Override
    public void setUp() throws Exception {
        super.setUp();
Some files were not shown because too many files have changed in this diff
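
Editor's note: the remaining hunks are mechanical import updates; the test data builders moved from `org.dspace.app.rest.builder` to `org.dspace.builder`, and `BundleRestRepositoryIT.setUp()` additionally gains an `@Override` annotation. A minimal sketch of how a test might use the builders after the package move; the class name is hypothetical, and the `withName`/`withTitle`/`build` chain and the `context.turnOffAuthorisationSystem()` / `restoreAuthSystemState()` calls are assumed from common DSpace integration-test style rather than taken from this diff:

```java
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.junit.Test;

public class BuilderPackageMoveSketchIT extends AbstractControllerIntegrationTest {

    @Test
    public void buildersResolveFromNewPackage() throws Exception {
        // The shared `context` comes from the integration test base class.
        context.turnOffAuthorisationSystem();
        Community parentCommunity = CommunityBuilder.createCommunity(context)
                .withName("Parent Community")   // assumed builder method, per common test style
                .build();
        Collection collection = CollectionBuilder.createCollection(context, parentCommunity)
                .withName("Test Collection")
                .build();
        Item item = ItemBuilder.createItem(context, collection)
                .withTitle("Test Item")
                .build();
        context.restoreAuthSystemState();
    }
}
```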