Merge branch 'main' into draft_vocabulary

ddinuzzo, 2020-08-06 10:20:30 +02:00, committed by GitHub
185 changed files with 3454 additions and 1633 deletions

View File

@@ -1,8 +1,7 @@
## References
_Add references/links to any related tickets or PRs. These may include:_
* Link to [JIRA](https://jira.lyrasis.org/projects/DS/summary) ticket(s), if any
* Link to [REST Contract](https://github.com/DSpace/Rest7Contract) or an open REST Contract PR, if any
* Link to [Angular issue or PR](https://github.com/DSpace/dspace-angular/issues) related to this PR, if any
_Add references/links to any related issues or PRs. These may include:_
* Related to [REST Contract](https://github.com/DSpace/Rest7Contract) or an open REST Contract PR, if any
* Fixes [GitHub issue](https://github.com/DSpace/DSpace/issues), if any
## Description
Short summary of changes (1-2 sentences).

View File

@@ -30,14 +30,14 @@ install: "echo 'Skipping install stage, dependencies will be downloaded during b
script:
# Summary of flags used (below):
# license:check => Validate all source code license headers
# -Dmaven.test.skip=false => Enable DSpace Unit Tests
# -DskipTests=false => Enable DSpace Unit Tests
# -DskipITs=false => Enable DSpace Integration Tests
# -Pdspace-rest => Enable optional dspace-rest module as part of build
# -Pdspace-rest => Enable optional dspace-rest module as part of build
# -P !assembly => Skip assembly of "dspace-installer" directory (as it can be memory intensive)
# -B => Maven batch/non-interactive mode (recommended for CI)
# -V => Display Maven version info before build
# -Dsurefire.rerunFailingTestsCount=2 => try again for flakey tests, and keep track of/report on number of retries
- "mvn clean install license:check -Dmaven.test.skip=false -DskipITs=false -Pdspace-rest -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"
- "mvn clean install license:check -DskipTests=false -DskipITs=false -Pdspace-rest -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"
# After a successful build and test (see 'script'), send code coverage reports to coveralls.io
# These code coverage reports are generated by jacoco-maven-plugin (during test process above).

View File

@@ -90,33 +90,33 @@ run automatically by [Travis CI](https://travis-ci.com/DSpace/DSpace/) for all P
* How to run both Unit Tests (via `maven-surefire-plugin`) and Integration Tests (via `maven-failsafe-plugin`):
```
mvn clean test -Dmaven.test.skip=false -DskipITs=false
mvn clean test -DskipTests=false -DskipITs=false
```
* How to run just Unit Tests:
```
mvn test -Dmaven.test.skip=false
mvn test -DskipTests=false
```
* How to run a *single* Unit Test
```
# Run all tests in a specific test class
# NOTE: failIfNoTests=false is required to skip tests in other modules
mvn test -Dmaven.test.skip=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false
mvn test -DskipTests=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false
# Run one test method in a specific test class
mvn test -Dmaven.test.skip=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
mvn test -DskipTests=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
```
* How to run Integration Tests (requires enabling Unit tests too)
```
mvn verify -Dmaven.test.skip=false -DskipITs=false
mvn verify -DskipTests=false -DskipITs=false
```
* How to run a *single* Integration Test (requires enabling Unit tests too)
```
# Run all integration tests in a specific test class
# NOTE: failIfNoTests=false is required to skip tests in other modules
mvn test -Dmaven.test.skip=false -DskipITs=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false
mvn test -DskipTests=false -DskipITs=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false
# Run one test method in a specific test class
mvn test -Dmaven.test.skip=false -DskipITs=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
mvn test -DskipTests=false -DskipITs=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
```
* How to run only tests of a specific DSpace module
```

View File

@@ -137,7 +137,7 @@
<activation>
<activeByDefault>false</activeByDefault>
<!-- property>
<name>maven.test.skip</name>
<name>skipTests</name>
<value>false</value>
</property -->
</activation>
@@ -158,7 +158,7 @@
<activation>
<activeByDefault>false</activeByDefault>
<property>
<name>maven.test.skip</name>
<name>skipTests</name>
<value>false</value>
</property>
</activation>
@@ -241,6 +241,7 @@
<dspace.dir>${agnostic.build.dir}/testing/dspace/</dspace.dir>
<!-- Turn off any DSpace logging -->
<dspace.log.init.disable>true</dspace.log.init.disable>
<solr.install.dir>${agnostic.build.dir}/testing/dspace/solr/</solr.install.dir>
</systemPropertyVariables>
</configuration>
</plugin>
@@ -255,6 +256,7 @@
<dspace.dir>${agnostic.build.dir}/testing/dspace/</dspace.dir>
<!-- Turn off any DSpace logging -->
<dspace.log.init.disable>true</dspace.log.init.disable>
<solr.install.dir>${agnostic.build.dir}/testing/dspace/solr/</solr.install.dir>
</systemPropertyVariables>
</configuration>
</plugin>
@@ -291,9 +293,20 @@
</dependency>
<dependency>
<groupId>org.dspace</groupId>
<groupId>net.handle</groupId>
<artifactId>handle</artifactId>
</dependency>
<dependency>
<groupId>net.cnri</groupId>
<artifactId>cnri-servlet-container</artifactId>
<exclusions>
<!-- Newer versions provided in our parent POM -->
<exclusion>
<groupId>org.ow2.asm</groupId>
<artifactId>asm-commons</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Jetty is needed to run Handle Server -->
<dependency>
<groupId>org.eclipse.jetty</groupId>
@@ -312,6 +325,10 @@
<artifactId>apache-jena-libs</artifactId>
<type>pom</type>
</dependency>
<dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
@@ -468,16 +485,164 @@
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr-cell</artifactId>
<artifactId>solr-solrj</artifactId>
<version>${solr.client.version}</version>
</dependency>
<!-- Solr Core is needed for Integration Tests (to run a MockSolrServer) -->
<!-- The following Solr / Lucene dependencies also support integration tests -->
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr-core</artifactId>
<scope>test</scope>
<version>${solr.client.version}</version>
<exclusions>
<!-- Newer version provided in our parent POM -->
<exclusion>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-continuation</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-deploy</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-http</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-io</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-jmx</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-rewrite</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-security</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-server</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlet</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlets</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-webapp</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-xml</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr-cell</artifactId>
<exclusions>
<!-- Newer versions provided in our parent POM -->
<exclusion>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
</exclusion>
<exclusion>
<groupId>org.ow2.asm</groupId>
<artifactId>asm-commons</artifactId>
</exclusion>
<exclusion>
<groupId>org.bouncycastle</groupId>
<artifactId>bcpkix-jdk15on</artifactId>
</exclusion>
<exclusion>
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk15on</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-xml</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-http</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlet</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-webapp</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-deploy</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-continuation</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlets</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-io</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-security</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId>
</dependency>
<!-- Reminder: Keep icu4j (in Parent POM) synced with version used by lucene-analyzers-icu below,
otherwise ICUFoldingFilterFactory may throw errors in tests. -->
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-icu</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-smartcn</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-stempel</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.xmlbeans</groupId>
<artifactId>xmlbeans</artifactId>
<version>2.6.0</version>
</dependency>
<dependency>
<groupId>com.maxmind.geoip2</groupId>
@@ -658,7 +823,7 @@
<dependency>
<groupId>org.xmlunit</groupId>
<artifactId>xmlunit-matchers</artifactId>
<artifactId>xmlunit-core</artifactId>
<version>2.6.3</version>
<scope>test</scope>
</dependency>

View File

@@ -1519,6 +1519,12 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
if (!dir.exists() && !dir.mkdirs()) {
log.error("Unable to create directory: " + dir.getAbsolutePath());
}
// Verify that the directory the entry is using is a subpath of zipDir (and not somewhere else!)
if (!dir.toPath().normalize().startsWith(zipDir)) {
throw new IOException("Bad zip entry: '" + entry.getName()
+ "' in file '" + zipfile.getAbsolutePath() + "'!"
+ " Cannot process this file.");
}
//Entries could have too many directories, and we need to adjust the sourcedir
// file1.zip (SimpleArchiveFormat / item1 / contents|dublin_core|...
@@ -1539,9 +1545,16 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
}
byte[] buffer = new byte[1024];
int len;
File outFile = new File(zipDir + entry.getName());
// Verify that this file will be created in our zipDir (and not somewhere else!)
if (!outFile.toPath().normalize().startsWith(zipDir)) {
throw new IOException("Bad zip entry: '" + entry.getName()
+ "' in file '" + zipfile.getAbsolutePath() + "'!"
+ " Cannot process this file.");
}
InputStream in = zf.getInputStream(entry);
BufferedOutputStream out = new BufferedOutputStream(
new FileOutputStream(zipDir + entry.getName()));
new FileOutputStream(outFile));
while ((len = in.read(buffer)) >= 0) {
out.write(buffer, 0, len);
}
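
The two new checks above guard against "zip slip" path traversal, where a crafted entry name such as `../../evil.sh` would otherwise escape the extraction directory. A minimal standalone sketch of the same normalize-and-verify idea (a hypothetical helper, not the DSpace method itself):

```java
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.zip.ZipEntry;

public class ZipSlipCheck {
    /**
     * Resolve a zip entry name against the extraction root and reject any entry
     * whose normalized path escapes that root (e.g. "../../etc/passwd").
     */
    static File safeDestination(String zipDir, ZipEntry entry) throws IOException {
        Path root = Paths.get(zipDir).normalize();
        Path target = root.resolve(entry.getName()).normalize();
        if (!target.startsWith(root)) {
            throw new IOException("Bad zip entry: '" + entry.getName() + "'");
        }
        return target.toFile();
    }
}
```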

View File

@@ -48,6 +48,9 @@ public class SHERPAResponse {
factory.setValidating(false);
factory.setIgnoringComments(true);
factory.setIgnoringElementContentWhitespace(true);
// disallow DTD parsing to ensure no XXE attacks can occur.
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
DocumentBuilder db = factory.newDocumentBuilder();
Document inDoc = db.parse(xmlData);
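
The same DTD-disabling line recurs in several parsers further down (MetadataWebService, ArXivService, CiNiiService, CrossRefService, PubmedService, CCLicenseConnectorService). A minimal sketch of a hardened factory along the lines of the cited OWASP guidance, shown once here rather than repeated per file:

```java
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

public class SecureXmlFactory {
    /** Build a DocumentBuilder that rejects DOCTYPE declarations, blocking XXE payloads. */
    static DocumentBuilder newSecureBuilder() throws ParserConfigurationException {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        // Refusing DOCTYPE removes external entity resolution and entity-expansion attacks.
        factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        factory.setXIncludeAware(false);
        factory.setExpandEntityReferences(false);
        return factory.newDocumentBuilder();
    }
}
```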

View File

@@ -153,7 +153,7 @@ public class BitstreamFormatServiceImpl implements BitstreamFormatService {
// If the exception was thrown, unknown will == null so go ahead and
// load s. If not, check that the unknown's registry's name is not
// being reset.
if (unknown == null || unknown.getID() != bitstreamFormat.getID()) {
if (unknown == null || !unknown.getID().equals(bitstreamFormat.getID())) {
bitstreamFormat.setShortDescriptionInternal(shortDescription);
}
}
@@ -208,7 +208,7 @@ public class BitstreamFormatServiceImpl implements BitstreamFormatService {
// Find "unknown" type
BitstreamFormat unknown = findUnknown(context);
if (unknown.getID() == bitstreamFormat.getID()) {
if (unknown.getID().equals(bitstreamFormat.getID())) {
throw new IllegalArgumentException("The Unknown bitstream format may not be deleted.");
}
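
This equals-versus-== change is repeated below for MetadataField, MetadataSchema, MetadataValue, WorkspaceItem, Version and VersionHistory. The reason is that the IDs are boxed `Integer`s (or other objects), so reference comparison only happens to work for values inside the JVM's small-integer cache; a small illustration:

```java
public class BoxedIdComparison {
    public static void main(String[] args) {
        Integer a = 127, b = 127;
        Integer c = 1000, d = 1000;
        System.out.println(a == b);      // true  (values in the -128..127 cache share objects)
        System.out.println(c == d);      // false (two distinct boxed objects)
        System.out.println(c.equals(d)); // true  (value comparison, always correct)
    }
}
```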

View File

@@ -9,6 +9,7 @@ package org.dspace.content;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
@@ -101,7 +102,7 @@ public class MetadataDSpaceCsvExportServiceImpl implements MetadataDSpaceCsvExpo
throws SQLException {
// Add all the collections
List<Collection> collections = community.getCollections();
Iterator<Item> result = null;
Iterator<Item> result = Collections.<Item>emptyIterator();
for (Collection collection : collections) {
Iterator<Item> items = itemService.findByCollection(context, collection);
result = addItemsToResult(result, items);
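
Initialising `result` with `Collections.emptyIterator()` instead of `null` means callers can always iterate the export result, even when the community has no collections. A tiny sketch of the difference:

```java
import java.util.Collections;
import java.util.Iterator;

public class EmptyIteratorDemo {
    public static void main(String[] args) {
        Iterator<String> result = Collections.emptyIterator();
        // Safe even if nothing is ever appended: hasNext() is simply false.
        while (result.hasNext()) {
            System.out.println(result.next());
        }
        // Starting from null would instead throw NullPointerException on first use.
    }
}
```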

View File

@@ -168,11 +168,11 @@ public class MetadataField implements ReloadableEntity<Integer> {
return false;
}
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(obj);
if (getClass() != objClass) {
if (!getClass().equals(objClass)) {
return false;
}
final MetadataField other = (MetadataField) obj;
if (this.getID() != other.getID()) {
if (!this.getID().equals(other.getID())) {
return false;
}
if (!getMetadataSchema().equals(other.getMetadataSchema())) {

View File

@@ -67,11 +67,11 @@ public class MetadataSchema implements ReloadableEntity<Integer> {
return false;
}
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(obj);
if (getClass() != objClass) {
if (!getClass().equals(objClass)) {
return false;
}
final MetadataSchema other = (MetadataSchema) obj;
if (this.id != other.id) {
if (!this.id.equals(other.id)) {
return false;
}
if ((this.namespace == null) ? (other.namespace != null) : !this.namespace.equals(other.namespace)) {

View File

@@ -239,17 +239,17 @@ public class MetadataValue implements ReloadableEntity<Integer> {
return false;
}
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(obj);
if (getClass() != objClass) {
if (!getClass().equals(objClass)) {
return false;
}
final MetadataValue other = (MetadataValue) obj;
if (this.id != other.id) {
if (!this.id.equals(other.id)) {
return false;
}
if (this.getID() != other.getID()) {
if (!this.getID().equals(other.getID())) {
return false;
}
if (this.getDSpaceObject().getID() != other.getDSpaceObject().getID()) {
if (!this.getDSpaceObject().getID().equals(other.getDSpaceObject().getID())) {
return false;
}
return true;

View File

@@ -156,11 +156,11 @@ public class WorkspaceItem
return true;
}
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(o);
if (getClass() != objClass) {
if (!getClass().equals(objClass)) {
return false;
}
final WorkspaceItem that = (WorkspaceItem) o;
if (this.getID() != that.getID()) {
if (!this.getID().equals(that.getID())) {
return false;
}

View File

@@ -272,12 +272,16 @@ public class METSManifest {
// Set validation feature
if (validate) {
builder.setFeature("http://apache.org/xml/features/validation/schema", true);
}
// Tell the parser where local copies of schemas are, to speed up
// validation. Local XSDs are identified in the configuration file.
if (localSchemas.length() > 0) {
builder.setProperty("http://apache.org/xml/properties/schema/external-schemaLocation", localSchemas);
// Tell the parser where local copies of schemas are, to speed up
// validation & avoid XXE attacks from remote schemas. Local XSDs are identified in the configuration file.
if (localSchemas.length() > 0) {
builder.setProperty("http://apache.org/xml/properties/schema/external-schemaLocation", localSchemas);
}
} else {
// disallow DTD parsing to ensure no XXE attacks can occur.
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
builder.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
}
// Parse the METS file

View File

@@ -199,6 +199,9 @@ public class MetadataWebService extends AbstractCurationTask implements Namespac
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setNamespaceAware(true);
try {
// disallow DTD parsing to ensure no XXE attacks can occur.
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
docBuilder = factory.newDocumentBuilder();
} catch (ParserConfigurationException pcE) {
log.error("caught exception: " + pcE);

View File

@@ -8,21 +8,22 @@
package org.dspace.curate;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.Writer;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.io.output.NullOutputStream;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Context;
import org.dspace.core.factory.CoreServiceFactory;
@@ -30,183 +31,86 @@ import org.dspace.curate.factory.CurateServiceFactory;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.utils.DSpace;
/**
* CurationCli provides command-line access to Curation tools and processes.
*
* @author richardrodgers
*/
public class CurationCli {
public class CurationCli extends DSpaceRunnable<CurationScriptConfiguration> {
private EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
private Context context;
private CurationClientOptions curationClientOptions;
private String task;
private String taskFile;
private String id;
private String queue;
private String scope;
private String reporter;
private Map<String, String> parameters;
private boolean verbose;
@Override
public void internalRun() throws Exception {
if (curationClientOptions == CurationClientOptions.HELP) {
printHelp();
return;
}
Curator curator = initCurator();
// load curation tasks
if (curationClientOptions == CurationClientOptions.TASK) {
long start = System.currentTimeMillis();
handleCurationTask(curator);
this.endScript(start);
}
// process task queue
if (curationClientOptions == CurationClientOptions.QUEUE) {
// process the task queue
TaskQueue taskQueue = (TaskQueue) CoreServiceFactory.getInstance().getPluginService()
.getSinglePlugin(TaskQueue.class);
if (taskQueue == null) {
super.handler.logError("No implementation configured for queue");
throw new UnsupportedOperationException("No queue service available");
}
long timeRun = this.runQueue(taskQueue, curator);
this.endScript(timeRun);
}
}
/**
* Default constructor
* Runs the curation task (-t) or the tasks in the given task file (-T).
* Checks:
* - whether the required -i option is missing.
* - whether the -t option names a valid task.
*/
private CurationCli() { }
public static void main(String[] args) throws Exception {
// create an options object and populate it
CommandLineParser parser = new PosixParser();
Options options = new Options();
options.addOption("t", "task", true,
"curation task name");
options.addOption("T", "taskfile", true,
"file containing curation task names");
options.addOption("i", "id", true,
"Id (handle) of object to perform task on, or 'all' to perform on whole repository");
options.addOption("p", "parameter", true,
"a task parameter 'NAME=VALUE'");
options.addOption("q", "queue", true,
"name of task queue to process");
options.addOption("e", "eperson", true,
"email address of curating eperson");
options.addOption("r", "reporter", true,
"relative or absolute path to the desired report file. "
+ "Use '-' to report to console. "
+ "If absent, no reporting");
options.addOption("s", "scope", true,
"transaction scope to impose: use 'object', 'curation', or 'open'. If absent, 'open' " +
"applies");
options.addOption("v", "verbose", false,
"report activity to stdout");
options.addOption("h", "help", false, "help");
CommandLine line = parser.parse(options, args);
String taskName = null;
String taskFileName = null;
String idName = null;
String taskQueueName = null;
String ePersonName = null;
String reporterName = null;
String scope = null;
boolean verbose = false;
final Map<String, String> parameters = new HashMap<>();
if (line.hasOption('h')) {
HelpFormatter help = new HelpFormatter();
help.printHelp("CurationCli\n", options);
System.out
.println("\nwhole repo: CurationCli -t estimate -i all");
System.out
.println("single item: CurationCli -t generate -i itemId");
System.out
.println("task queue: CurationCli -q monthly");
System.exit(0);
}
if (line.hasOption('t')) { // task
taskName = line.getOptionValue('t');
}
if (line.hasOption('T')) { // task file
taskFileName = line.getOptionValue('T');
}
if (line.hasOption('i')) { // id
idName = line.getOptionValue('i');
}
if (line.hasOption('q')) { // task queue
taskQueueName = line.getOptionValue('q');
}
if (line.hasOption('e')) { // eperson
ePersonName = line.getOptionValue('e');
}
if (line.hasOption('p')) { // parameter
for (String parameter : line.getOptionValues('p')) {
String[] parts = parameter.split("=", 2);
String name = parts[0].trim();
String value;
if (parts.length > 1) {
value = parts[1].trim();
} else {
value = "true";
}
parameters.put(name, value);
}
}
if (line.hasOption('r')) { // report file
reporterName = line.getOptionValue('r');
}
if (line.hasOption('s')) { // transaction scope
scope = line.getOptionValue('s');
}
if (line.hasOption('v')) { // verbose
verbose = true;
}
// now validate the args
if (idName == null && taskQueueName == null) {
System.out.println("Id must be specified: a handle, 'all', or a task queue (-h for help)");
System.exit(1);
}
if (taskName == null && taskFileName == null && taskQueueName == null) {
System.out.println("A curation task or queue must be specified (-h for help)");
System.exit(1);
}
if (scope != null && Curator.TxScope.valueOf(scope.toUpperCase()) == null) {
System.out.println("Bad transaction scope '" + scope + "': only 'object', 'curation' or 'open' recognized");
System.exit(1);
}
EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
Context c = new Context(Context.Mode.BATCH_EDIT);
if (ePersonName != null) {
EPerson ePerson = ePersonService.findByEmail(c, ePersonName);
if (ePerson == null) {
System.out.println("EPerson not found: " + ePersonName);
System.exit(1);
}
c.setCurrentUser(ePerson);
} else {
c.turnOffAuthorisationSystem();
}
Curator curator = new Curator();
OutputStream reporter;
if (null == reporterName) {
reporter = new NullOutputStream();
} else if ("-".equals(reporterName)) {
reporter = System.out;
} else {
reporter = new PrintStream(reporterName);
}
Writer reportWriter = new OutputStreamWriter(reporter);
curator.setReporter(reportWriter);
if (scope != null) {
Curator.TxScope txScope = Curator.TxScope.valueOf(scope.toUpperCase());
curator.setTransactionScope(txScope);
}
curator.addParameters(parameters);
// we are operating in batch mode, if anyone cares.
curator.setInvoked(Curator.Invoked.BATCH);
// load curation tasks
if (taskName != null) {
private void handleCurationTask(Curator curator) throws IOException, SQLException {
String taskName;
if (commandLine.hasOption('t')) {
if (verbose) {
System.out.println("Adding task: " + taskName);
handler.logInfo("Adding task: " + this.task);
}
curator.addTask(taskName);
if (verbose && !curator.hasTask(taskName)) {
System.out.println("Task: " + taskName + " not resolved");
curator.addTask(this.task);
if (verbose && !curator.hasTask(this.task)) {
handler.logInfo("Task: " + this.task + " not resolved");
}
} else if (taskQueueName == null) {
} else if (commandLine.hasOption('T')) {
// load taskFile
BufferedReader reader = null;
try {
reader = new BufferedReader(new FileReader(taskFileName));
reader = new BufferedReader(new FileReader(this.taskFile));
while ((taskName = reader.readLine()) != null) {
if (verbose) {
System.out.println("Adding task: " + taskName);
super.handler.logInfo("Adding task: " + taskName);
}
curator.addTask(taskName);
}
@@ -217,59 +121,242 @@ public class CurationCli {
}
}
// run tasks against object
long start = System.currentTimeMillis();
if (verbose) {
System.out.println("Starting curation");
super.handler.logInfo("Starting curation");
super.handler.logInfo("Curating id: " + this.id);
}
if (idName != null) {
if ("all".equals(this.id)) {
// run on whole Site
curator.curate(context,
ContentServiceFactory.getInstance().getSiteService().findSite(context).getHandle());
} else {
curator.curate(context, this.id);
}
}
/**
* Runs task queue (-q set)
*
* @param queue The task queue
* @param curator The curator
* @return Time when queue started
*/
private long runQueue(TaskQueue queue, Curator curator) throws SQLException, AuthorizeException, IOException {
// use current time as our reader 'ticket'
long ticket = System.currentTimeMillis();
Iterator<TaskQueueEntry> entryIter = queue.dequeue(this.queue, ticket).iterator();
while (entryIter.hasNext()) {
TaskQueueEntry entry = entryIter.next();
if (verbose) {
System.out.println("Curating id: " + idName);
super.handler.logInfo("Curating id: " + entry.getObjectId());
}
if ("all".equals(idName)) {
// run on whole Site
curator.curate(c, ContentServiceFactory.getInstance().getSiteService().findSite(c).getHandle());
curator.clear();
// does entry relate to a DSO or workflow object?
if (entry.getObjectId().indexOf('/') > 0) {
for (String taskName : entry.getTaskNames()) {
curator.addTask(taskName);
}
curator.curate(context, entry.getObjectId());
} else {
curator.curate(c, idName);
// make eperson who queued task the effective user
EPerson agent = ePersonService.findByEmail(context, entry.getEpersonId());
if (agent != null) {
context.setCurrentUser(agent);
}
CurateServiceFactory.getInstance().getWorkflowCuratorService()
.curate(curator, context, entry.getObjectId());
}
}
queue.release(this.queue, ticket, true);
return ticket;
}
/**
* End of curation script; logs script time if -v verbose is set
*
* @param timeRun Time script was started
* @throws SQLException If the DSpace context can't complete
*/
private void endScript(long timeRun) throws SQLException {
context.complete();
if (verbose) {
long elapsed = System.currentTimeMillis() - timeRun;
this.handler.logInfo("Ending curation. Elapsed time: " + elapsed);
}
}
/**
* Initialize the curator with command line variables
*
* @return Initialised curator
* @throws FileNotFoundException If the report file given by the -r reporter option is not found
*/
private Curator initCurator() throws FileNotFoundException {
Curator curator = new Curator();
OutputStream reporterStream;
if (null == this.reporter) {
reporterStream = new NullOutputStream();
} else if ("-".equals(this.reporter)) {
reporterStream = System.out;
} else {
reporterStream = new PrintStream(this.reporter);
}
Writer reportWriter = new OutputStreamWriter(reporterStream);
curator.setReporter(reportWriter);
if (this.scope != null) {
Curator.TxScope txScope = Curator.TxScope.valueOf(this.scope.toUpperCase());
curator.setTransactionScope(txScope);
}
curator.addParameters(parameters);
// we are operating in batch mode, if anyone cares.
curator.setInvoked(Curator.Invoked.BATCH);
return curator;
}
@Override
public void printHelp() {
super.printHelp();
super.handler.logInfo("\nwhole repo: CurationCli -t estimate -i all");
super.handler.logInfo("single item: CurationCli -t generate -i itemId");
super.handler.logInfo("task queue: CurationCli -q monthly");
}
@Override
public CurationScriptConfiguration getScriptConfiguration() {
return new DSpace().getServiceManager().getServiceByName("curate", CurationScriptConfiguration.class);
}
@Override
public void setup() {
if (this.commandLine.hasOption('e')) {
String ePersonEmail = this.commandLine.getOptionValue('e');
this.context = new Context(Context.Mode.BATCH_EDIT);
try {
EPerson ePerson = ePersonService.findByEmail(this.context, ePersonEmail);
if (ePerson == null) {
super.handler.logError("EPerson not found: " + ePersonEmail);
throw new IllegalArgumentException("Unable to find a user with email: " + ePersonEmail);
}
this.context.setCurrentUser(ePerson);
} catch (SQLException e) {
throw new IllegalArgumentException("SQLException trying to find user with email: " + ePersonEmail);
}
} else {
// process the task queue
TaskQueue queue = (TaskQueue) CoreServiceFactory.getInstance().getPluginService()
.getSinglePlugin(TaskQueue.class);
if (queue == null) {
System.out.println("No implementation configured for queue");
throw new UnsupportedOperationException("No queue service available");
}
// use current time as our reader 'ticket'
long ticket = System.currentTimeMillis();
Iterator<TaskQueueEntry> entryIter = queue.dequeue(taskQueueName, ticket).iterator();
while (entryIter.hasNext()) {
TaskQueueEntry entry = entryIter.next();
if (verbose) {
System.out.println("Curating id: " + entry.getObjectId());
}
curator.clear();
// does entry relate to a DSO or workflow object?
if (entry.getObjectId().indexOf("/") > 0) {
for (String task : entry.getTaskNames()) {
curator.addTask(task);
}
curator.curate(c, entry.getObjectId());
} else {
// make eperson who queued task the effective user
EPerson agent = ePersonService.findByEmail(c, entry.getEpersonId());
if (agent != null) {
c.setCurrentUser(agent);
}
CurateServiceFactory.getInstance().getWorkflowCuratorService()
.curate(curator, c, entry.getObjectId());
}
}
queue.release(taskQueueName, ticket, true);
throw new IllegalArgumentException("Needs an -e to set eperson (admin)");
}
c.complete();
if (verbose) {
long elapsed = System.currentTimeMillis() - start;
System.out.println("Ending curation. Elapsed time: " + elapsed);
this.curationClientOptions = CurationClientOptions.getClientOption(commandLine);
if (this.curationClientOptions != null) {
this.initGeneralLineOptionsAndCheckIfValid();
if (curationClientOptions == CurationClientOptions.TASK) {
this.initTaskLineOptionsAndCheckIfValid();
} else if (curationClientOptions == CurationClientOptions.QUEUE) {
this.queue = this.commandLine.getOptionValue('q');
}
} else {
throw new IllegalArgumentException("[--help || --task|--taskfile <> -identifier <> || -queue <> ] must be" +
" specified");
}
}
/**
* Fills in some optional command line options.
* Checks if there are missing required options or invalid values for options.
*/
private void initGeneralLineOptionsAndCheckIfValid() {
// report file
if (this.commandLine.hasOption('r')) {
this.reporter = this.commandLine.getOptionValue('r');
}
// parameters
this.parameters = new HashMap<>();
if (this.commandLine.hasOption('p')) {
for (String parameter : this.commandLine.getOptionValues('p')) {
String[] parts = parameter.split("=", 2);
String name = parts[0].trim();
String value;
if (parts.length > 1) {
value = parts[1].trim();
} else {
value = "true";
}
this.parameters.put(name, value);
}
}
// verbose
verbose = false;
if (commandLine.hasOption('v')) {
verbose = true;
}
// scope
if (this.commandLine.getOptionValue('s') != null) {
this.scope = this.commandLine.getOptionValue('s');
if (this.scope != null && Curator.TxScope.valueOf(this.scope.toUpperCase()) == null) {
this.handler.logError("Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " +
"'open' recognized");
throw new IllegalArgumentException(
"Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " +
"'open' recognized");
}
}
}
/**
* Fills in required command line options for the task or taskFile option.
* Checks if the required -i option is missing and if -i is either 'all' or a valid dso handle.
* Checks if -t task has a valid task option.
* Checks if -T taskfile is a valid file.
*/
private void initTaskLineOptionsAndCheckIfValid() {
// task or taskFile
if (this.commandLine.hasOption('t')) {
this.task = this.commandLine.getOptionValue('t');
if (!CurationClientOptions.getTaskOptions().contains(this.task)) {
super.handler
.logError("-t task must be one of: " + CurationClientOptions.getTaskOptions());
throw new IllegalArgumentException(
"-t task must be one of: " + CurationClientOptions.getTaskOptions());
}
} else if (this.commandLine.hasOption('T')) {
this.taskFile = this.commandLine.getOptionValue('T');
if (!(new File(this.taskFile).isFile())) {
super.handler
.logError("-T taskFile must be valid file: " + this.taskFile);
throw new IllegalArgumentException("-T taskFile must be valid file: " + this.taskFile);
}
}
if (this.commandLine.hasOption('i')) {
this.id = this.commandLine.getOptionValue('i').toLowerCase();
if (!this.id.equalsIgnoreCase("all")) {
HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
DSpaceObject dso;
try {
dso = handleService.resolveToObject(this.context, id);
} catch (SQLException e) {
super.handler.logError("SQLException trying to resolve handle " + id + " to a valid dso");
throw new IllegalArgumentException(
"SQLException trying to resolve handle " + id + " to a valid dso");
}
if (dso == null) {
super.handler.logError("Id must be specified: a valid dso handle or 'all'; " + this.id + " could " +
"not be resolved to valid dso handle");
throw new IllegalArgumentException(
"Id must be specified: a valid dso handle or 'all'; " + this.id + " could " +
"not be resolved to valid dso handle");
}
}
} else {
super.handler.logError("Id must be specified: a handle, 'all', or no -i and a -q task queue (-h for " +
"help)");
throw new IllegalArgumentException(
"Id must be specified: a handle, 'all', or no -i and a -q task queue (-h for " +
"help)");
}
}
}

View File

@@ -0,0 +1,85 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.lang3.StringUtils;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
* This Enum holds all the possible options and combinations for the Curation script
*
* @author Maria Verdonck (Atmire) on 23/06/2020
*/
public enum CurationClientOptions {
TASK,
QUEUE,
HELP;
private static List<String> taskOptions;
/**
* This method resolves the CommandLine parameters to figure out which action the curation script should perform
*
* @param commandLine The relevant CommandLine for the curation script
* @return The curation option to be run, parsed from the CommandLine
*/
protected static CurationClientOptions getClientOption(CommandLine commandLine) {
if (commandLine.hasOption("h")) {
return CurationClientOptions.HELP;
} else if (commandLine.hasOption("t") || commandLine.hasOption("T")) {
return CurationClientOptions.TASK;
} else if (commandLine.hasOption("q")) {
return CurationClientOptions.QUEUE;
}
return null;
}
protected static Options constructOptions() {
Options options = new Options();
options.addOption("t", "task", true, "curation task name; options: " + getTaskOptions());
options.addOption("T", "taskfile", true, "file containing curation task names");
options.addOption("i", "id", true,
"Id (handle) of object to perform task on, or 'all' to perform on whole repository");
options.addOption("p", "parameter", true, "a task parameter 'NAME=VALUE'");
options.addOption("q", "queue", true, "name of task queue to process");
options.addOption("e", "eperson", true, "email address of curating eperson");
options.addOption("r", "reporter", true,
"relative or absolute path to the desired report file. Use '-' to report to console. If absent, no " +
"reporting");
options.addOption("s", "scope", true,
"transaction scope to impose: use 'object', 'curation', or 'open'. If absent, 'open' applies");
options.addOption("v", "verbose", false, "report activity to stdout");
options.addOption("h", "help", false, "help");
return options;
}
/**
* Creates the list of task names configured under plugin.named.org.dspace.curate.CurationTask
*
* @return List of task names configured under plugin.named.org.dspace.curate.CurationTask
*/
public static List<String> getTaskOptions() {
if (taskOptions == null) {
ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
String[] taskConfigs = configurationService.getArrayProperty("plugin.named.org.dspace.curate.CurationTask");
taskOptions = new ArrayList<>();
for (String taskConfig : taskConfigs) {
taskOptions.add(StringUtils.substringAfterLast(taskConfig, "=").trim());
}
}
return taskOptions;
}
}
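
A minimal sketch of what `getTaskOptions()` extracts from each configured entry; the configuration value below is only an illustrative example, not necessarily one present in a given installation:

```java
import org.apache.commons.lang3.StringUtils;

public class TaskOptionParsing {
    public static void main(String[] args) {
        // Hypothetical plugin.named.org.dspace.curate.CurationTask entry: "<class> = <task name>"
        String taskConfig = "org.dspace.ctask.general.ProfileFormats = profileformats";
        String taskName = StringUtils.substringAfterLast(taskConfig, "=").trim();
        System.out.println(taskName); // profileformats
    }
}
```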

View File

@@ -0,0 +1,61 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.sql.SQLException;
import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;
/**
* The {@link ScriptConfiguration} for the {@link CurationCli} script
*
* @author Maria Verdonck (Atmire) on 23/06/2020
*/
public class CurationScriptConfiguration<T extends CurationCli> extends ScriptConfiguration<T> {
@Autowired
private AuthorizeService authorizeService;
private Class<T> dspaceRunnableClass;
@Override
public Class<T> getDspaceRunnableClass() {
return this.dspaceRunnableClass;
}
@Override
public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
this.dspaceRunnableClass = dspaceRunnableClass;
}
/**
* Only an admin can run the Curation script via the scripts and processes endpoints.
* @param context The relevant DSpace context
* @return True if currentUser is admin, otherwise false
*/
@Override
public boolean isAllowedToExecute(Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}
@Override
public Options getOptions() {
if (options == null) {
super.options = CurationClientOptions.constructOptions();
}
return options;
}
}

View File

@@ -98,6 +98,7 @@ public class Curator {
communityService = ContentServiceFactory.getInstance().getCommunityService();
itemService = ContentServiceFactory.getInstance().getItemService();
handleService = HandleServiceFactory.getInstance().getHandleService();
resolver = new TaskResolver();
}
/**
@@ -142,10 +143,10 @@ public class Curator {
// performance order currently FIFO - to be revisited
perfList.add(taskName);
} catch (IOException ioE) {
log.error("Task: '" + taskName + "' initialization failure: " + ioE.getMessage());
System.out.println("Task: '" + taskName + "' initialization failure: " + ioE.getMessage());
}
} else {
log.error("Task: '" + taskName + "' does not resolve");
System.out.println("Task: '" + taskName + "' does not resolve");
}
return this;
}
@@ -259,13 +260,6 @@ public class Curator {
/**
* Performs all configured tasks upon DSpace object
* (Community, Collection or Item).
* <P>
* Note: Site-wide tasks will default to running as
* an Anonymous User unless you call the Site-wide task
* via the {@link curate(Context,String)} or
* {@link #curate(Context, DSpaceObject)} method with an
* authenticated Context object.
*
* @param dso the DSpace object
* @throws IOException if IO error
*/
@@ -325,7 +319,7 @@ public class Curator {
taskQ.enqueue(queueId, new TaskQueueEntry(c.getCurrentUser().getName(),
System.currentTimeMillis(), perfList, id));
} else {
log.error("curate - no TaskQueue implemented");
System.out.println("curate - no TaskQueue implemented");
}
}
@@ -346,7 +340,7 @@ public class Curator {
try {
reporter.append(message);
} catch (IOException ex) {
log.error("Task reporting failure", ex);
System.out.println("Task reporting failure: " + ex);
}
}
@@ -552,7 +546,7 @@ public class Curator {
return !suspend(statusCode);
} catch (IOException ioe) {
//log error & pass exception upwards
log.error("Error executing curation task '" + task.getName() + "'", ioe);
System.out.println("Error executing curation task '" + task.getName() + "'; " + ioe);
throw ioe;
}
}
@@ -568,7 +562,7 @@ public class Curator {
return !suspend(statusCode);
} catch (IOException ioe) {
//log error & pass exception upwards
log.error("Error executing curation task '" + task.getName() + "'", ioe);
System.out.println("Error executing curation task '" + task.getName() + "'; " + ioe);
throw ioe;
}
}

View File

@@ -7,6 +7,9 @@
*/
package org.dspace.discovery;
import static java.util.Collections.singletonList;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -31,7 +34,7 @@ public class DiscoverQuery {
**/
private String query;
private List<String> filterQueries;
private String DSpaceObjectFilter = null;
private List<String> dspaceObjectFilters = new ArrayList<>();
private List<String> fieldPresentQueries;
private boolean spellCheck;
@@ -118,20 +121,33 @@ public class DiscoverQuery {
* Sets the DSpace object filter, must be a DSpace Object type integer
* can be used to only return objects from a certain DSpace Object type
*
* @param DSpaceObjectFilter the DSpace object filer
* @param dspaceObjectFilter the DSpace object filter
*/
public void setDSpaceObjectFilter(String DSpaceObjectFilter) {
this.DSpaceObjectFilter = DSpaceObjectFilter;
public void setDSpaceObjectFilter(String dspaceObjectFilter) {
this.dspaceObjectFilters = singletonList(dspaceObjectFilter);
}
/**
* Gets the DSpace object filter
* can be used to only return objects from a certain DSpace Object type
* Adds a DSpace object filter, which must be a DSpace Object type integer.
* Can be used to also return objects from a certain DSpace Object type.
*
* @return the DSpace object filer
* @param dspaceObjectFilter the DSpace object filter
*/
public String getDSpaceObjectFilter() {
return DSpaceObjectFilter;
public void addDSpaceObjectFilter(String dspaceObjectFilter) {
if (isNotBlank(dspaceObjectFilter)) {
this.dspaceObjectFilters.add(dspaceObjectFilter);
}
}
/**
* Gets the DSpace object filters
* can be used to only return objects from certain DSpace Object types
*
* @return the DSpace object filters
*/
public List<String> getDSpaceObjectFilters() {
return dspaceObjectFilters;
}
/**

View File

@@ -7,6 +7,8 @@
*/
package org.dspace.discovery;
import static java.util.stream.Collectors.joining;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
@@ -751,8 +753,13 @@ public class SolrServiceImpl implements SearchService, IndexingService {
String filterQuery = discoveryQuery.getFilterQueries().get(i);
solrQuery.addFilterQuery(filterQuery);
}
if (discoveryQuery.getDSpaceObjectFilter() != null) {
solrQuery.addFilterQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":" + discoveryQuery.getDSpaceObjectFilter());
if (discoveryQuery.getDSpaceObjectFilters() != null) {
solrQuery.addFilterQuery(
discoveryQuery.getDSpaceObjectFilters()
.stream()
.map(filter -> SearchUtils.RESOURCE_TYPE_FIELD + ":" + filter)
.collect(joining(" OR "))
);
}
for (int i = 0; i < discoveryQuery.getFieldPresentQueries().size(); i++) {
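
A minimal sketch of the filter query the new code builds from several DSpace object filters; `search.resourcetype` is used here only as a placeholder for `SearchUtils.RESOURCE_TYPE_FIELD`:

```java
import static java.util.stream.Collectors.joining;

import java.util.Arrays;
import java.util.List;

public class DsoFilterQuery {
    public static void main(String[] args) {
        String resourceTypeField = "search.resourcetype"; // assumed field name
        List<String> filters = Arrays.asList("Item", "Collection");
        String fq = filters.stream()
                           .map(filter -> resourceTypeField + ":" + filter)
                           .collect(joining(" OR "));
        System.out.println(fq); // search.resourcetype:Item OR search.resourcetype:Collection
    }
}
```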

View File

@@ -134,11 +134,13 @@ public class HarvestScheduler implements Runnable {
if (maxActiveThreads == 0) {
maxActiveThreads = 3;
}
minHeartbeat = ConfigurationManager.getIntProperty("oai", "harvester.minHeartbeat") * 1000;
minHeartbeat = ConfigurationManager.getIntProperty("oai", "harvester.minHeartbeat");
minHeartbeat = minHeartbeat * 1000; // multiply by 1000 to convert seconds to ms
if (minHeartbeat == 0) {
minHeartbeat = 30000;
}
maxHeartbeat = ConfigurationManager.getIntProperty("oai", "harvester.maxHeartbeat") * 1000;
maxHeartbeat = ConfigurationManager.getIntProperty("oai", "harvester.maxHeartbeat");
maxHeartbeat = maxHeartbeat * 1000; // multiply by 1000 to convert seconds to ms
if (maxHeartbeat == 0) {
maxHeartbeat = 3600000;
}

View File

@@ -75,6 +75,10 @@ public class CCLicenseConnectorServiceImpl implements CCLicenseConnectorService,
.disableAutomaticRetries()
.setMaxConnTotal(5)
.build();
// disallow DTD parsing to ensure no XXE attacks can occur.
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
parser.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
}
/**

View File

@@ -15,6 +15,7 @@ import java.util.Iterator;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.validator.routines.UrlValidator;
import org.apache.logging.log4j.Logger;
import org.dspace.rdf.RDFUtil;
import org.dspace.services.factory.DSpaceServicesFactory;
@@ -197,6 +198,7 @@ public class Negotiator {
if (extraPathInfo == null) {
extraPathInfo = "";
}
UrlValidator urlValidator = new UrlValidator(UrlValidator.ALLOW_LOCAL_URLS);
StringBuilder urlBuilder = new StringBuilder();
String lang = null;
@@ -256,12 +258,15 @@ public class Negotiator {
urlBuilder.append(handle).append("/").append(extraPathInfo);
}
String url = urlBuilder.toString();
log.debug("Will forward to '" + url + "'.");
response.setStatus(HttpServletResponse.SC_SEE_OTHER);
response.setHeader("Location", url);
response.flushBuffer();
return true;
if (urlValidator.isValid(url)) {
log.debug("Will forward to '" + url + "'.");
response.setStatus(HttpServletResponse.SC_SEE_OTHER);
response.setHeader("Location", url);
response.flushBuffer();
return true;
} else {
throw new IOException("Invalid URL '" + url + "', cannot redirect.");
}
}
// currently we cannot serve statistics as rdf
@@ -287,10 +292,14 @@ public class Negotiator {
urlBuilder.append("/handle/").append(handle);
urlBuilder.append("/").append(lang);
String url = urlBuilder.toString();
log.debug("Will forward to '" + url + "'.");
response.setStatus(HttpServletResponse.SC_SEE_OTHER);
response.setHeader("Location", url);
response.flushBuffer();
return true;
if (urlValidator.isValid(url)) {
log.debug("Will forward to '" + url + "'.");
response.setStatus(HttpServletResponse.SC_SEE_OTHER);
response.setHeader("Location", url);
response.flushBuffer();
return true;
} else {
throw new IOException("Invalid URL '" + url + "', cannot redirect.");
}
}
}
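
The added validation keeps the content negotiator from issuing a redirect to a malformed Location value. A minimal sketch of the check on its own, using the same commons-validator class (the example URLs are illustrative):

```java
import org.apache.commons.validator.routines.UrlValidator;

public class RedirectTargetCheck {
    public static void main(String[] args) {
        // ALLOW_LOCAL_URLS accepts hosts such as "localhost" that the default rules reject.
        UrlValidator urlValidator = new UrlValidator(UrlValidator.ALLOW_LOCAL_URLS);
        System.out.println(urlValidator.isValid("http://localhost:8080/handle/123456789/1")); // true
        System.out.println(urlValidator.isValid("not a valid url"));                          // false
    }
}
```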

View File

@@ -113,6 +113,9 @@ public class ArXivService {
factory.setValidating(false);
factory.setIgnoringComments(true);
factory.setIgnoringElementContentWhitespace(true);
// disallow DTD parsing to ensure no XXE attacks can occur.
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
DocumentBuilder db = factory.newDocumentBuilder();
Document inDoc = db.parse(response.getEntity().getContent());

View File

@@ -102,6 +102,9 @@ public class CiNiiService {
factory.setValidating(false);
factory.setIgnoringComments(true);
factory.setIgnoringElementContentWhitespace(true);
// disallow DTD parsing to ensure no XXE attacks can occur.
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
DocumentBuilder db = factory.newDocumentBuilder();
Document inDoc = db.parse(response.getEntity().getContent());
@@ -178,6 +181,9 @@ public class CiNiiService {
factory.setValidating(false);
factory.setIgnoringComments(true);
factory.setIgnoringElementContentWhitespace(true);
// disallow DTD parsing to ensure no XXE attacks can occur.
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
DocumentBuilder db = factory.newDocumentBuilder();
Document inDoc = db.parse(response.getEntity().getContent());

View File

@@ -99,6 +99,9 @@ public class CrossRefService {
factory.setValidating(false);
factory.setIgnoringComments(true);
factory.setIgnoringElementContentWhitespace(true);
// disallow DTD parsing to ensure no XXE attacks can occur.
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
DocumentBuilder db = factory
.newDocumentBuilder();

View File

@@ -119,6 +119,9 @@ public class PubmedService {
factory.setValidating(false);
factory.setIgnoringComments(true);
factory.setIgnoringElementContentWhitespace(true);
// disallow DTD parsing to ensure no XXE attacks can occur.
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
DocumentBuilder builder;
try {
@@ -156,6 +159,9 @@ public class PubmedService {
factory.setValidating(false);
factory.setIgnoringComments(true);
factory.setIgnoringElementContentWhitespace(true);
// disallow DTD parsing to ensure no XXE attacks can occur.
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
DocumentBuilder builder = factory.newDocumentBuilder();
Document inDoc = builder.parse(stream);
@@ -216,6 +222,9 @@ public class PubmedService {
factory.setValidating(false);
factory.setIgnoringComments(true);
factory.setIgnoringElementContentWhitespace(true);
// disallow DTD parsing to ensure no XXE attacks can occur.
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
DocumentBuilder builder = factory.newDocumentBuilder();
Document inDoc = builder

View File

@@ -135,12 +135,12 @@ public class Version implements ReloadableEntity<Integer> {
return true;
}
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(o);
if (getClass() != objClass) {
if (!getClass().equals(objClass)) {
return false;
}
final Version that = (Version) o;
if (this.getID() != that.getID()) {
if (!this.getID().equals(that.getID())) {
return false;
}

View File

@@ -93,12 +93,12 @@ public class VersionHistory implements ReloadableEntity<Integer> {
return true;
}
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(o);
if (getClass() != objClass) {
if (!getClass().equals(objClass)) {
return false;
}
final VersionHistory that = (VersionHistory) o;
if (this.getID() != that.getID()) {
if (!this.getID().equals(that.getID())) {
return false;
}

View File

@@ -0,0 +1,2 @@
checklinks
requiredmetadata

View File

@@ -19,6 +19,12 @@
<property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataExport"/>
</bean>
<bean id="curate" class="org.dspace.curate.CurationScriptConfiguration">
<property name="description" value="Curation tasks"/>
<property name="dspaceRunnableClass" value="org.dspace.curate.CurationCli"/>
</bean>
<!-- Keep as last script; for test ScriptRestRepository#findOneScriptByNameTest -->
<bean id="mock-script" class="org.dspace.scripts.MockDSpaceRunnableScriptConfiguration" scope="prototype">
<property name="description" value="Mocking a script for testing purposes" />
<property name="dspaceRunnableClass" value="org.dspace.scripts.impl.MockDSpaceRunnableScript"/>

View File

@@ -19,19 +19,29 @@
<context:annotation-config/> <!-- allows us to use spring annotations in beans -->
<bean class="org.dspace.discovery.SolrServiceImpl" id="org.dspace.discovery.SearchService"/>
<bean class="org.dspace.discovery.SolrServiceImpl"
id="org.dspace.discovery.SearchService"/>
<alias name="org.dspace.discovery.SearchService" alias="org.dspace.discovery.IndexingService"/>
<alias name="org.dspace.discovery.SearchService"
alias="org.dspace.discovery.IndexingService"/>
<!-- These beans have been added so that we can mock our AuthoritySearchService in the tests-->
<bean class="org.dspace.authority.MockAuthoritySolrServiceImpl" id="org.dspace.authority.AuthoritySearchService"/>
<alias name="org.dspace.authority.AuthoritySearchService" alias="org.dspace.authority.indexer.AuthorityIndexingService"/>
<bean class="org.dspace.authority.MockAuthoritySolrServiceImpl"
id="org.dspace.authority.AuthoritySearchService"/>
<alias name="org.dspace.authority.AuthoritySearchService"
alias="org.dspace.authority.indexer.AuthorityIndexingService"/>
<bean id="org.dspace.discovery.SolrSearchCore" class="org.dspace.discovery.MockSolrSearchCore" autowire-candidate="true"/>
<bean id="org.dspace.discovery.MockSolrSearchCore"
class="org.dspace.discovery.MockSolrSearchCore"
autowire-candidate="true"/>
<!--<bean class="org.dspace.discovery.SolrServiceIndexOutputPlugin" id="solrServiceIndexOutputPlugin"/>-->
<!--<bean class="org.dspace.discovery.SolrServiceIndexOutputPlugin"
id="solrServiceIndexOutputPlugin"/>-->
<!-- Statistics services are both lazy loaded (by name), as you are likely just using ONE of them and not both -->
<bean id="solrLoggerService" class="org.dspace.statistics.MockSolrLoggerServiceImpl" lazy-init="true"/>
<!-- Statistics services are both lazy loaded (by name), as you are likely
just using ONE of them and not both -->
<bean id="solrLoggerService"
class="org.dspace.statistics.MockSolrLoggerServiceImpl"
lazy-init="true"/>
</beans>

View File

@@ -237,7 +237,7 @@ it, please enter the types and the actual numbers or codes.</hint>
<form name="journalVolumeStep">
<row>
<relation-field>
<relationship-type>isVolumeOfJournal</relationship-type>
<relationship-type>isJournalOfVolume</relationship-type>
<search-configuration>periodical</search-configuration>
<filter>creativework.publisher:somepublishername</filter>
<label>Journal</label>

View File

@@ -0,0 +1,3 @@
<?xml version='1.0'?>
<!-- This empty configuration is required to start EmbeddedSolrServer for Integration Tests (see MockSolrServer) -->
<solr/>

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.test;
package org.dspace;
import static org.junit.Assert.fail;
@@ -17,7 +17,7 @@ import java.util.TimeZone;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.builder.AbstractBuilder;
import org.dspace.builder.AbstractBuilder;
import org.dspace.servicemanager.DSpaceKernelImpl;
import org.dspace.servicemanager.DSpaceKernelInit;
import org.junit.AfterClass;
@@ -90,8 +90,9 @@ public class AbstractDSpaceIntegrationTest {
}
/**
* This method will be run after all tests finish as per @AfterClass. It
* This method will be run after all tests finish as per @AfterClass. It
* will clean resources initialized by the @BeforeClass methods.
* @throws java.sql.SQLException
*/
@AfterClass
public static void destroyTestEnvironment() throws SQLException {

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.test;
package org.dspace;
import static org.junit.Assert.fail;
@@ -14,21 +14,20 @@ import java.sql.SQLException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.app.launcher.ScriptLauncher;
import org.dspace.app.rest.builder.AbstractBuilder;
import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
import org.dspace.authority.AuthoritySearchService;
import org.dspace.authority.MockAuthoritySolrServiceImpl;
import org.dspace.authorize.AuthorizeException;
import org.dspace.builder.AbstractBuilder;
import org.dspace.content.Community;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
import org.dspace.discovery.MockSolrSearchCore;
import org.dspace.discovery.SolrSearchCore;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.eperson.service.GroupService;
import org.dspace.kernel.ServiceManager;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.statistics.MockSolrLoggerServiceImpl;
import org.dspace.storage.rdbms.DatabaseUtils;
@@ -181,21 +180,20 @@ public class AbstractIntegrationTestWithDatabase extends AbstractDSpaceIntegrati
parentCommunity = null;
cleanupContext();
ServiceManager serviceManager = DSpaceServicesFactory.getInstance().getServiceManager();
// Clear the search core.
MockSolrSearchCore searchService = DSpaceServicesFactory.getInstance()
.getServiceManager()
.getServiceByName(SolrSearchCore.class.getName(), MockSolrSearchCore.class);
MockSolrSearchCore searchService = serviceManager
.getServiceByName(null, MockSolrSearchCore.class);
searchService.reset();
MockSolrLoggerServiceImpl statisticsService = DSpaceServicesFactory.getInstance()
.getServiceManager()
.getServiceByName("solrLoggerService", MockSolrLoggerServiceImpl.class);
MockSolrLoggerServiceImpl statisticsService = serviceManager
.getServiceByName(null, MockSolrLoggerServiceImpl.class);
statisticsService.reset();
MockAuthoritySolrServiceImpl authorityService = DSpaceServicesFactory.getInstance()
.getServiceManager()
.getServiceByName(AuthoritySearchService.class.getName(), MockAuthoritySolrServiceImpl.class);
MockAuthoritySolrServiceImpl authorityService = serviceManager
.getServiceByName(null, MockAuthoritySolrServiceImpl.class);
authorityService.reset();
// Reload our ConfigurationService (to reset configs to defaults again)
DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig();
@@ -209,6 +207,7 @@ public class AbstractIntegrationTestWithDatabase extends AbstractDSpaceIntegrati
/**
* Utility method to cleanup a created Context object (to save memory).
* This can also be used by individual tests to cleanup context objects they create.
* @throws java.sql.SQLException passed through.
*/
protected void cleanupContext() throws SQLException {
// If context still valid, flush all database changes and close it
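The cleanup refactor above drops the explicit bean names and resolves each mock service purely by its type, passing null for the name. A minimal sketch of that lookup idiom, assuming a DSpace kernel is already initialised, as it is in these integration tests (the class and method names here are illustrative):
```
import org.dspace.kernel.ServiceManager;
import org.dspace.services.factory.DSpaceServicesFactory;

public class ServiceLookupSketch {
    // Passing null for the name asks the ServiceManager to match on the requested
    // class alone, which is what the refactored cleanup code relies on.
    public static <T> T lookupByType(Class<T> type) {
        ServiceManager serviceManager = DSpaceServicesFactory.getInstance().getServiceManager();
        return serviceManager.getServiceByName(null, type);
    }
}
```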

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.test;
package org.dspace;
public class ExitException extends SecurityException {
private final int status;

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.test;
package org.dspace;
import java.security.Permission;

View File

@@ -0,0 +1,62 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import static junit.framework.TestCase.assertTrue;
import java.io.File;
import java.io.FileInputStream;
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.IOUtils;
import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.app.launcher.ScriptLauncher;
import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.junit.Test;
public class MetadataExportIT
extends AbstractIntegrationTestWithDatabase {
private final ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
@Test
public void metadataExportToCsvTest() throws Exception {
context.turnOffAuthorisationSystem();
Community community = CommunityBuilder.createCommunity(context)
.build();
Collection collection = CollectionBuilder.createCollection(context, community)
.build();
Item item = ItemBuilder.createItem(context, collection)
.withAuthor("Donald, Smith")
.build();
context.restoreAuthSystemState();
String fileLocation = configurationService.getProperty("dspace.dir")
+ testProps.get("test.exportcsv").toString();
String[] args = new String[] {"metadata-export",
"-i", String.valueOf(item.getHandle()),
"-f", fileLocation};
TestDSpaceRunnableHandler testDSpaceRunnableHandler
= new TestDSpaceRunnableHandler();
ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl),
testDSpaceRunnableHandler, kernelImpl);
File file = new File(fileLocation);
String fileContent = IOUtils.toString(new FileInputStream(file), StandardCharsets.UTF_8);
assertTrue(fileContent.contains("Donald, Smith"));
assertTrue(fileContent.contains(String.valueOf(item.getID())));
}
}

View File

@@ -1,71 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import static junit.framework.TestCase.assertTrue;
import java.io.File;
import java.io.FileInputStream;
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.IOUtils;
import org.dspace.AbstractIntegrationTest;
import org.dspace.app.launcher.ScriptLauncher;
import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.InstallItemService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.WorkspaceItemService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.junit.Test;
public class MetadataExportTest extends AbstractIntegrationTest {
private ItemService itemService = ContentServiceFactory.getInstance().getItemService();
private CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
private CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
private WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService();
private InstallItemService installItemService = ContentServiceFactory.getInstance().getInstallItemService();
private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
@Test
public void metadataExportToCsvTest() throws Exception {
context.turnOffAuthorisationSystem();
Community community = communityService.create(null, context);
Collection collection = collectionService.create(context, community);
WorkspaceItem wi = workspaceItemService.create(context, collection, true);
Item item = wi.getItem();
itemService.addMetadata(context, item, "dc", "contributor", "author", null, "Donald, Smith");
item = installItemService.installItem(context, wi);
context.restoreAuthSystemState();
String fileLocation = configurationService.getProperty("dspace.dir") + testProps.get("test.exportcsv")
.toString();
String[] args = new String[] {"metadata-export", "-i", String.valueOf(item.getHandle()), "-f", fileLocation};
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);
File file = new File(fileLocation);
String fileContent = IOUtils.toString(new FileInputStream(file), StandardCharsets.UTF_8);
assertTrue(fileContent.contains("Donald, Smith"));
assertTrue(fileContent.contains(String.valueOf(item.getID())));
context.turnOffAuthorisationSystem();
itemService.delete(context, itemService.find(context, item.getID()));
collectionService.delete(context, collectionService.find(context, collection.getID()));
communityService.delete(context, communityService.find(context, community.getID()));
context.restoreAuthSystemState();
}
}

View File

@@ -22,16 +22,16 @@ import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.ItemService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.junit.Test;
public class MetadataImportTest extends AbstractIntegrationTest {
private ItemService itemService = ContentServiceFactory.getInstance().getItemService();
private CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
private CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
private final ItemService itemService
= ContentServiceFactory.getInstance().getItemService();
private final CollectionService collectionService
= ContentServiceFactory.getInstance().getCollectionService();
private final CommunityService communityService
= ContentServiceFactory.getInstance().getCommunityService();
@Test
public void metadataImportTest() throws Exception {

View File

@@ -21,4 +21,8 @@ public class MockAuthoritySolrServiceImpl extends AuthoritySolrServiceImpl imple
//We don't use SOLR in the tests of this module
solr = null;
}
public void reset() {
// This method intentionally left blank.
}
}

View File

@@ -20,7 +20,7 @@ import org.junit.Test;
* @author Andrea Bollini (andrea.bollini at 4science.it)
*
*/
public class AuthorizeConfigIntegrationTest extends AbstractIntegrationTest {
public class AuthorizeConfigIT extends AbstractIntegrationTest {
@Test
public void testReloadConfiguration() {

View File

@@ -5,18 +5,18 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.sql.SQLException;
import java.util.List;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.builder.util.AbstractBuilderCleanupUtil;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.builder.util.AbstractBuilderCleanupUtil;
import org.dspace.content.Bitstream;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamFormatService;
@@ -55,8 +55,8 @@ import org.dspace.xmlworkflow.storedcomponents.service.XmlWorkflowItemService;
/**
* Abstract builder class that holds references to all available services
*
* @param <T> This param represents the Model object for the Builder
* @param <S> This param represents the Service object for the builder
* @param <T> This parameter represents the Model object for the Builder
* @param <S> This parameter represents the Service object for the builder
* @author Jonas Van Goolen - (jonas@atmire.com)
*/
public abstract class AbstractBuilder<T, S> {
@@ -96,7 +96,8 @@ public abstract class AbstractBuilder<T, S> {
* This static class will make sure that the objects built with the builders are disposed of in a foreign-key
* constraint safe manner by predefining an order
*/
private static AbstractBuilderCleanupUtil abstractBuilderCleanupUtil = new AbstractBuilderCleanupUtil();
private static final AbstractBuilderCleanupUtil abstractBuilderCleanupUtil
= new AbstractBuilderCleanupUtil();
/**
* log4j category
*/

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import org.dspace.core.Context;
import org.dspace.core.ReloadableEntity;
@@ -13,6 +13,8 @@ import org.dspace.service.DSpaceCRUDService;
/**
* @author Jonas Van Goolen - (jonas@atmire.com)
*
* @param <T> A specific kind of ReloadableEntity.
*/
public abstract class AbstractCRUDBuilder<T extends ReloadableEntity> extends AbstractBuilder<T, DSpaceCRUDService> {
@@ -20,8 +22,10 @@ public abstract class AbstractCRUDBuilder<T extends ReloadableEntity> extends Ab
super(context);
}
@Override
protected abstract DSpaceCRUDService getService();
@Override
public abstract T build();
public void delete(T dso) throws Exception {

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.sql.SQLException;
import java.util.Date;
@@ -43,12 +43,15 @@ public abstract class AbstractDSpaceObjectBuilder<T extends DSpaceObject>
this.context = context;
}
@Override
public abstract void cleanup() throws Exception;
@Override
protected abstract DSpaceObjectService<T> getService();
@Override
protected <B> B handleException(final Exception e) {
log.error(e.getMessage(), e);
return null;
@@ -231,13 +234,15 @@ public abstract class AbstractDSpaceObjectBuilder<T extends DSpaceObject>
return (B) this;
}
@Override
public abstract T build() throws SQLException, AuthorizeException;
@Override
public void delete(Context c, T dso) throws Exception {
if (dso != null) {
getService().delete(c, dso);
}
c.complete();
indexingService.commit();
if (dso != null) {
getService().delete(c, dso);
}
c.complete();
indexingService.commit();
}
}

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.io.InputStream;
@@ -129,6 +129,7 @@ public class BitstreamBuilder extends AbstractDSpaceObjectBuilder<Bitstream> {
return this;
}
@Override
public Bitstream build() {
try {
bitstreamService.update(context, bitstream);
@@ -152,7 +153,7 @@ public class BitstreamBuilder extends AbstractDSpaceObjectBuilder<Bitstream> {
@Override
public void cleanup() throws Exception {
try (Context c = new Context()) {
try (Context c = new Context()) {
c.turnOffAuthorisationSystem();
// Ensure object and any related objects are reloaded before checking to see what needs cleanup
bitstream = c.reloadEntity(bitstream);
@@ -163,6 +164,7 @@ public class BitstreamBuilder extends AbstractDSpaceObjectBuilder<Bitstream> {
}
}
@Override
protected DSpaceObjectService<Bitstream> getService() {
return bitstreamService;
}

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.sql.SQLException;
@@ -71,7 +71,6 @@ public class BitstreamFormatBuilder extends AbstractCRUDBuilder<BitstreamFormat>
log.error(e);
} catch (AuthorizeException e) {
log.error(e);
;
}
return bitstreamFormat;
}

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.sql.SQLException;
@@ -25,7 +25,7 @@ public class BundleBuilder extends AbstractDSpaceObjectBuilder<Bundle> {
private Bundle bundle;
private Item item;
private String name;
private List<Bitstream> bitstreams = new ArrayList<>();
private final List<Bitstream> bitstreams = new ArrayList<>();
protected BundleBuilder(Context context) {
super(context);
@@ -52,6 +52,7 @@ public class BundleBuilder extends AbstractDSpaceObjectBuilder<Bundle> {
return this;
}
@Override
public void cleanup() throws Exception {
try (Context c = new Context()) {
c.turnOffAuthorisationSystem();
@@ -64,10 +65,12 @@ public class BundleBuilder extends AbstractDSpaceObjectBuilder<Bundle> {
}
}
@Override
protected DSpaceObjectService<Bundle> getService() {
return bundleService;
}
@Override
public Bundle build() throws SQLException, AuthorizeException {
bundle = bundleService.create(context, item, name);

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.InputStream;

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.io.InputStream;

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.io.InputStream;

View File

@@ -5,12 +5,14 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.sql.SQLException;
import java.util.UUID;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.service.DSpaceObjectService;
import org.dspace.core.Context;
@@ -19,6 +21,7 @@ import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
public class EPersonBuilder extends AbstractDSpaceObjectBuilder<EPerson> {
private static final Logger LOG = LogManager.getLogger(EPersonBuilder.class);
private EPerson ePerson;
@@ -28,7 +31,7 @@ public class EPersonBuilder extends AbstractDSpaceObjectBuilder<EPerson> {
@Override
public void cleanup() throws Exception {
try (Context c = new Context()) {
try (Context c = new Context()) {
c.turnOffAuthorisationSystem();
// Ensure object and any related objects are reloaded before checking to see what needs cleanup
ePerson = c.reloadEntity(ePerson);
@@ -36,23 +39,21 @@ public class EPersonBuilder extends AbstractDSpaceObjectBuilder<EPerson> {
delete(c, ePerson);
c.complete();
}
}
}
}
@Override
protected DSpaceObjectService<EPerson> getService() {
return ePersonService;
}
@Override
public EPerson build() {
try {
ePersonService.update(context, ePerson);
indexingService.commit();
} catch (SearchServiceException e) {
e.printStackTrace();
} catch (SQLException e) {
e.printStackTrace();
} catch (AuthorizeException e) {
e.printStackTrace();
} catch (SearchServiceException | SQLException | AuthorizeException e) {
LOG.warn("Failed to complete the EPerson", e);
}
return ePerson;
}
@@ -65,10 +66,8 @@ public class EPersonBuilder extends AbstractDSpaceObjectBuilder<EPerson> {
private EPersonBuilder create() {
try {
ePerson = ePersonService.create(context);
} catch (SQLException e) {
e.printStackTrace();
} catch (AuthorizeException e) {
e.printStackTrace();
} catch (SQLException | AuthorizeException e) {
LOG.warn("Failed to create the EPerson", e);
}
return this;
}

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.sql.SQLException;
@@ -53,6 +53,7 @@ public class EntityTypeBuilder extends AbstractBuilder<EntityType, EntityTypeSer
}
}
@Override
public EntityType build() {
try {
@@ -91,7 +92,7 @@ public class EntityTypeBuilder extends AbstractBuilder<EntityType, EntityTypeSer
this.entityType = entityTypeService.create(context, entityType);
} catch (SQLException | AuthorizeException e) {
e.printStackTrace();
log.warn("Failed to create the EntityType", e);
}
return this;

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.sql.SQLException;
@@ -34,7 +34,7 @@ public class GroupBuilder extends AbstractDSpaceObjectBuilder<Group> {
@Override
public void cleanup() throws Exception {
try (Context c = new Context()) {
try (Context c = new Context()) {
c.turnOffAuthorisationSystem();
// Ensure object and any related objects are reloaded before checking to see what needs cleanup
group = c.reloadEntity(group);
@@ -42,7 +42,7 @@ public class GroupBuilder extends AbstractDSpaceObjectBuilder<Group> {
delete(c, group);
c.complete();
}
}
}
}
public static GroupBuilder createGroup(final Context context) {

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.sql.SQLException;

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.sql.SQLException;
@@ -64,17 +64,9 @@ public class MetadataFieldBuilder extends AbstractBuilder<MetadataField, Metadat
context.dispatchEvents();
indexingService.commit();
} catch (SearchServiceException e) {
log.error(e);
} catch (SQLException e) {
log.error(e);
} catch (AuthorizeException e) {
log.error(e);
;
} catch (NonUniqueMetadataException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} catch (SearchServiceException | SQLException | AuthorizeException
| NonUniqueMetadataException | IOException e) {
log.error("Failed to complete MetadataField", e);
}
return metadataField;
}
@@ -104,7 +96,7 @@ public class MetadataFieldBuilder extends AbstractBuilder<MetadataField, Metadat
MetadataField metadataField = metadataFieldService.find(c, id);
if (metadataField != null) {
try {
metadataFieldService.delete(c, metadataField);
metadataFieldService.delete(c, metadataField);
} catch (AuthorizeException e) {
throw new RuntimeException(e);
}
@@ -141,7 +133,7 @@ public class MetadataFieldBuilder extends AbstractBuilder<MetadataField, Metadat
metadataField = metadataFieldService
.create(context, schema, element, qualifier, scopeNote);
} catch (NonUniqueMetadataException e) {
e.printStackTrace();
log.error("Failed to create MetadataField", e);
}
return this;

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.sql.SQLException;
@@ -63,15 +63,10 @@ public class MetadataSchemaBuilder extends AbstractBuilder<MetadataSchema, Metad
context.dispatchEvents();
indexingService.commit();
} catch (SearchServiceException e) {
} catch (SearchServiceException | SQLException | AuthorizeException e) {
log.error(e);
} catch (SQLException e) {
log.error(e);
} catch (AuthorizeException e) {
log.error(e);
;
} catch (NonUniqueMetadataException e) {
e.printStackTrace();
log.error("Failed to complete MetadataSchema", e);
}
return metadataSchema;
}
@@ -101,7 +96,7 @@ public class MetadataSchemaBuilder extends AbstractBuilder<MetadataSchema, Metad
MetadataSchema metadataSchema = metadataSchemaService.find(c, id);
if (metadataSchema != null) {
try {
metadataSchemaService.delete(c, metadataSchema);
metadataSchemaService.delete(c, metadataSchema);
} catch (AuthorizeException e) {
throw new RuntimeException(e);
}
@@ -123,7 +118,7 @@ public class MetadataSchemaBuilder extends AbstractBuilder<MetadataSchema, Metad
try {
metadataSchema = metadataSchemaService.create(context, name, namespace);
} catch (NonUniqueMetadataException e) {
e.printStackTrace();
log.error("Failed to create MetadataSchema", e);
}
return this;

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.InputStream;

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.sql.SQLException;
@@ -57,6 +57,7 @@ public class ProcessBuilder extends AbstractBuilder<Process, ProcessService> {
}
}
@Override
public Process build() {
try {
processService.update(context, process);
@@ -68,6 +69,7 @@ public class ProcessBuilder extends AbstractBuilder<Process, ProcessService> {
return process;
}
@Override
protected ProcessService getService() {
return processService;
}

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.sql.SQLException;
@@ -56,6 +56,7 @@ public class RelationshipBuilder extends AbstractBuilder<Relationship, Relations
}
}
@Override
public Relationship build() {
try {
@@ -117,7 +118,7 @@ public class RelationshipBuilder extends AbstractBuilder<Relationship, Relations
try {
relationship = relationshipService.create(context, leftItem, rightItem, relationshipType, 0, 0);
} catch (SQLException | AuthorizeException e) {
e.printStackTrace();
log.warn("Failed to create relationship", e);
}
return this;

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.sql.SQLException;
import java.util.List;
@@ -62,6 +62,7 @@ public class RelationshipTypeBuilder extends AbstractBuilder<RelationshipType, R
}
}
@Override
public RelationshipType build() {
try {
@@ -116,7 +117,7 @@ public class RelationshipTypeBuilder extends AbstractBuilder<RelationshipType, R
leftCardinalityMax, rightCardinalityMin, rightCardinalityMax);
} catch (SQLException | AuthorizeException e) {
e.printStackTrace();
log.error("Failed to create RelationshipType", e);
}
return this;

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.sql.SQLException;

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import org.dspace.content.Site;
import org.dspace.content.service.DSpaceObjectService;

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.io.InputStream;

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder;
package org.dspace.builder;
import java.io.IOException;
import java.io.InputStream;

View File

@@ -5,32 +5,32 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.builder.util;
package org.dspace.builder.util;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.dspace.app.rest.builder.AbstractBuilder;
import org.dspace.app.rest.builder.BitstreamBuilder;
import org.dspace.app.rest.builder.BitstreamFormatBuilder;
import org.dspace.app.rest.builder.ClaimedTaskBuilder;
import org.dspace.app.rest.builder.CollectionBuilder;
import org.dspace.app.rest.builder.CommunityBuilder;
import org.dspace.app.rest.builder.EPersonBuilder;
import org.dspace.app.rest.builder.EntityTypeBuilder;
import org.dspace.app.rest.builder.GroupBuilder;
import org.dspace.app.rest.builder.ItemBuilder;
import org.dspace.app.rest.builder.MetadataFieldBuilder;
import org.dspace.app.rest.builder.MetadataSchemaBuilder;
import org.dspace.app.rest.builder.PoolTaskBuilder;
import org.dspace.app.rest.builder.ProcessBuilder;
import org.dspace.app.rest.builder.RelationshipBuilder;
import org.dspace.app.rest.builder.RelationshipTypeBuilder;
import org.dspace.app.rest.builder.SiteBuilder;
import org.dspace.app.rest.builder.WorkflowItemBuilder;
import org.dspace.app.rest.builder.WorkspaceItemBuilder;
import org.dspace.builder.AbstractBuilder;
import org.dspace.builder.BitstreamBuilder;
import org.dspace.builder.BitstreamFormatBuilder;
import org.dspace.builder.ClaimedTaskBuilder;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.EPersonBuilder;
import org.dspace.builder.EntityTypeBuilder;
import org.dspace.builder.GroupBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.builder.MetadataFieldBuilder;
import org.dspace.builder.MetadataSchemaBuilder;
import org.dspace.builder.PoolTaskBuilder;
import org.dspace.builder.ProcessBuilder;
import org.dspace.builder.RelationshipBuilder;
import org.dspace.builder.RelationshipTypeBuilder;
import org.dspace.builder.SiteBuilder;
import org.dspace.builder.WorkflowItemBuilder;
import org.dspace.builder.WorkspaceItemBuilder;
/**
* This class will ensure that all the builders that are registered will be cleaned up in the order as defined
@@ -39,7 +39,8 @@ import org.dspace.app.rest.builder.WorkspaceItemBuilder;
*/
public class AbstractBuilderCleanupUtil {
private LinkedHashMap<String, List<AbstractBuilder>> map = new LinkedHashMap<>();
private final LinkedHashMap<String, List<AbstractBuilder>> map
= new LinkedHashMap<>();
/**
* Constructor that will initialize the Map with a predefined order for deletion

View File

@@ -8,23 +8,27 @@
package org.dspace.curate;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.HashMap;
import java.util.Map;
import org.dspace.AbstractUnitTest;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.SiteService;
import org.dspace.core.factory.CoreServiceFactory;
import org.dspace.ctask.general.NoOpCurationTask;
import org.dspace.services.ConfigurationService;
import org.junit.Test;
/**
*
* @author mhwood
*/
public class CuratorTest
extends AbstractUnitTest {
public class CuratorTest extends AbstractUnitTest {
private static final SiteService SITE_SERVICE = ContentServiceFactory.getInstance().getSiteService();
static final String RUN_PARAMETER_NAME = "runParameter";
@@ -32,20 +36,24 @@ public class CuratorTest
static final String TASK_PROPERTY_NAME = "taskProperty";
static final String TASK_PROPERTY_VALUE = "a property";
/** Value of a known runtime parameter, if any. */
/**
* Value of a known runtime parameter, if any.
*/
static String runParameter;
/** Value of a known task property, if any. */
/**
* Value of a known task property, if any.
*/
static String taskProperty;
/**
* Test of curate method, of class Curator.
* Currently this just tests task properties and run parameters.
*
* @throws java.lang.Exception passed through.
*/
@Test
public void testCurate_DSpaceObject()
throws Exception {
public void testCurate_DSpaceObject() throws Exception {
System.out.println("curate");
final String TASK_NAME = "dummyTask";
@@ -53,7 +61,7 @@ public class CuratorTest
// Configure the task to be run.
ConfigurationService cfg = kernelImpl.getConfigurationService();
cfg.setProperty("plugin.named.org.dspace.curate.CurationTask",
DummyTask.class.getName() + " = " + TASK_NAME);
DummyTask.class.getName() + " = " + TASK_NAME);
cfg.setProperty(TASK_NAME + '.' + TASK_PROPERTY_NAME, TASK_PROPERTY_VALUE);
// Get and configure a Curator.
@@ -72,12 +80,40 @@ public class CuratorTest
// Check the result.
System.out.format("Task %s result was '%s'%n",
TASK_NAME, instance.getResult(TASK_NAME));
TASK_NAME, instance.getResult(TASK_NAME));
System.out.format("Task %s status was %d%n",
TASK_NAME, instance.getStatus(TASK_NAME));
TASK_NAME, instance.getStatus(TASK_NAME));
assertEquals("Unexpected task status",
Curator.CURATE_SUCCESS, instance.getStatus(TASK_NAME));
Curator.CURATE_SUCCESS, instance.getStatus(TASK_NAME));
assertEquals("Wrong run parameter", RUN_PARAMETER_VALUE, runParameter);
assertEquals("Wrong task property", TASK_PROPERTY_VALUE, taskProperty);
}
@Test
public void testCurate_NoOpTask() throws Exception {
CoreServiceFactory.getInstance().getPluginService().clearNamedPluginClasses();
final String TASK_NAME = "noop";
// Configure the noop task to be run.
ConfigurationService cfg = kernelImpl.getConfigurationService();
cfg.setProperty("plugin.named.org.dspace.curate.CurationTask",
NoOpCurationTask.class.getName() + " = " + TASK_NAME);
// Get and configure a Curator.
Curator curator = new Curator();
StringBuilder reporterOutput = new StringBuilder();
curator.setReporter(reporterOutput); // Send any report to our StringBuilder.
curator.addTask(TASK_NAME);
Item item = mock(Item.class);
when(item.getType()).thenReturn(2);
when(item.getHandle()).thenReturn("testHandle");
curator.curate(context, item);
assertEquals(Curator.CURATE_SUCCESS, curator.getStatus(TASK_NAME));
assertEquals(reporterOutput.toString(), "No operation performed on testHandle");
}
}

View File

@@ -7,19 +7,35 @@
*/
package org.dspace.discovery;
import org.dspace.solr.MockSolrServer;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.stereotype.Service;
/**
* Mock SOLR service for the Search Core
* Mock SOLR service for the Search Core. Manages an in-process Solr server
* with an in-memory "search" core.
*/
@Service
public class MockSolrSearchCore extends SolrSearchCore implements InitializingBean {
public class MockSolrSearchCore extends SolrSearchCore
implements InitializingBean, DisposableBean {
private MockSolrServer mockSolrServer;
@Override
public void afterPropertiesSet() throws Exception {
//We don't use SOLR in the tests of this module
solr = null;
mockSolrServer = new MockSolrServer("search");
solr = mockSolrServer.getSolrServer();
}
/**
* Reset the core for the next test. See {@link MockSolrServer#reset()}.
*/
public void reset() {
mockSolrServer.reset();
}
@Override
public void destroy() throws Exception {
mockSolrServer.destroy();
}
}
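MockSolrSearchCore now implements both of Spring's lifecycle callbacks, so the embedded "search" core is started once the bean is wired and shut down when the test context is destroyed. A generic sketch of that acquire/release pattern (class and method names are illustrative, not part of DSpace):
```
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;

public class ManagedResourceBean implements InitializingBean, DisposableBean {

    private AutoCloseable resource;

    @Override
    public void afterPropertiesSet() throws Exception {
        resource = openResource(); // e.g. start an embedded server, as above
    }

    @Override
    public void destroy() throws Exception {
        if (resource != null) {
            resource.close(); // mirror of mockSolrServer.destroy()
        }
    }

    private AutoCloseable openResource() {
        return () -> { }; // placeholder for the real acquisition
    }
}
```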

View File

@@ -29,6 +29,7 @@ public class MockCCLicenseConnectorServiceImpl extends CCLicenseConnectorService
* @param language - the language
* @return a map of mocked licenses with the id and the license
*/
@Override
public Map<String, CCLicense> retrieveLicenses(String language) {
Map<String, CCLicense> ccLicenses = new HashMap<>();
CCLicense mockLicense1 = createMockLicense(1, new int[]{3, 2, 3});
@@ -89,6 +90,7 @@ public class MockCCLicenseConnectorServiceImpl extends CCLicenseConnectorService
* @param answerMap - the answers to the different field questions
* @return the CC License URI
*/
@Override
public String retrieveRightsByQuestion(final String licenseId,
final String language,
final Map<String, String> answerMap) {
@@ -105,6 +107,7 @@ public class MockCCLicenseConnectorServiceImpl extends CCLicenseConnectorService
* @return a mock license RDF document or null when the URI contains invalid
* @throws IOException
*/
@Override
public Document retrieveLicenseRDFDoc(String licenseURI) throws IOException {
if (!StringUtils.contains(licenseURI, "invalid")) {
InputStream cclicense = null;

View File

@@ -19,7 +19,7 @@ import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
import org.apache.solr.core.CoreContainer;
import org.dspace.app.rest.test.AbstractDSpaceIntegrationTest;
import org.dspace.AbstractDSpaceIntegrationTest;
/**
* Factory of connections to an in-process embedded Solr service.
@@ -110,7 +110,7 @@ public class MockSolrServer {
server.deleteByQuery("*:*");
server.commit();
} catch (SolrServerException | IOException e) {
e.printStackTrace(System.err);
log.error("Failed to empty Solr index: {}", e.getMessage(), e);
}
loadedCores.put(coreName, server);

View File

@@ -27,27 +27,29 @@ import com.maxmind.geoip2.record.MaxMind;
import com.maxmind.geoip2.record.Postal;
import com.maxmind.geoip2.record.RepresentedCountry;
import com.maxmind.geoip2.record.Traits;
import org.dspace.solr.MockSolrServer;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.stereotype.Service;
/**
* Mock service that uses an embedded SOLR server for the statistics core.
* <p>
* <strong>NOTE:</strong> this class is overridden by one <em>of the same name</em>
* defined in dspace-server-webapp and declared as a bean there.
* See {@code test/data/dspaceFolder/config/spring/api/solr-services.xml}. Some kind of classpath
* magic makes this work.
*/
@Service
public class MockSolrLoggerServiceImpl
extends SolrLoggerServiceImpl
implements InitializingBean {
implements InitializingBean, DisposableBean {
private MockSolrServer mockSolrServer;
public MockSolrLoggerServiceImpl() {
}
@Override
public void afterPropertiesSet() throws Exception {
//We don't use SOLR in the tests of this module
solr = null;
// Initialize our service with a Mock Solr statistics core
mockSolrServer = new MockSolrServer("statistics");
solr = mockSolrServer.getSolrServer();
// Mock GeoIP's DatabaseReader
DatabaseReader reader = mock(DatabaseReader.class);
@@ -58,14 +60,16 @@ public class MockSolrLoggerServiceImpl
}
/**
* A mock/fake GeoIP CityResponse, which will be used for *all* test statistical requests
* A mock/fake GeoIP CityResponse, which will be used for *all* test
* statistical requests.
*
* @return faked CityResponse
*/
private CityResponse mockCityResponse() {
List<String> cityNames = new ArrayList<String>(Collections.singleton("New York"));
List<String> cityNames = new ArrayList<>(Collections.singleton("New York"));
City city = new City(cityNames, 1, 1, new HashMap());
List<String> countryNames = new ArrayList<String>(Collections.singleton("United States"));
List<String> countryNames = new ArrayList<>(Collections.singleton("United States"));
Country country = new Country(countryNames, 1, 1, "US", new HashMap());
Location location = new Location(1, 1, 40.760498D, -73.9933D, 501, 1, "EST");
@@ -73,7 +77,17 @@ public class MockSolrLoggerServiceImpl
Postal postal = new Postal("10036", 1);
return new CityResponse(city, new Continent(), country, location, new MaxMind(), postal,
country, new RepresentedCountry(), new ArrayList<>(0),
new Traits());
country, new RepresentedCountry(), new ArrayList<>(0),
new Traits());
}
/** Reset the core for the next test. See {@link MockSolrServer#reset()}. */
public void reset() {
mockSolrServer.reset();
}
@Override
public void destroy() throws Exception {
mockSolrServer.destroy();
}
}

View File

@@ -10,8 +10,10 @@ package org.dspace.xmlworkflow;
import static junit.framework.TestCase.assertEquals;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.sql.SQLException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.AbstractUnitTest;
import org.dspace.authorize.AuthorizeException;
@@ -35,9 +37,11 @@ import org.junit.Test;
*/
public class XmlWorkflowFactoryTest extends AbstractUnitTest {
private CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
private CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
private XmlWorkflowFactory xmlWorkflowFactory
private final CollectionService collectionService
= ContentServiceFactory.getInstance().getCollectionService();
private final CommunityService communityService
= ContentServiceFactory.getInstance().getCommunityService();
private final XmlWorkflowFactory xmlWorkflowFactory
= new DSpace().getServiceManager().getServiceByName("xmlWorkflowFactory",
XmlWorkflowFactoryImpl.class);
private Community owningCommunity;
@@ -47,7 +51,7 @@ public class XmlWorkflowFactoryTest extends AbstractUnitTest {
/**
* log4j category
*/
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(XmlWorkflowFactoryTest.class);
private static final Logger log = LogManager.getLogger(XmlWorkflowFactoryTest.class);
/**
* This method will be run before every test as per @Before. It will
@@ -94,7 +98,7 @@ public class XmlWorkflowFactoryTest extends AbstractUnitTest {
this.collectionService.delete(context, this.nonMappedCollection);
this.collectionService.delete(context, this.mappedCollection);
this.communityService.delete(context, this.owningCommunity);
} catch (Exception e) {
} catch (IOException | SQLException | AuthorizeException e) {
log.error("Error in destroy", e);
}
@@ -112,12 +116,12 @@ public class XmlWorkflowFactoryTest extends AbstractUnitTest {
@Test
public void workflowMapping_NonMappedCollection() throws WorkflowConfigurationException {
Workflow workflow = xmlWorkflowFactory.getWorkflow(this.nonMappedCollection);
assertEquals(workflow.getID(), "defaultWorkflow");
assertEquals("defaultWorkflow", workflow.getID());
}
@Test
public void workflowMapping_MappedCollection() throws WorkflowConfigurationException {
Workflow workflow = xmlWorkflowFactory.getWorkflow(this.mappedCollection);
assertEquals(workflow.getID(), "selectSingleReviewer");
assertEquals("selectSingleReviewer", workflow.getID());
}
}
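The two assertion changes above also swap the arguments into JUnit's (expected, actual) order, so a failing test labels the values correctly. A small, self-contained illustration (the class name is invented; the workflow id comes from the test above):
```
import static org.junit.Assert.assertEquals;

import org.junit.Test;

public class ArgumentOrderExample {

    @Test
    public void expectedValueComesFirst() {
        String actualWorkflowId = "defaultWorkflow"; // value under test
        // assertEquals(expected, actual): reversing the arguments would swap the
        // "expected" and "but was" labels in the failure message.
        assertEquals("defaultWorkflow", actualWorkflowId);
    }
}
```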

View File

@@ -86,7 +86,8 @@ public class LocalURIRedirectionServlet extends HttpServlet {
response.sendError(HttpServletResponse.SC_NOT_FOUND);
return;
}
// use object's reported handle for redirect (just in case user provided handle had odd characters)
handle = dso.getHandle();
// close the context and send forward.
context.abort();
Negotiator.sendRedirect(response, handle, "", requestedMimeType, true);

View File

@@ -661,6 +661,7 @@ var HtmlUtil = function() {
a.append(val);
a.attr("href", href);
a.attr("target", "_blank");
a.attr("rel", "noopener noreferrer");
return a;
}

View File

@@ -37,7 +37,7 @@
<activation>
<activeByDefault>false</activeByDefault>
<property>
<name>maven.test.skip</name>
<name>skipTests</name>
<value>false</value>
</property>
</activation>
@@ -307,6 +307,13 @@
<artifactId>dspace-api</artifactId>
</dependency>
<dependency>
<groupId>org.dspace</groupId>
<artifactId>dspace-api</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.dspace</groupId>
<artifactId>dspace-services</artifactId>
@@ -460,6 +467,14 @@
<artifactId>solr-cell</artifactId>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.bouncycastle</groupId>
<artifactId>bcpkix-jdk15on</artifactId>
</exclusion>
<exclusion>
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk15on</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-continuation</artifactId>
@@ -524,13 +539,11 @@
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-smartcn</artifactId>
<version>${solr.client.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-stempel</artifactId>
<version>${solr.client.version}</version>
<scope>test</scope>
</dependency>

View File

@@ -7,6 +7,8 @@
*/
package org.dspace.app.rest;
import static org.apache.commons.collections4.ListUtils.emptyIfNull;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
@@ -100,51 +102,55 @@ public class DiscoveryRestController implements InitializingBean {
@RequestMapping(method = RequestMethod.GET, value = "/search/facets")
public FacetsResource getFacets(@RequestParam(name = "query", required = false) String query,
@RequestParam(name = "dsoType", required = false) String dsoType,
@RequestParam(name = "dsoType", required = false) List<String> dsoTypes,
@RequestParam(name = "scope", required = false) String dsoScope,
@RequestParam(name = "configuration", required = false) String configuration,
List<SearchFilter> searchFilters,
Pageable page) throws Exception {
dsoTypes = emptyIfNull(dsoTypes);
if (log.isTraceEnabled()) {
log.trace("Searching with scope: " + StringUtils.trimToEmpty(dsoScope)
+ ", configuration name: " + StringUtils.trimToEmpty(configuration)
+ ", dsoType: " + StringUtils.trimToEmpty(dsoType)
+ ", query: " + StringUtils.trimToEmpty(query)
+ ", filters: " + Objects.toString(searchFilters));
+ ", configuration name: " + StringUtils.trimToEmpty(configuration)
+ ", dsoTypes: " + String.join(", ", dsoTypes)
+ ", query: " + StringUtils.trimToEmpty(query)
+ ", filters: " + Objects.toString(searchFilters));
}
SearchResultsRest searchResultsRest = discoveryRestRepository
.getAllFacets(query, dsoType, dsoScope, configuration, searchFilters);
.getAllFacets(query, dsoTypes, dsoScope, configuration, searchFilters);
FacetsResource facetsResource = new FacetsResource(searchResultsRest, page);
halLinkService.addLinks(facetsResource, page);
return facetsResource;
}
@RequestMapping(method = RequestMethod.GET, value = "/search/objects")
public SearchResultsResource getSearchObjects(@RequestParam(name = "query", required = false) String query,
@RequestParam(name = "dsoType", required = false) String dsoType,
@RequestParam(name = "dsoType", required = false)
List<String> dsoTypes,
@RequestParam(name = "scope", required = false) String dsoScope,
@RequestParam(name = "configuration", required = false) String
configuration,
List<SearchFilter> searchFilters,
Pageable page) throws Exception {
dsoTypes = emptyIfNull(dsoTypes);
if (log.isTraceEnabled()) {
log.trace("Searching with scope: " + StringUtils.trimToEmpty(dsoScope)
+ ", configuration name: " + StringUtils.trimToEmpty(configuration)
+ ", dsoType: " + StringUtils.trimToEmpty(dsoType)
+ ", query: " + StringUtils.trimToEmpty(query)
+ ", filters: " + Objects.toString(searchFilters)
+ ", page: " + Objects.toString(page));
+ ", configuration name: " + StringUtils.trimToEmpty(configuration)
+ ", dsoTypes: " + String.join(", ", dsoTypes)
+ ", query: " + StringUtils.trimToEmpty(query)
+ ", filters: " + Objects.toString(searchFilters)
+ ", page: " + Objects.toString(page));
}
//Get the Search results in JSON format
SearchResultsRest searchResultsRest = discoveryRestRepository
.getSearchObjects(query, dsoType, dsoScope, configuration, searchFilters, page, utils.obtainProjection());
.getSearchObjects(query, dsoTypes, dsoScope, configuration, searchFilters, page, utils.obtainProjection());
//Convert the Search JSON results to paginated HAL resources
SearchResultsResource searchResultsResource = new SearchResultsResource(searchResultsRest, utils, page);
@@ -174,15 +180,18 @@ public class DiscoveryRestController implements InitializingBean {
public RepresentationModel getFacetValues(@PathVariable("name") String facetName,
@RequestParam(name = "prefix", required = false) String prefix,
@RequestParam(name = "query", required = false) String query,
@RequestParam(name = "dsoType", required = false) String dsoType,
@RequestParam(name = "dsoType", required = false) List<String> dsoTypes,
@RequestParam(name = "scope", required = false) String dsoScope,
@RequestParam(name = "configuration", required = false) String
configuration,
List<SearchFilter> searchFilters,
Pageable page) throws Exception {
dsoTypes = emptyIfNull(dsoTypes);
if (log.isTraceEnabled()) {
log.trace("Facetting on facet " + facetName + " with scope: " + StringUtils.trimToEmpty(dsoScope)
+ ", dsoType: " + StringUtils.trimToEmpty(dsoType)
+ ", dsoTypes: " + String.join(", ", dsoTypes)
+ ", prefix: " + StringUtils.trimToEmpty(prefix)
+ ", query: " + StringUtils.trimToEmpty(query)
+ ", filters: " + Objects.toString(searchFilters)
@@ -190,7 +199,7 @@ public class DiscoveryRestController implements InitializingBean {
}
FacetResultsRest facetResultsRest = discoveryRestRepository
.getFacetObjects(facetName, prefix, query, dsoType, dsoScope, configuration, searchFilters, page);
.getFacetObjects(facetName, prefix, query, dsoTypes, dsoScope, configuration, searchFilters, page);
FacetResultsResource facetResultsResource = converter.toResource(facetResultsRest);
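The controller now accepts dsoType as a repeatable parameter and normalises a missing value to an empty list before it is logged or passed on. A minimal standalone sketch of that binding, assuming a plain Spring MVC setup (the controller, path and parameter handling here are invented for illustration):
```
import static org.apache.commons.collections4.ListUtils.emptyIfNull;

import java.util.List;

import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class TypeFilterExample {

    // ?dsoType=Item&dsoType=Collection binds to ["Item", "Collection"];
    // an absent parameter binds to null, which emptyIfNull turns into an empty list.
    @GetMapping("/example/search")
    public String search(@RequestParam(name = "dsoType", required = false) List<String> dsoTypes) {
        dsoTypes = emptyIfNull(dsoTypes);
        return "filtering on: " + String.join(", ", dsoTypes);
    }
}
```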

View File

@@ -34,6 +34,7 @@ import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.core.Utils;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.DiscoverResult;
import org.dspace.discovery.IndexableObject;
@@ -103,7 +104,8 @@ public class OpenSearchController {
// do some sanity checking
if (!openSearchService.getFormats().contains(format)) {
String err = "Format " + format + " is not supported.";
// Since we are returning error response as HTML, escape any HTML in "format" param
String err = "Format " + Utils.addEntities(format) + " is not supported.";
response.setContentType("text/html");
response.setContentLength(err.length());
response.getWriter().write(err);
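The OpenSearch change above entity-encodes the user-supplied format value before reflecting it into an HTML error response, so a crafted parameter cannot inject markup. A generic sketch of the idea (the real code delegates to DSpace's Utils.addEntities; this helper is only illustrative):
```
public final class HtmlEscapeExample {

    // Replace the characters that are significant in HTML before echoing a
    // request parameter back in an error page.
    public static String escape(String value) {
        if (value == null) {
            return null;
        }
        return value.replace("&", "&amp;")
                    .replace("<", "&lt;")
                    .replace(">", "&gt;")
                    .replace("\"", "&quot;");
    }
}
```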

View File

@@ -11,7 +11,9 @@ import java.io.IOException;
import java.util.Arrays;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.rest.model.AuthnRest;
import org.dspace.core.Utils;
import org.dspace.services.ConfigurationService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -47,14 +49,29 @@ public class ShibbolethRestController implements InitializingBean {
.register(this, Arrays.asList(new Link("/api/" + AuthnRest.CATEGORY, "shibboleth")));
}
// LGTM.com thinks this method has an unvalidated URL redirect (https://lgtm.com/rules/4840088/) in `redirectUrl`,
// even though we are clearly validating the hostname of `redirectUrl` and test it in ShibbolethRestControllerIT
@SuppressWarnings("lgtm[java/unvalidated-url-redirection]")
@RequestMapping(method = RequestMethod.GET)
public void shibboleth(HttpServletResponse response,
@RequestParam(name = "redirectUrl", required = false) String redirectUrl) throws IOException {
if (redirectUrl == null) {
redirectUrl = configurationService.getProperty("dspace.ui.url");
}
log.info("Redirecting to " + redirectUrl);
response.sendRedirect(redirectUrl);
// Validate that the redirectURL matches either the server or UI hostname. It *cannot* be an arbitrary URL.
String redirectHostName = Utils.getHostName(redirectUrl);
String serverHostName = Utils.getHostName(configurationService.getProperty("dspace.server.url"));
String clientHostName = Utils.getHostName(configurationService.getProperty("dspace.ui.url"));
if (StringUtils.equalsAnyIgnoreCase(redirectHostName, serverHostName, clientHostName)) {
log.debug("Shibboleth redirecting to " + redirectUrl);
response.sendRedirect(redirectUrl);
} else {
log.error("Invalid Shibboleth redirectURL=" + redirectUrl +
". URL doesn't match hostname of server or UI!");
response.sendError(HttpServletResponse.SC_BAD_REQUEST,
"Invalid redirectURL! Must match server or ui hostname.");
}
}
}
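The Shibboleth controller now follows a redirect only when its hostname matches the server or UI host, closing the open redirect flagged by LGTM. A minimal, self-contained sketch of that guard, assuming the allowed hosts are supplied by the caller (names are illustrative):
```
import java.net.URI;
import java.util.Set;

public final class RedirectGuard {

    // Follow a redirect only if its host is one we trust (e.g. the server or UI host).
    public static boolean isAllowed(String redirectUrl, Set<String> allowedHosts) {
        try {
            String host = URI.create(redirectUrl).getHost();
            return host != null && allowedHosts.stream().anyMatch(host::equalsIgnoreCase);
        } catch (IllegalArgumentException e) {
            return false; // unparseable URLs are rejected outright
        }
    }
}
```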

View File

@@ -35,13 +35,14 @@ public class DiscoverFacetResultsConverter {
@Autowired
private SearchFilterToAppliedFilterConverter searchFilterToAppliedFilterConverter;
public FacetResultsRest convert(Context context, String facetName, String prefix, String query, String dsoType,
String dsoScope, List<SearchFilter> searchFilters, DiscoverResult searchResult,
DiscoveryConfiguration configuration, Pageable page, Projection projection) {
public FacetResultsRest convert(Context context, String facetName, String prefix, String query,
List<String> dsoTypes, String dsoScope, List<SearchFilter> searchFilters,
DiscoverResult searchResult, DiscoveryConfiguration configuration, Pageable page,
Projection projection) {
FacetResultsRest facetResultsRest = new FacetResultsRest();
facetResultsRest.setProjection(projection);
setRequestInformation(context, facetName, prefix, query, dsoType, dsoScope, searchFilters, searchResult,
setRequestInformation(context, facetName, prefix, query, dsoTypes, dsoScope, searchFilters, searchResult,
configuration, facetResultsRest, page, projection);
addToFacetResultList(facetName, searchResult, facetResultsRest, configuration, page, projection);
@@ -72,14 +73,14 @@ public class DiscoverFacetResultsConverter {
return facetValueConverter.convert(value, projection);
}
private void setRequestInformation(Context context, String facetName, String prefix, String query, String dsoType,
String dsoScope, List<SearchFilter> searchFilters, DiscoverResult searchResult,
DiscoveryConfiguration configuration, FacetResultsRest facetResultsRest,
Pageable page, Projection projection) {
private void setRequestInformation(Context context, String facetName, String prefix, String query,
List<String> dsoTypes, String dsoScope, List<SearchFilter> searchFilters,
DiscoverResult searchResult, DiscoveryConfiguration configuration,
FacetResultsRest facetResultsRest, Pageable page, Projection projection) {
facetResultsRest.setQuery(query);
facetResultsRest.setPrefix(prefix);
facetResultsRest.setScope(dsoScope);
facetResultsRest.setDsoType(dsoType);
facetResultsRest.setDsoTypes(dsoTypes);
facetResultsRest.setFacetEntry(convertFacetEntry(facetName, searchResult, configuration, page, projection));

View File

@@ -38,7 +38,7 @@ public class DiscoverFacetsConverter {
@Autowired
private SearchService searchService;
public SearchResultsRest convert(Context context, String query, String dsoType, String configurationName,
public SearchResultsRest convert(Context context, String query, List<String> dsoTypes, String configurationName,
String dsoScope, List<SearchFilter> searchFilters, final Pageable page,
DiscoveryConfiguration configuration, DiscoverResult searchResult,
Projection projection) {
@@ -46,7 +46,7 @@ public class DiscoverFacetsConverter {
SearchResultsRest searchResultsRest = new SearchResultsRest();
searchResultsRest.setProjection(projection);
setRequestInformation(context, query, dsoType, configurationName, dsoScope, searchFilters, page,
setRequestInformation(context, query, dsoTypes, configurationName, dsoScope, searchFilters, page,
searchResultsRest);
addFacetValues(context, searchResult, searchResultsRest, configuration, projection);
@@ -129,13 +129,13 @@ public class DiscoverFacetsConverter {
}
}
private void setRequestInformation(final Context context, final String query, final String dsoType,
private void setRequestInformation(final Context context, final String query, final List<String> dsoTypes,
final String configurationName, final String scope,
final List<SearchFilter> searchFilters, final Pageable page,
final SearchResultsRest resultsRest) {
resultsRest.setQuery(query);
resultsRest.setConfiguration(configurationName);
resultsRest.setDsoType(dsoType);
resultsRest.setDsoTypes(dsoTypes);
resultsRest.setSort(SearchResultsRest.Sorting.fromPage(page));
resultsRest.setScope(scope);

View File

@@ -43,7 +43,7 @@ public class DiscoverResultConverter {
@Autowired
private SearchFilterToAppliedFilterConverter searchFilterToAppliedFilterConverter;
public SearchResultsRest convert(final Context context, final String query, final String dsoType,
public SearchResultsRest convert(final Context context, final String query, final List<String> dsoTypes,
final String configurationName, final String scope,
final List<SearchFilter> searchFilters, final Pageable page,
final DiscoverResult searchResult, final DiscoveryConfiguration configuration,
@@ -52,7 +52,7 @@ public class DiscoverResultConverter {
SearchResultsRest resultsRest = new SearchResultsRest();
resultsRest.setProjection(projection);
setRequestInformation(context, query, dsoType, configurationName, scope, searchFilters, page, resultsRest);
setRequestInformation(context, query, dsoTypes, configurationName, scope, searchFilters, page, resultsRest);
addSearchResults(searchResult, resultsRest, projection);
@@ -101,13 +101,13 @@ public class DiscoverResultConverter {
return null;
}
private void setRequestInformation(final Context context, final String query, final String dsoType,
private void setRequestInformation(final Context context, final String query, final List<String> dsoTypes,
final String configurationName, final String scope,
final List<SearchFilter> searchFilters, final Pageable page,
final SearchResultsRest resultsRest) {
resultsRest.setQuery(query);
resultsRest.setConfiguration(configurationName);
resultsRest.setDsoType(dsoType);
resultsRest.setDsoTypes(dsoTypes);
resultsRest.setScope(scope);

View File

@@ -14,6 +14,8 @@ import java.sql.SQLException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.security.RestAuthenticationService;
import org.dspace.authorize.AuthorizeException;
import org.springframework.beans.TypeMismatchException;
@@ -41,6 +43,7 @@ import org.springframework.web.servlet.mvc.method.annotation.ResponseEntityExcep
*/
@ControllerAdvice
public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionHandler {
private static final Logger log = LogManager.getLogger(DSpaceApiExceptionControllerAdvice.class);
@Autowired
private RestAuthenticationService restAuthenticationService;
@@ -49,16 +52,16 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
protected void handleAuthorizeException(HttpServletRequest request, HttpServletResponse response, Exception ex)
throws IOException {
if (restAuthenticationService.hasAuthenticationData(request)) {
sendErrorResponse(request, response, ex, ex.getMessage(), HttpServletResponse.SC_FORBIDDEN);
sendErrorResponse(request, response, ex, "Access is denied", HttpServletResponse.SC_FORBIDDEN);
} else {
sendErrorResponse(request, response, ex, ex.getMessage(), HttpServletResponse.SC_UNAUTHORIZED);
sendErrorResponse(request, response, ex, "Authentication is required", HttpServletResponse.SC_UNAUTHORIZED);
}
}
@ExceptionHandler({IllegalArgumentException.class, MultipartException.class})
protected void handleWrongRequestException(HttpServletRequest request, HttpServletResponse response,
Exception ex) throws IOException {
sendErrorResponse(request, response, ex, ex.getMessage(), HttpServletResponse.SC_BAD_REQUEST);
sendErrorResponse(request, response, ex, "Request is invalid or incorrect", HttpServletResponse.SC_BAD_REQUEST);
}
@ExceptionHandler(SQLException.class)
@@ -72,24 +75,24 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
protected void handleIOException(HttpServletRequest request, HttpServletResponse response, Exception ex)
throws IOException {
sendErrorResponse(request, response, ex,
"An internal read or write operation failed (IO Exception)",
"An internal read or write operation failed",
HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
}
@ExceptionHandler(MethodNotAllowedException.class)
protected void methodNotAllowedException(HttpServletRequest request, HttpServletResponse response,
Exception ex) throws IOException {
sendErrorResponse(request, response, ex, ex.getMessage(), HttpServletResponse.SC_METHOD_NOT_ALLOWED);
sendErrorResponse(request, response, ex, "Method is not allowed or supported",
HttpServletResponse.SC_METHOD_NOT_ALLOWED);
}
@ExceptionHandler( {UnprocessableEntityException.class})
protected void handleUnprocessableEntityException(HttpServletRequest request, HttpServletResponse response,
Exception ex) throws IOException {
//422 is not defined in HttpServletResponse. Its meaning is "Unprocessable Entity".
//Using the value from HttpStatus.
sendErrorResponse(request, response, null,
ex.getMessage(),
"Unprocessable or invalid entity",
HttpStatus.UNPROCESSABLE_ENTITY.value());
}
@@ -98,7 +101,7 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
throws IOException {
// we want the 400 status for missing parameters, see https://jira.lyrasis.org/browse/DS-4428
sendErrorResponse(request, response, null,
ex.getMessage(),
"A required parameter is invalid",
HttpStatus.BAD_REQUEST.value());
}
@@ -107,7 +110,7 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
throws IOException {
// we want the 400 status for missing parameters, see https://jira.lyrasis.org/browse/DS-4428
sendErrorResponse(request, response, null,
ex.getMessage(),
"A required parameter is missing",
HttpStatus.BAD_REQUEST.value());
}
@@ -137,7 +140,7 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
} else {
returnCode = HttpServletResponse.SC_INTERNAL_SERVER_ERROR;
}
sendErrorResponse(request, response, ex, "An Exception has occured", returnCode);
sendErrorResponse(request, response, ex, "An exception has occurred", returnCode);
}
@@ -147,6 +150,13 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
//Make sure Spring picks up this exception
request.setAttribute(EXCEPTION_ATTRIBUTE, ex);
// For now, just logging server errors.
// We don't want to fill logs with bad/invalid REST API requests.
if (statusCode == HttpServletResponse.SC_INTERNAL_SERVER_ERROR) {
// Log the full error and status code
log.error("{} (status:{})", message, statusCode, ex);
}
//Exception properties will be set by org.springframework.boot.web.support.ErrorPageFilter
response.sendError(statusCode, message);
}
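
The logging call above passes three arguments to a two-placeholder pattern; Log4j2 treats the extra trailing Throwable as the exception for the log event, so the stack trace is still written for server errors. A minimal standalone sketch of that behaviour (assuming log4j-api and log4j-core are on the classpath; class and message strings are illustrative):

```
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class TrailingThrowableDemo {
    private static final Logger log = LogManager.getLogger(TrailingThrowableDemo.class);

    public static void main(String[] args) {
        Exception ex = new IllegalStateException("boom");
        // Two placeholders, three arguments: Log4j2 picks up the last argument,
        // being a Throwable, as the exception and prints its stack trace.
        log.error("{} (status:{})", "An exception has occurred", 500, ex);
    }
}
```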

View File

@@ -28,7 +28,7 @@ public abstract class DiscoveryRestHalLinkFactory<T> extends HalLinkFactory<T, D
public UriComponentsBuilder buildSearchBaseLink(final DiscoveryResultsRest data) {
try {
UriComponentsBuilder uriBuilder = uriBuilder(getMethodOn()
.getSearchObjects(data.getQuery(), data.getDsoType(),
.getSearchObjects(data.getQuery(), data.getDsoTypes(),
data.getScope(), data.getConfiguration(),
null, null));
@@ -43,7 +43,7 @@ public abstract class DiscoveryRestHalLinkFactory<T> extends HalLinkFactory<T, D
try {
UriComponentsBuilder uriBuilder = uriBuilder(
getMethodOn().getFacetValues(data.getFacetEntry().getName(), data.getPrefix(), data.getQuery(),
data.getDsoType(), data.getScope(), data.getConfiguration(), null, null));
data.getDsoTypes(), data.getScope(), data.getConfiguration(), null, null));
return addFilterParams(uriBuilder, data);
} catch (Exception ex) {
@@ -54,7 +54,7 @@ public abstract class DiscoveryRestHalLinkFactory<T> extends HalLinkFactory<T, D
protected UriComponentsBuilder buildSearchFacetsBaseLink(final SearchResultsRest data) {
try {
UriComponentsBuilder uriBuilder = uriBuilder(getMethodOn().getFacets(data.getQuery(), data.getDsoType(),
UriComponentsBuilder uriBuilder = uriBuilder(getMethodOn().getFacets(data.getQuery(), data.getDsoTypes(),
data.getScope(), data.getConfiguration(), null, null));
uriBuilder = addSortingParms(uriBuilder, data);

View File

@@ -8,6 +8,7 @@
package org.dspace.app.rest.link.search;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.BooleanUtils;
@@ -39,7 +40,7 @@ public class SearchFacetEntryHalLinkFactory extends DiscoveryRestHalLinkFactory<
DiscoveryResultsRest searchData = halResource.getSearchData();
String query = searchData == null ? null : searchData.getQuery();
String dsoType = searchData == null ? null : searchData.getDsoType();
List<String> dsoType = searchData == null ? null : searchData.getDsoTypes();
String scope = searchData == null ? null : searchData.getScope();
String configuration = searchData == null ? null : searchData.getConfiguration();

View File

@@ -16,6 +16,10 @@ import com.fasterxml.jackson.annotation.JsonProperty;
* @author Jelle Pelgrims (jelle.pelgrims at atmire.com)
*/
@LinksRest(links = {
@LinkRest(
name = BundleRest.ITEM,
method = "getItem"
),
@LinkRest(
name = BundleRest.BITSTREAMS,
method = "getBitstreams"
@@ -30,6 +34,7 @@ public class BundleRest extends DSpaceObjectRest {
public static final String PLURAL_NAME = "bundles";
public static final String CATEGORY = RestAddressableModel.CORE;
public static final String ITEM = "item";
public static final String BITSTREAMS = "bitstreams";
public static final String PRIMARY_BITSTREAM = "primaryBitstream";

View File

@@ -27,7 +27,7 @@ public abstract class DiscoveryResultsRest extends BaseObjectRest<String> {
private List<SearchResultsRest.AppliedFilter> appliedFilters;
private SearchResultsRest.Sorting sort;
@JsonIgnore
private String dsoType;
private List<String> dsoTypes;
@JsonIgnore
private List<SearchFilter> searchFilters;
private String configuration;
@@ -52,12 +52,12 @@ public abstract class DiscoveryResultsRest extends BaseObjectRest<String> {
this.query = query;
}
public String getDsoType() {
return dsoType;
public List<String> getDsoTypes() {
return dsoTypes;
}
public void setDsoType(final String dsoType) {
this.dsoType = dsoType;
public void setDsoTypes(final List<String> dsoTypes) {
this.dsoTypes = dsoTypes;
}
public String getScope() {

View File

@@ -27,7 +27,6 @@ public class RelationshipRest extends BaseObjectRest<Integer> {
@JsonIgnore
private UUID rightId;
private int relationshipTypeId;
private RelationshipTypeRest relationshipType;
private int leftPlace;
private int rightPlace;
@@ -90,14 +89,6 @@ public class RelationshipRest extends BaseObjectRest<Integer> {
this.rightPlace = rightPlace;
}
public int getRelationshipTypeId() {
return relationshipTypeId;
}
public void setRelationshipTypeId(int relationshipTypeId) {
this.relationshipTypeId = relationshipTypeId;
}
public String getRightwardValue() {
return rightwardValue;
}

View File

@@ -7,6 +7,10 @@
*/
package org.dspace.app.rest.repository;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
@@ -145,9 +149,11 @@ public class AuthorizationRestRepository extends DSpaceRestRepository<Authorizat
@PreAuthorize("#epersonUuid==null || hasPermission(#epersonUuid, 'EPERSON', 'READ')")
@SearchRestMethod(name = "object")
public Page<AuthorizationRest> findByObject(@Parameter(value = "uri", required = true) String uri,
@Parameter(value = "eperson") UUID epersonUuid,
@Parameter(value = "eperson") UUID epersonUuid, @Parameter(value = "feature") String featureName,
Pageable pageable) throws AuthorizeException, SQLException {
Context context = obtainContext();
BaseObjectRest obj = utils.getBaseObjectRestFromUri(context, uri);
if (obj == null) {
return null;
@@ -162,11 +168,16 @@ public class AuthorizationRestRepository extends DSpaceRestRepository<Authorizat
context.switchContextUser(user);
}
List<AuthorizationFeature> features = authorizationFeatureService.findByResourceType(obj.getUniqueType());
List<Authorization> authorizations = new ArrayList<Authorization>();
for (AuthorizationFeature f : features) {
if (authorizationFeatureService.isAuthorized(context, f, obj)) {
authorizations.add(new Authorization(user, f, obj));
List<Authorization> authorizations;
if (isNotBlank(featureName)) {
authorizations = findByObjectAndFeature(context, user, obj, featureName);
} else {
List<AuthorizationFeature> features = authorizationFeatureService.findByResourceType(obj.getUniqueType());
authorizations = new ArrayList<>();
for (AuthorizationFeature f : features) {
if (authorizationFeatureService.isAuthorized(context, f, obj)) {
authorizations.add(new Authorization(user, f, obj));
}
}
}
@@ -177,57 +188,17 @@ public class AuthorizationRestRepository extends DSpaceRestRepository<Authorizat
return converter.toRestPage(authorizations, pageable, utils.obtainProjection());
}
/**
* It returns the authorization related to the requested feature if granted to the specified eperson or to the
* anonymous user. Only administrators and the user identified by the epersonUuid parameter can access this method
*
* @param uri
* the uri of the object to check the authorization against
* @param epersonUuid
* the eperson uuid to use in the authorization evaluation
* @param featureName
* limit the authorization check to only the feature identified via its name
* @param pageable
* the pagination options
* @return the list of matching authorization available for the requested user and object, filtered by feature if
* provided
* @throws AuthorizeException
* @throws SQLException
*/
@PreAuthorize("#epersonUuid==null || hasPermission(#epersonUuid, 'EPERSON', 'READ')")
@SearchRestMethod(name = "objectAndFeature")
public AuthorizationRest findByObjectAndFeature(@Parameter(value = "uri", required = true) String uri,
@Parameter(value = "eperson") UUID epersonUuid,
@Parameter(value = "feature", required = true) String featureName,
Pageable pageable) throws AuthorizeException, SQLException {
Context context = obtainContext();
BaseObjectRest obj = utils.getBaseObjectRestFromUri(context, uri);
if (obj == null) {
return null;
private List<Authorization> findByObjectAndFeature(
Context context, EPerson user, BaseObjectRest obj, String featureName
) throws SQLException {
AuthorizationFeature feature = authorizationFeatureService.find(featureName);
if (!authorizationFeatureService.isAuthorized(context, feature, obj)) {
return emptyList();
}
EPerson currUser = context.getCurrentUser();
// get the user specified in the requested parameters, can be null for anonymous
EPerson user = getUserFromRequestParameter(context, epersonUuid);
if (currUser != user) {
// Temporarily change the Context's current user in order to retrieve
// authorizations based on that user
context.switchContextUser(user);
}
AuthorizationFeature feature = authorizationFeatureService.find(featureName);
AuthorizationRest authorizationRest = null;
if (authorizationFeatureService.isAuthorized(context, feature, obj)) {
Authorization authz = new Authorization();
authz.setEperson(user);
authz.setFeature(feature);
authz.setObject(obj);
authorizationRest = converter.toRest(authz, utils.obtainProjection());
}
if (currUser != user) {
// restore the real current user
context.restoreContextUser();
}
return authorizationRest;
return singletonList(new Authorization(user, feature, obj));
}
/**
@@ -242,25 +213,27 @@ public class AuthorizationRestRepository extends DSpaceRestRepository<Authorizat
*/
private EPerson getUserFromRequestParameter(Context context, UUID epersonUuid)
throws AuthorizeException, SQLException {
EPerson currUser = context.getCurrentUser();
EPerson user = currUser;
if (epersonUuid != null) {
if (epersonUuid == null) {
// no user is specified in the request parameters, check the permissions for the current user
return currUser;
} else {
// a user is specified in the request parameters
if (currUser == null) {
throw new AuthorizeException("attempt to anonymously access the authorization of the eperson "
+ epersonUuid);
} else {
// an user is specified in the request parameters
if (!authorizeService.isAdmin(context) && !epersonUuid.equals(currUser.getID())) {
throw new AuthorizeException("attempt to access the authorization of the eperson " + epersonUuid
+ " only system administrators can see the authorization of other users");
}
user = epersonService.find(context, epersonUuid);
} else if (!authorizeService.isAdmin(context) && !epersonUuid.equals(currUser.getID())) {
throw new AuthorizeException("attempt to access the authorization of the eperson " + epersonUuid
+ " as a non-admin; only system administrators can see the authorization of other users");
}
} else {
// the request asks to check the permission for the anonymous user
user = null;
return epersonService.find(context, epersonUuid);
}
return user;
}
@Override
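
With this change the `objectAndFeature` search method is folded into `object`: the `feature` parameter is now optional on the same endpoint. A hedged sketch of the resulting request URIs; the `/api/authz/authorizations/search/object` path is inferred from the repository and model classes, and the base URL, item UUID, and feature name are placeholders:

```
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class AuthorizationSearchUris {
    public static void main(String[] args) {
        String endpoint = "http://localhost:8080/server/api/authz/authorizations/search/object";
        // The "uri" parameter points at the REST URI of the object to check; encode it as a query value.
        String objectUri = URLEncoder.encode(
                "http://localhost:8080/server/api/core/items/1911e8a4-6939-490c-b58b-a5d70f8d91fb",
                StandardCharsets.UTF_8);

        // All features granted on the object (previous behaviour of /search/object):
        System.out.println(endpoint + "?uri=" + objectUri);

        // Restricted to one named feature, replacing the removed /search/objectAndFeature endpoint
        // ("someFeature" is a placeholder feature name):
        System.out.println(endpoint + "?uri=" + objectUri + "&feature=someFeature");
    }
}
```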

View File

@@ -0,0 +1,62 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.repository;
import java.sql.SQLException;
import java.util.UUID;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;
import org.dspace.app.rest.model.BundleRest;
import org.dspace.app.rest.model.ItemRest;
import org.dspace.app.rest.projection.Projection;
import org.dspace.content.Bundle;
import org.dspace.content.Item;
import org.dspace.content.service.BundleService;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Pageable;
import org.springframework.data.rest.webmvc.ResourceNotFoundException;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Component;
/**
* Link repository for "item" subresource of an individual bundle.
*/
@Component(BundleRest.CATEGORY + "." + BundleRest.NAME + "." + BundleRest.ITEM)
public class BundleItemLinkRepository extends AbstractDSpaceRestRepository
implements LinkRestRepository {
@Autowired
BundleService bundleService;
/**
* Get the item in which the provided bundle resides
*/
@PreAuthorize("hasPermission(#bundleId, 'BUNDLE', 'READ')")
public ItemRest getItem(@Nullable HttpServletRequest request,
UUID bundleId,
@Nullable Pageable optionalPageable,
Projection projection) {
try {
Context context = obtainContext();
Bundle bundle = bundleService.find(context, bundleId);
if (bundle == null) {
throw new ResourceNotFoundException("No such bundle: " + bundleId);
}
if (bundle.getItems().isEmpty()) {
return null;
}
Item item = bundle.getItems().get(0);
return converter.toRest(item, projection);
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
}
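
The new link repository backs an `item` subresource on bundles. A minimal sketch of fetching it over HTTP with `java.net.http` (Java 11+); the base URL and bundle UUID are placeholders, and the `/api/core/bundles/{uuid}/item` path is inferred from the `BundleRest` category, plural name, and link name above:

```
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class BundleItemLookup {
    public static void main(String[] args) throws Exception {
        String base = "http://localhost:8080/server/api";                 // placeholder DSpace REST base URL
        String bundleUuid = "6a45e35e-3f7f-4d2d-9a89-0c0e3c2b9f10";       // placeholder bundle UUID

        HttpRequest request = HttpRequest.newBuilder(
                URI.create(base + "/core/bundles/" + bundleUuid + "/item")).GET().build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());

        // Expected: 200 with the owning item as JSON, 401/403 without READ permission,
        // 404 for an unknown bundle (via the ResourceNotFoundException above).
        System.out.println(response.statusCode());
        System.out.println(response.body());
    }
}
```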

View File

@@ -89,7 +89,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
return discoverConfigurationConverter.convert(discoveryConfiguration, utils.obtainProjection());
}
public SearchResultsRest getSearchObjects(final String query, final String dsoType, final String dsoScope,
public SearchResultsRest getSearchObjects(final String query, final List<String> dsoTypes, final String dsoScope,
final String configuration,
final List<SearchFilter> searchFilters, final Pageable page,
final Projection projection) {
@@ -103,7 +103,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
try {
discoverQuery = queryBuilder
.buildQuery(context, scopeObject, discoveryConfiguration, query, searchFilters, dsoType, page);
.buildQuery(context, scopeObject, discoveryConfiguration, query, searchFilters, dsoTypes, page);
searchResult = searchService.search(context, scopeObject, discoverQuery);
} catch (SearchServiceException e) {
@@ -112,7 +112,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
}
return discoverResultConverter
.convert(context, query, dsoType, configuration, dsoScope, searchFilters, page, searchResult,
.convert(context, query, dsoTypes, configuration, dsoScope, searchFilters, page, searchResult,
discoveryConfiguration, projection);
}
@@ -130,7 +130,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
return discoverSearchSupportConverter.convert();
}
public FacetResultsRest getFacetObjects(String facetName, String prefix, String query, String dsoType,
public FacetResultsRest getFacetObjects(String facetName, String prefix, String query, List<String> dsoTypes,
String dsoScope, final String configuration, List<SearchFilter> searchFilters, Pageable page) {
Context context = obtainContext();
@@ -143,7 +143,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
DiscoverQuery discoverQuery = null;
try {
discoverQuery = queryBuilder.buildFacetQuery(context, scopeObject, discoveryConfiguration, prefix, query,
searchFilters, dsoType, page, facetName);
searchFilters, dsoTypes, page, facetName);
searchResult = searchService.search(context, scopeObject, discoverQuery);
} catch (SearchServiceException e) {
@@ -152,12 +152,12 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
}
FacetResultsRest facetResultsRest = discoverFacetResultsConverter.convert(context, facetName, prefix, query,
dsoType, dsoScope, searchFilters, searchResult, discoveryConfiguration, page,
dsoTypes, dsoScope, searchFilters, searchResult, discoveryConfiguration, page,
utils.obtainProjection());
return facetResultsRest;
}
public SearchResultsRest getAllFacets(String query, String dsoType, String dsoScope, String configuration,
public SearchResultsRest getAllFacets(String query, List<String> dsoTypes, String dsoScope, String configuration,
List<SearchFilter> searchFilters) {
Context context = obtainContext();
@@ -171,14 +171,14 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
try {
discoverQuery = queryBuilder
.buildQuery(context, scopeObject, discoveryConfiguration, query, searchFilters, dsoType, page);
.buildQuery(context, scopeObject, discoveryConfiguration, query, searchFilters, dsoTypes, page);
searchResult = searchService.search(context, scopeObject, discoverQuery);
} catch (SearchServiceException e) {
log.error("Error while searching with Discovery", e);
}
SearchResultsRest searchResultsRest = discoverFacetsConverter.convert(context, query, dsoType,
SearchResultsRest searchResultsRest = discoverFacetsConverter.convert(context, query, dsoTypes,
configuration, dsoScope, searchFilters, page, discoveryConfiguration, searchResult,
utils.obtainProjection());
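
Across these repository methods a single `dsoType` string becomes a `List<String>`, so one search can be limited to several object types at once. Presumably the REST layer binds a repeated `dsoType` request parameter into that list; a small sketch of building such a query string (base URL and type names are placeholders):

```
import java.util.List;
import java.util.stream.Collectors;

public class MultiTypeSearchUrl {
    public static void main(String[] args) {
        String base = "http://localhost:8080/server/api/discover/search/objects"; // placeholder base URL
        List<String> dsoTypes = List.of("Item", "Collection");                    // types to restrict the search to

        // One repeated dsoType parameter per requested type, mirroring the List<String> signature above.
        String typeParams = dsoTypes.stream()
                .map(type -> "dsoType=" + type)
                .collect(Collectors.joining("&"));

        System.out.println(base + "?query=test&" + typeParams);
        // -> .../discover/search/objects?query=test&dsoType=Item&dsoType=Collection
    }
}
```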

View File

@@ -35,7 +35,6 @@ public class DSpace401AuthenticationEntryPoint implements AuthenticationEntryPoi
response.setHeader("WWW-Authenticate",
restAuthenticationService.getWwwAuthenticateHeaderValue(request, response));
response.sendError(HttpServletResponse.SC_UNAUTHORIZED,
authException.getMessage());
response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Authentication is required");
}
}

View File

@@ -75,7 +75,7 @@ public class PoolTaskRestPermissionEvaluatorPlugin extends RestObjectPermissionE
XmlWorkflowItem workflowItem = poolTask.getWorkflowItem();
PoolTask poolTask2 = poolTaskService.findByWorkflowIdAndEPerson(context, workflowItem, ePerson);
if (poolTask2 != null && poolTask2.getID() == poolTask.getID()) {
if (poolTask2 != null && poolTask2.getID().equals(poolTask.getID())) {
return true;
}
} catch (SQLException | AuthorizeException | IOException e) {
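
The one-line change above swaps `==` for `equals()` when comparing the two task IDs. The IDs are boxed values, and reference equality only happens to work for integers in the small cache range, so the old comparison silently breaks for larger IDs. A standalone illustration:

```
public class BoxedIdComparison {
    public static void main(String[] args) {
        Integer smallA = 127, smallB = 127;   // within the Integer cache (-128..127)
        Integer largeA = 1024, largeB = 1024; // outside the cache: two distinct objects

        System.out.println(smallA == smallB);       // true, but only because of caching
        System.out.println(largeA == largeB);       // false: reference comparison of two objects
        System.out.println(largeA.equals(largeB));  // true: value comparison, always correct
    }
}
```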

View File

@@ -77,20 +77,21 @@ public class StatelessAuthenticationFilter extends BasicAuthenticationFilter {
HttpServletResponse res,
FilterChain chain) throws IOException, ServletException {
Authentication authentication = null;
Authentication authentication;
try {
authentication = getAuthentication(req, res);
} catch (AuthorizeException e) {
res.sendError(HttpServletResponse.SC_UNAUTHORIZED, e.getMessage());
log.error(e.getMessage(), e);
// just return an error, but do not log
res.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Authentication is required");
return;
} catch (IllegalArgumentException | SQLException e) {
res.sendError(HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
log.error(e.getMessage(), e);
res.sendError(HttpServletResponse.SC_BAD_REQUEST, "Authentication request is invalid or incorrect");
log.error("Authentication request is invalid or incorrect (status:{})",
HttpServletResponse.SC_BAD_REQUEST, e);
return;
} catch (AccessDeniedException e) {
res.sendError(HttpServletResponse.SC_FORBIDDEN, e.getMessage());
log.error(e.getMessage(), e);
res.sendError(HttpServletResponse.SC_FORBIDDEN, "Access is denied");
log.error("Access is denied (status:{})", HttpServletResponse.SC_FORBIDDEN, e);
return;
}
if (authentication != null) {
@@ -134,7 +135,7 @@ public class StatelessAuthenticationFilter extends BasicAuthenticationFilter {
if (configurationService.getBooleanProperty("webui.user.assumelogin")) {
return getOnBehalfOfAuthentication(context, onBehalfOfParameterValue, res);
} else {
throw new IllegalArgumentException("The login as feature is not allowed" +
throw new IllegalArgumentException("The 'login as' feature is not allowed" +
" due to the current configuration");
}
}
@@ -146,7 +147,7 @@ public class StatelessAuthenticationFilter extends BasicAuthenticationFilter {
}
} else {
if (request.getHeader(ON_BEHALF_OF_REQUEST_PARAM) != null) {
throw new AuthorizeException("Only admins are allowed to use the login as feature");
throw new AuthorizeException("Must be logged in (as an admin) to use the 'login as' feature");
}
}

View File

@@ -154,6 +154,7 @@ public class JWTTokenRestAuthenticationServiceImpl implements RestAuthentication
Cookie cookie = new Cookie(AUTHORIZATION_COOKIE, "");
cookie.setHttpOnly(true);
cookie.setMaxAge(0);
cookie.setSecure(true);
response.addCookie(cookie);
}

View File

@@ -7,6 +7,10 @@
*/
package org.dspace.app.rest.utils;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
@@ -65,14 +69,47 @@ public class DiscoverQueryBuilder implements InitializingBean {
pageSizeLimit = configurationService.getIntProperty("rest.search.max.results", 100);
}
/**
* Build a discovery query
*
* @param context the DSpace context
* @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query
* @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query
* @param dsoType only include search results with this type
* @param page the pageable for this discovery query
*/
public DiscoverQuery buildQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration,
String query, List<SearchFilter> searchFilters,
String dsoType, Pageable page)
throws DSpaceBadRequestException {
List<String> dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList();
return buildQuery(context, scope, discoveryConfiguration, query, searchFilters, dsoTypes, page);
}
/**
* Build a discovery query
*
* @param context the DSpace context
* @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query
* @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query
* @param dsoTypes only include search results with one of these types
* @param page the pageable for this discovery query
*/
public DiscoverQuery buildQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration,
String query, List<SearchFilter> searchFilters,
List<String> dsoTypes, Pageable page)
throws DSpaceBadRequestException {
DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters,
dsoType);
dsoTypes);
//When all search criteria are set, configure facet results
addFaceting(context, scope, queryArgs, discoveryConfiguration);
@@ -98,14 +135,52 @@ public class DiscoverQueryBuilder implements InitializingBean {
}
}
/**
* Create a discovery facet query.
*
* @param context the DSpace context
* @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query
* @param prefix limit the facet results to those starting with the given prefix.
* @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query
* @param dsoType only include search results with this type
* @param page the pageable for this discovery query
* @param facetName the facet field
*/
public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration,
String prefix, String query, List<SearchFilter> searchFilters,
String dsoType, Pageable page, String facetName)
throws DSpaceBadRequestException {
List<String> dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList();
return buildFacetQuery(
context, scope, discoveryConfiguration, prefix, query, searchFilters, dsoTypes, page, facetName);
}
/**
* Create a discovery facet query.
*
* @param context the DSpace context
* @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query
* @param prefix limit the facet results to those starting with the given prefix.
* @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query
* @param dsoTypes only include search results with one of these types
* @param page the pageable for this discovery query
* @param facetName the facet field
*/
public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration,
String prefix, String query, List<SearchFilter> searchFilters,
List<String> dsoTypes, Pageable page, String facetName)
throws DSpaceBadRequestException {
DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters,
dsoType);
dsoTypes);
//When all search criteria are set, configure facet results
addFacetingForFacets(context, scope, prefix, queryArgs, discoveryConfiguration, facetName, page);
@@ -170,7 +245,7 @@ public class DiscoverQueryBuilder implements InitializingBean {
private DiscoverQuery buildCommonDiscoverQuery(Context context, DiscoveryConfiguration discoveryConfiguration,
String query,
List<SearchFilter> searchFilters, String dsoType)
List<SearchFilter> searchFilters, List<String> dsoTypes)
throws DSpaceBadRequestException {
DiscoverQuery queryArgs = buildBaseQueryForConfiguration(discoveryConfiguration);
@@ -182,10 +257,13 @@ public class DiscoverQueryBuilder implements InitializingBean {
queryArgs.setQuery(query);
}
//Limit results to DSO type
if (StringUtils.isNotBlank(dsoType)) {
queryArgs.setDSpaceObjectFilter(getDsoType(dsoType));
//Limit results to DSO types
if (isNotEmpty(dsoTypes)) {
dsoTypes.stream()
.map(this::getDsoType)
.forEach(queryArgs::addDSpaceObjectFilter);
}
return queryArgs;
}
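
Both `buildQuery` and `buildFacetQuery` keep their old single-type signatures and simply wrap the argument in a singleton list before delegating, so existing callers keep compiling. The pattern in isolation (stand-in method bodies, not the DSpace code):

```
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;

import java.util.List;

public class OverloadDelegation {

    // Legacy signature, kept for existing callers: wrap and delegate.
    static String buildQuery(String query, String dsoType) {
        List<String> dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList();
        return buildQuery(query, dsoTypes);
    }

    // New list-based signature used by the REST layer after this change.
    static String buildQuery(String query, List<String> dsoTypes) {
        return query + " restricted to " + dsoTypes;
    }

    public static void main(String[] args) {
        System.out.println(buildQuery("test", "Item"));                        // old single-type call still works
        System.out.println(buildQuery("test", List.of("Item", "Collection"))); // new multi-type call
    }
}
```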

View File

@@ -19,15 +19,22 @@
<context:annotation-config/> <!-- allows us to use spring annotations in beans -->
<bean class="org.dspace.discovery.SolrServiceImpl" id="org.dspace.discovery.SearchService"/>
<bean class="org.dspace.discovery.SolrServiceImpl"
id="org.dspace.discovery.SearchService"/>
<alias name="org.dspace.discovery.SearchService" alias="org.dspace.discovery.IndexingService"/>
<alias name="org.dspace.discovery.SearchService"
alias="org.dspace.discovery.IndexingService"/>
<bean class="org.dspace.discovery.MockSolrSearchCore" autowire-candidate="true"/>
<bean class="org.dspace.discovery.MockSolrSearchCore"
autowire-candidate="true"/>
<!--<bean class="org.dspace.discovery.SolrServiceIndexOutputPlugin" id="solrServiceIndexOutputPlugin"/>-->
<!--<bean class="org.dspace.discovery.SolrServiceIndexOutputPlugin"
id="solrServiceIndexOutputPlugin"/>-->
<!-- Statistics services are both lazy loaded (by name), as you are likely just using ONE of them and not both -->
<bean id="solrLoggerService" class="org.dspace.statistics.MockSolrLoggerServiceImpl" lazy-init="true"/>
<!-- Statistics services are both lazy loaded (by name), as you are likely
just using ONE of them and not both -->
<bean id="solrLoggerService"
class="org.dspace.statistics.MockSolrLoggerServiceImpl"
lazy-init="true"/>
</beans>

View File

@@ -29,9 +29,9 @@ import com.lyncode.xoai.dataprovider.services.impl.BaseDateProvider;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.Configuration;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.ContextConfiguration;
import org.apache.commons.lang3.time.DateUtils;
import org.dspace.app.rest.builder.CollectionBuilder;
import org.dspace.app.rest.builder.CommunityBuilder;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.content.Community;
import org.dspace.services.ConfigurationService;
import org.dspace.xoai.services.api.EarliestDateResolver;
@@ -76,7 +76,7 @@ public class OAIpmhIT extends AbstractControllerIntegrationTest {
private EarliestDateResolver earliestDateResolver;
// XOAI's BaseDateProvider (used for date-based testing below)
private static BaseDateProvider baseDateProvider = new BaseDateProvider();
private static final BaseDateProvider baseDateProvider = new BaseDateProvider();
// Spy on the current XOAIManagerResolver bean, to allow us to change behavior of XOAIManager in tests
// See also: createMockXOAIManager() method
@@ -278,6 +278,6 @@ public class OAIpmhIT extends AbstractControllerIntegrationTest {
* @throws ConfigurationException
*/
private XOAIManager createMockXOAIManager(Configuration xoaiConfig) throws ConfigurationException {
return new XOAIManager(filterResolver, resourceResolver, xoaiConfig);
return new XOAIManager(filterResolver, resourceResolver, xoaiConfig);
}
}

View File

@@ -12,10 +12,10 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.xpath;
import org.dspace.app.rest.builder.CollectionBuilder;
import org.dspace.app.rest.builder.CommunityBuilder;
import org.dspace.app.rest.builder.ItemBuilder;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
@@ -210,7 +210,7 @@ public class OpenSearchControllerIT extends AbstractControllerIntegrationTest {
.andExpect(xpath("OpenSearchDescription/LongName").string("DSpace at My University"))
.andExpect(xpath("OpenSearchDescription/Description")
.string("DSpace at My University DSpace repository")
)
)
;
/* Expected response for the service document is:
<?xml version="1.0" encoding="UTF-8"?>

View File

@@ -14,8 +14,8 @@ import static org.mockito.Mockito.doReturn;
import java.net.URI;
import org.dspace.app.rest.builder.CommunityBuilder;
import org.dspace.app.rest.test.AbstractWebClientIntegrationTest;
import org.dspace.builder.CommunityBuilder;
import org.dspace.content.Community;
import org.dspace.content.service.SiteService;
import org.dspace.rdf.RDFUtil;

Some files were not shown because too many files have changed in this diff.