Merge branch 'main' into draft_vocabulary

This commit is contained in:
ddinuzzo
2020-08-06 10:20:30 +02:00
committed by GitHub
185 changed files with 3454 additions and 1633 deletions


@@ -1,8 +1,7 @@
## References ## References
_Add references/links to any related tickets or PRs. These may include:_ _Add references/links to any related issues or PRs. These may include:_
* Link to [JIRA](https://jira.lyrasis.org/projects/DS/summary) ticket(s), if any * Related to [REST Contract](https://github.com/DSpace/Rest7Contract) or an open REST Contract PR, if any
* Link to [REST Contract](https://github.com/DSpace/Rest7Contract) or an open REST Contract PR, if any * Fixes [GitHub issue](https://github.com/DSpace/DSpace/issues), if any
* Link to [Angular issue or PR](https://github.com/DSpace/dspace-angular/issues) related to this PR, if any
## Description ## Description
Short summary of changes (1-2 sentences). Short summary of changes (1-2 sentences).


@@ -30,14 +30,14 @@ install: "echo 'Skipping install stage, dependencies will be downloaded during b
script: script:
# Summary of flags used (below): # Summary of flags used (below):
# license:check => Validate all source code license headers # license:check => Validate all source code license headers
# -Dmaven.test.skip=false => Enable DSpace Unit Tests # -DskipTests=false => Enable DSpace Unit Tests
# -DskipITs=false => Enable DSpace Integration Tests # -DskipITs=false => Enable DSpace Integration Tests
# -Pdspace-rest => Enable optional dspace-rest module as part of build # -Pdspace-rest => Enable optional dspace-rest module as part of build
# -P !assembly => Skip assembly of "dspace-installer" directory (as it can be memory intensive) # -P !assembly => Skip assembly of "dspace-installer" directory (as it can be memory intensive)
# -B => Maven batch/non-interactive mode (recommended for CI) # -B => Maven batch/non-interactive mode (recommended for CI)
# -V => Display Maven version info before build # -V => Display Maven version info before build
# -Dsurefire.rerunFailingTestsCount=2 => try again for flaky tests, and keep track of/report on number of retries # -Dsurefire.rerunFailingTestsCount=2 => try again for flaky tests, and keep track of/report on number of retries
- "mvn clean install license:check -Dmaven.test.skip=false -DskipITs=false -Pdspace-rest -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2" - "mvn clean install license:check -DskipTests=false -DskipITs=false -Pdspace-rest -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"
# After a successful build and test (see 'script'), send code coverage reports to coveralls.io # After a successful build and test (see 'script'), send code coverage reports to coveralls.io
# These code coverage reports are generated by jacoco-maven-plugin (during test process above). # These code coverage reports are generated by jacoco-maven-plugin (during test process above).


@@ -90,33 +90,33 @@ run automatically by [Travis CI](https://travis-ci.com/DSpace/DSpace/) for all P
* How to run both Unit Tests (via `maven-surefire-plugin`) and Integration Tests (via `maven-failsafe-plugin`): * How to run both Unit Tests (via `maven-surefire-plugin`) and Integration Tests (via `maven-failsafe-plugin`):
``` ```
mvn clean test -Dmaven.test.skip=false -DskipITs=false mvn clean test -DskipTests=false -DskipITs=false
``` ```
* How to run just Unit Tests: * How to run just Unit Tests:
``` ```
mvn test -Dmaven.test.skip=false mvn test -DskipTests=false
``` ```
* How to run a *single* Unit Test * How to run a *single* Unit Test
``` ```
# Run all tests in a specific test class # Run all tests in a specific test class
# NOTE: failIfNoTests=false is required to skip tests in other modules # NOTE: failIfNoTests=false is required to skip tests in other modules
mvn test -Dmaven.test.skip=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false mvn test -DskipTests=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false
# Run one test method in a specific test class # Run one test method in a specific test class
mvn test -Dmaven.test.skip=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false mvn test -DskipTests=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
``` ```
* How to run Integration Tests (requires enabling Unit tests too) * How to run Integration Tests (requires enabling Unit tests too)
``` ```
mvn verify -Dmaven.test.skip=false -DskipITs=false mvn verify -DskipTests=false -DskipITs=false
``` ```
* How to run a *single* Integration Test (requires enabling Unit tests too) * How to run a *single* Integration Test (requires enabling Unit tests too)
``` ```
# Run all integration tests in a specific test class # Run all integration tests in a specific test class
# NOTE: failIfNoTests=false is required to skip tests in other modules # NOTE: failIfNoTests=false is required to skip tests in other modules
mvn test -Dmaven.test.skip=false -DskipITs=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false mvn test -DskipTests=false -DskipITs=false -Dtest=[full.package.testClassName] -DfailIfNoTests=false
# Run one test method in a specific test class # Run one test method in a specific test class
mvn test -Dmaven.test.skip=false -DskipITs=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false mvn test -DskipTests=false -DskipITs=false -Dtest=[full.package.testClassName]#[testMethodName] -DfailIfNoTests=false
``` ```
* How to run only tests of a specific DSpace module * How to run only tests of a specific DSpace module
``` ```


@@ -137,7 +137,7 @@
<activation> <activation>
<activeByDefault>false</activeByDefault> <activeByDefault>false</activeByDefault>
<!-- property> <!-- property>
<name>maven.test.skip</name> <name>skipTests</name>
<value>false</value> <value>false</value>
</property --> </property -->
</activation> </activation>
@@ -158,7 +158,7 @@
<activation> <activation>
<activeByDefault>false</activeByDefault> <activeByDefault>false</activeByDefault>
<property> <property>
<name>maven.test.skip</name> <name>skipTests</name>
<value>false</value> <value>false</value>
</property> </property>
</activation> </activation>
@@ -241,6 +241,7 @@
<dspace.dir>${agnostic.build.dir}/testing/dspace/</dspace.dir> <dspace.dir>${agnostic.build.dir}/testing/dspace/</dspace.dir>
<!-- Turn off any DSpace logging --> <!-- Turn off any DSpace logging -->
<dspace.log.init.disable>true</dspace.log.init.disable> <dspace.log.init.disable>true</dspace.log.init.disable>
<solr.install.dir>${agnostic.build.dir}/testing/dspace/solr/</solr.install.dir>
</systemPropertyVariables> </systemPropertyVariables>
</configuration> </configuration>
</plugin> </plugin>
@@ -255,6 +256,7 @@
<dspace.dir>${agnostic.build.dir}/testing/dspace/</dspace.dir> <dspace.dir>${agnostic.build.dir}/testing/dspace/</dspace.dir>
<!-- Turn off any DSpace logging --> <!-- Turn off any DSpace logging -->
<dspace.log.init.disable>true</dspace.log.init.disable> <dspace.log.init.disable>true</dspace.log.init.disable>
<solr.install.dir>${agnostic.build.dir}/testing/dspace/solr/</solr.install.dir>
</systemPropertyVariables> </systemPropertyVariables>
</configuration> </configuration>
</plugin> </plugin>
@@ -291,9 +293,20 @@
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.dspace</groupId> <groupId>net.handle</groupId>
<artifactId>handle</artifactId> <artifactId>handle</artifactId>
</dependency> </dependency>
<dependency>
<groupId>net.cnri</groupId>
<artifactId>cnri-servlet-container</artifactId>
<exclusions>
<!-- Newer versions provided in our parent POM -->
<exclusion>
<groupId>org.ow2.asm</groupId>
<artifactId>asm-commons</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Jetty is needed to run Handle Server --> <!-- Jetty is needed to run Handle Server -->
<dependency> <dependency>
<groupId>org.eclipse.jetty</groupId> <groupId>org.eclipse.jetty</groupId>
@@ -312,6 +325,10 @@
<artifactId>apache-jena-libs</artifactId> <artifactId>apache-jena-libs</artifactId>
<type>pom</type> <type>pom</type>
</dependency> </dependency>
<dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
</dependency>
<dependency> <dependency>
<groupId>commons-codec</groupId> <groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId> <artifactId>commons-codec</artifactId>
@@ -468,16 +485,164 @@
<dependency> <dependency>
<groupId>org.apache.solr</groupId> <groupId>org.apache.solr</groupId>
<artifactId>solr-cell</artifactId> <artifactId>solr-solrj</artifactId>
<version>${solr.client.version}</version>
</dependency>
<!-- Solr Core is needed for Integration Tests (to run a MockSolrServer) -->
<!-- The following Solr / Lucene dependencies also support integration tests -->
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr-core</artifactId>
<scope>test</scope>
<version>${solr.client.version}</version> <version>${solr.client.version}</version>
<exclusions> <exclusions>
<!-- Newer version provided in our parent POM --> <exclusion>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-continuation</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-deploy</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-http</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-io</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-jmx</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-rewrite</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-security</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-server</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlet</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlets</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-webapp</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-xml</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr-cell</artifactId>
<exclusions>
<!-- Newer versions provided in our parent POM -->
<exclusion>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>org.ow2.asm</groupId> <groupId>org.ow2.asm</groupId>
<artifactId>asm-commons</artifactId> <artifactId>asm-commons</artifactId>
</exclusion> </exclusion>
<exclusion>
<groupId>org.bouncycastle</groupId>
<artifactId>bcpkix-jdk15on</artifactId>
</exclusion>
<exclusion>
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk15on</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-xml</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-http</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlet</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-webapp</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-deploy</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-continuation</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlets</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-io</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-security</artifactId>
</exclusion>
</exclusions> </exclusions>
</dependency> </dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId>
</dependency>
<!-- Reminder: Keep icu4j (in Parent POM) synced with version used by lucene-analyzers-icu below,
otherwise ICUFoldingFilterFactory may throw errors in tests. -->
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-icu</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-smartcn</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-stempel</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.xmlbeans</groupId>
<artifactId>xmlbeans</artifactId>
<version>2.6.0</version>
</dependency>
<dependency> <dependency>
<groupId>com.maxmind.geoip2</groupId> <groupId>com.maxmind.geoip2</groupId>
@@ -658,7 +823,7 @@
<dependency> <dependency>
<groupId>org.xmlunit</groupId> <groupId>org.xmlunit</groupId>
<artifactId>xmlunit-matchers</artifactId> <artifactId>xmlunit-core</artifactId>
<version>2.6.3</version> <version>2.6.3</version>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>


@@ -1519,6 +1519,12 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
if (!dir.exists() && !dir.mkdirs()) { if (!dir.exists() && !dir.mkdirs()) {
log.error("Unable to create directory: " + dir.getAbsolutePath()); log.error("Unable to create directory: " + dir.getAbsolutePath());
} }
// Verify that the directory the entry is using is a subpath of zipDir (and not somewhere else!)
if (!dir.toPath().normalize().startsWith(zipDir)) {
throw new IOException("Bad zip entry: '" + entry.getName()
+ "' in file '" + zipfile.getAbsolutePath() + "'!"
+ " Cannot process this file.");
}
//Entries could have too many directories, and we need to adjust the sourcedir //Entries could have too many directories, and we need to adjust the sourcedir
// file1.zip (SimpleArchiveFormat / item1 / contents|dublin_core|... // file1.zip (SimpleArchiveFormat / item1 / contents|dublin_core|...
@@ -1539,9 +1545,16 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
} }
byte[] buffer = new byte[1024]; byte[] buffer = new byte[1024];
int len; int len;
File outFile = new File(zipDir + entry.getName());
// Verify that this file will be created in our zipDir (and not somewhere else!)
if (!outFile.toPath().normalize().startsWith(zipDir)) {
throw new IOException("Bad zip entry: '" + entry.getName()
+ "' in file '" + zipfile.getAbsolutePath() + "'!"
+ " Cannot process this file.");
}
InputStream in = zf.getInputStream(entry); InputStream in = zf.getInputStream(entry);
BufferedOutputStream out = new BufferedOutputStream( BufferedOutputStream out = new BufferedOutputStream(
new FileOutputStream(zipDir + entry.getName())); new FileOutputStream(outFile));
while ((len = in.read(buffer)) >= 0) { while ((len = in.read(buffer)) >= 0) {
out.write(buffer, 0, len); out.write(buffer, 0, len);
} }

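Both added checks guard against the "zip slip" traversal pattern, where a crafted entry name such as `../../foo` escapes the extraction directory. A minimal standalone sketch of the same check (the class and method names here are illustrative, not the DSpace code):

```java
import java.io.IOException;
import java.nio.file.Path;
import java.util.zip.ZipEntry;

public final class ZipSlipCheck {
    /**
     * Resolve a zip entry against the extraction directory and reject any
     * entry whose normalized path escapes it (e.g. "../../etc/passwd").
     */
    static Path safeResolve(Path extractDir, ZipEntry entry) throws IOException {
        Path target = extractDir.resolve(entry.getName()).normalize();
        if (!target.startsWith(extractDir.normalize())) {
            throw new IOException("Bad zip entry: '" + entry.getName() + "'");
        }
        return target;
    }
}
```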

@@ -48,6 +48,9 @@ public class SHERPAResponse {
factory.setValidating(false); factory.setValidating(false);
factory.setIgnoringComments(true); factory.setIgnoringComments(true);
factory.setIgnoringElementContentWhitespace(true); factory.setIgnoringElementContentWhitespace(true);
// disallow DTD parsing to ensure no XXE attacks can occur.
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
DocumentBuilder db = factory.newDocumentBuilder(); DocumentBuilder db = factory.newDocumentBuilder();
Document inDoc = db.parse(xmlData); Document inDoc = db.parse(xmlData);

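This hardening recurs in several of the parsers changed below (METSManifest, MetadataWebService, CCLicenseConnectorServiceImpl, and the ArXiv, CiNii, CrossRef and Pubmed services): disallowing DOCTYPE declarations means no external entities can be defined, which closes off XXE. A minimal sketch of the pattern, per the OWASP cheat sheet referenced in the comments:

```java
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

public final class SafeXml {
    static DocumentBuilder newHardenedBuilder() throws ParserConfigurationException {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        // Disallow DOCTYPE declarations entirely; without a DTD there is no way
        // to declare external entities, so XXE payloads are rejected up front.
        factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        factory.setValidating(false);
        factory.setIgnoringComments(true);
        return factory.newDocumentBuilder();
    }
}
```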

@@ -153,7 +153,7 @@ public class BitstreamFormatServiceImpl implements BitstreamFormatService {
// If the exception was thrown, unknown will == null so go ahead and // If the exception was thrown, unknown will == null so go ahead and
// load s. If not, check that the unknown's registry's name is not // load s. If not, check that the unknown's registry's name is not
// being reset. // being reset.
if (unknown == null || unknown.getID() != bitstreamFormat.getID()) { if (unknown == null || !unknown.getID().equals(bitstreamFormat.getID())) {
bitstreamFormat.setShortDescriptionInternal(shortDescription); bitstreamFormat.setShortDescriptionInternal(shortDescription);
} }
} }
@@ -208,7 +208,7 @@ public class BitstreamFormatServiceImpl implements BitstreamFormatService {
// Find "unknown" type // Find "unknown" type
BitstreamFormat unknown = findUnknown(context); BitstreamFormat unknown = findUnknown(context);
if (unknown.getID() == bitstreamFormat.getID()) { if (unknown.getID().equals(bitstreamFormat.getID())) {
throw new IllegalArgumentException("The Unknown bitstream format may not be deleted."); throw new IllegalArgumentException("The Unknown bitstream format may not be deleted.");
} }
@@ -270,4 +270,4 @@ public class BitstreamFormatServiceImpl implements BitstreamFormatService {
} }
return null; return null;
} }
} }

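These comparisons were changed because the IDs involved are boxed Integer objects: `==` compares references and only happens to work for values inside the small-integer cache, while `equals()` compares the numeric value. The same fix appears in MetadataField, MetadataSchema, MetadataValue and WorkspaceItem below. A tiny illustration:

```java
public final class BoxedIdComparison {
    public static void main(String[] args) {
        Integer a = 1000;
        Integer b = 1000;
        System.out.println(a == b);      // false: two distinct Integer instances
        System.out.println(a.equals(b)); // true: same numeric value

        Integer c = 100;
        Integer d = 100;
        System.out.println(c == d);      // true, but only because of the Integer cache (-128..127)
    }
}
```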

@@ -9,6 +9,7 @@ package org.dspace.content;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
@@ -101,7 +102,7 @@ public class MetadataDSpaceCsvExportServiceImpl implements MetadataDSpaceCsvExpo
throws SQLException { throws SQLException {
// Add all the collections // Add all the collections
List<Collection> collections = community.getCollections(); List<Collection> collections = community.getCollections();
Iterator<Item> result = null; Iterator<Item> result = Collections.<Item>emptyIterator();
for (Collection collection : collections) { for (Collection collection : collections) {
Iterator<Item> items = itemService.findByCollection(context, collection); Iterator<Item> items = itemService.findByCollection(context, collection);
result = addItemsToResult(result, items); result = addItemsToResult(result, items);

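Seeding `result` with `Collections.emptyIterator()` instead of `null` lets the loop chain per-collection iterators without a null check, and a community with no collections now yields an empty iterator rather than null. A small sketch of that chaining, with a hypothetical `concat` helper standing in for `addItemsToResult`:

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

public final class ChainIterators {
    /** Concatenate two iterators; a stand-in for the service's addItemsToResult helper. */
    static <T> Iterator<T> concat(Iterator<T> first, Iterator<T> second) {
        List<T> combined = new ArrayList<>();
        first.forEachRemaining(combined::add);
        second.forEachRemaining(combined::add);
        return combined.iterator();
    }

    public static void main(String[] args) {
        List<List<String>> collections = Arrays.asList(
            Arrays.asList("item1", "item2"),
            Collections.singletonList("item3"));

        // Start from an empty iterator so the result is never null,
        // even when a community has no collections.
        Iterator<String> result = Collections.emptyIterator();
        for (List<String> collection : collections) {
            result = concat(result, collection.iterator());
        }
        result.forEachRemaining(System.out::println); // item1, item2, item3
    }
}
```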

@@ -168,11 +168,11 @@ public class MetadataField implements ReloadableEntity<Integer> {
return false; return false;
} }
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(obj); Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(obj);
if (getClass() != objClass) { if (!getClass().equals(objClass)) {
return false; return false;
} }
final MetadataField other = (MetadataField) obj; final MetadataField other = (MetadataField) obj;
if (this.getID() != other.getID()) { if (!this.getID().equals(other.getID())) {
return false; return false;
} }
if (!getMetadataSchema().equals(other.getMetadataSchema())) { if (!getMetadataSchema().equals(other.getMetadataSchema())) {


@@ -67,11 +67,11 @@ public class MetadataSchema implements ReloadableEntity<Integer> {
return false; return false;
} }
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(obj); Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(obj);
if (getClass() != objClass) { if (!getClass().equals(objClass)) {
return false; return false;
} }
final MetadataSchema other = (MetadataSchema) obj; final MetadataSchema other = (MetadataSchema) obj;
if (this.id != other.id) { if (!this.id.equals(other.id)) {
return false; return false;
} }
if ((this.namespace == null) ? (other.namespace != null) : !this.namespace.equals(other.namespace)) { if ((this.namespace == null) ? (other.namespace != null) : !this.namespace.equals(other.namespace)) {


@@ -239,17 +239,17 @@ public class MetadataValue implements ReloadableEntity<Integer> {
return false; return false;
} }
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(obj); Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(obj);
if (getClass() != objClass) { if (!getClass().equals(objClass)) {
return false; return false;
} }
final MetadataValue other = (MetadataValue) obj; final MetadataValue other = (MetadataValue) obj;
if (this.id != other.id) { if (!this.id.equals(other.id)) {
return false; return false;
} }
if (this.getID() != other.getID()) { if (!this.getID().equals(other.getID())) {
return false; return false;
} }
if (this.getDSpaceObject().getID() != other.getDSpaceObject().getID()) { if (!this.getDSpaceObject().getID().equals(other.getDSpaceObject().getID())) {
return false; return false;
} }
return true; return true;


@@ -156,11 +156,11 @@ public class WorkspaceItem
return true; return true;
} }
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(o); Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(o);
if (getClass() != objClass) { if (!getClass().equals(objClass)) {
return false; return false;
} }
final WorkspaceItem that = (WorkspaceItem) o; final WorkspaceItem that = (WorkspaceItem) o;
if (this.getID() != that.getID()) { if (!this.getID().equals(that.getID())) {
return false; return false;
} }


@@ -272,12 +272,16 @@ public class METSManifest {
// Set validation feature // Set validation feature
if (validate) { if (validate) {
builder.setFeature("http://apache.org/xml/features/validation/schema", true); builder.setFeature("http://apache.org/xml/features/validation/schema", true);
}
// Tell the parser where local copies of schemas are, to speed up // Tell the parser where local copies of schemas are, to speed up
// validation. Local XSDs are identified in the configuration file. // validation & avoid XXE attacks from remote schemas. Local XSDs are identified in the configuration file.
if (localSchemas.length() > 0) { if (localSchemas.length() > 0) {
builder.setProperty("http://apache.org/xml/properties/schema/external-schemaLocation", localSchemas); builder.setProperty("http://apache.org/xml/properties/schema/external-schemaLocation", localSchemas);
}
} else {
// disallow DTD parsing to ensure no XXE attacks can occur.
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
builder.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
} }
// Parse the METS file // Parse the METS file


@@ -199,6 +199,9 @@ public class MetadataWebService extends AbstractCurationTask implements Namespac
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setNamespaceAware(true); factory.setNamespaceAware(true);
try { try {
// disallow DTD parsing to ensure no XXE attacks can occur.
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
docBuilder = factory.newDocumentBuilder(); docBuilder = factory.newDocumentBuilder();
} catch (ParserConfigurationException pcE) { } catch (ParserConfigurationException pcE) {
log.error("caught exception: " + pcE); log.error("caught exception: " + pcE);


@@ -8,21 +8,22 @@
package org.dspace.curate; package org.dspace.curate;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader; import java.io.FileReader;
import java.io.IOException;
import java.io.OutputStream; import java.io.OutputStream;
import java.io.OutputStreamWriter; import java.io.OutputStreamWriter;
import java.io.PrintStream; import java.io.PrintStream;
import java.io.Writer; import java.io.Writer;
import java.sql.SQLException;
import java.util.HashMap; import java.util.HashMap;
import java.util.Iterator; import java.util.Iterator;
import java.util.Map; import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.io.output.NullOutputStream; import org.apache.commons.io.output.NullOutputStream;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.factory.CoreServiceFactory; import org.dspace.core.factory.CoreServiceFactory;
@@ -30,183 +31,86 @@ import org.dspace.curate.factory.CurateServiceFactory;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.utils.DSpace;
/** /**
* CurationCli provides command-line access to Curation tools and processes. * CurationCli provides command-line access to Curation tools and processes.
* *
* @author richardrodgers * @author richardrodgers
*/ */
public class CurationCli { public class CurationCli extends DSpaceRunnable<CurationScriptConfiguration> {
private EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
private Context context;
private CurationClientOptions curationClientOptions;
private String task;
private String taskFile;
private String id;
private String queue;
private String scope;
private String reporter;
private Map<String, String> parameters;
private boolean verbose;
@Override
public void internalRun() throws Exception {
if (curationClientOptions == CurationClientOptions.HELP) {
printHelp();
return;
}
Curator curator = initCurator();
// load curation tasks
if (curationClientOptions == CurationClientOptions.TASK) {
long start = System.currentTimeMillis();
handleCurationTask(curator);
this.endScript(start);
}
// process task queue
if (curationClientOptions == CurationClientOptions.QUEUE) {
// process the task queue
TaskQueue taskQueue = (TaskQueue) CoreServiceFactory.getInstance().getPluginService()
.getSinglePlugin(TaskQueue.class);
if (taskQueue == null) {
super.handler.logError("No implementation configured for queue");
throw new UnsupportedOperationException("No queue service available");
}
long timeRun = this.runQueue(taskQueue, curator);
this.endScript(timeRun);
}
}
/** /**
* Default constructor * Does the curation task (-t) or the task in the given file (-T).
* Checks:
* - if required option -i is missing.
* - if option -t has a valid task option
*/ */
private CurationCli() { } private void handleCurationTask(Curator curator) throws IOException, SQLException {
String taskName;
public static void main(String[] args) throws Exception { if (commandLine.hasOption('t')) {
// create an options object and populate it
CommandLineParser parser = new PosixParser();
Options options = new Options();
options.addOption("t", "task", true,
"curation task name");
options.addOption("T", "taskfile", true,
"file containing curation task names");
options.addOption("i", "id", true,
"Id (handle) of object to perform task on, or 'all' to perform on whole repository");
options.addOption("p", "parameter", true,
"a task parameter 'NAME=VALUE'");
options.addOption("q", "queue", true,
"name of task queue to process");
options.addOption("e", "eperson", true,
"email address of curating eperson");
options.addOption("r", "reporter", true,
"relative or absolute path to the desired report file. "
+ "Use '-' to report to console. "
+ "If absent, no reporting");
options.addOption("s", "scope", true,
"transaction scope to impose: use 'object', 'curation', or 'open'. If absent, 'open' " +
"applies");
options.addOption("v", "verbose", false,
"report activity to stdout");
options.addOption("h", "help", false, "help");
CommandLine line = parser.parse(options, args);
String taskName = null;
String taskFileName = null;
String idName = null;
String taskQueueName = null;
String ePersonName = null;
String reporterName = null;
String scope = null;
boolean verbose = false;
final Map<String, String> parameters = new HashMap<>();
if (line.hasOption('h')) {
HelpFormatter help = new HelpFormatter();
help.printHelp("CurationCli\n", options);
System.out
.println("\nwhole repo: CurationCli -t estimate -i all");
System.out
.println("single item: CurationCli -t generate -i itemId");
System.out
.println("task queue: CurationCli -q monthly");
System.exit(0);
}
if (line.hasOption('t')) { // task
taskName = line.getOptionValue('t');
}
if (line.hasOption('T')) { // task file
taskFileName = line.getOptionValue('T');
}
if (line.hasOption('i')) { // id
idName = line.getOptionValue('i');
}
if (line.hasOption('q')) { // task queue
taskQueueName = line.getOptionValue('q');
}
if (line.hasOption('e')) { // eperson
ePersonName = line.getOptionValue('e');
}
if (line.hasOption('p')) { // parameter
for (String parameter : line.getOptionValues('p')) {
String[] parts = parameter.split("=", 2);
String name = parts[0].trim();
String value;
if (parts.length > 1) {
value = parts[1].trim();
} else {
value = "true";
}
parameters.put(name, value);
}
}
if (line.hasOption('r')) { // report file
reporterName = line.getOptionValue('r');
}
if (line.hasOption('s')) { // transaction scope
scope = line.getOptionValue('s');
}
if (line.hasOption('v')) { // verbose
verbose = true;
}
// now validate the args
if (idName == null && taskQueueName == null) {
System.out.println("Id must be specified: a handle, 'all', or a task queue (-h for help)");
System.exit(1);
}
if (taskName == null && taskFileName == null && taskQueueName == null) {
System.out.println("A curation task or queue must be specified (-h for help)");
System.exit(1);
}
if (scope != null && Curator.TxScope.valueOf(scope.toUpperCase()) == null) {
System.out.println("Bad transaction scope '" + scope + "': only 'object', 'curation' or 'open' recognized");
System.exit(1);
}
EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
Context c = new Context(Context.Mode.BATCH_EDIT);
if (ePersonName != null) {
EPerson ePerson = ePersonService.findByEmail(c, ePersonName);
if (ePerson == null) {
System.out.println("EPerson not found: " + ePersonName);
System.exit(1);
}
c.setCurrentUser(ePerson);
} else {
c.turnOffAuthorisationSystem();
}
Curator curator = new Curator();
OutputStream reporter;
if (null == reporterName) {
reporter = new NullOutputStream();
} else if ("-".equals(reporterName)) {
reporter = System.out;
} else {
reporter = new PrintStream(reporterName);
}
Writer reportWriter = new OutputStreamWriter(reporter);
curator.setReporter(reportWriter);
if (scope != null) {
Curator.TxScope txScope = Curator.TxScope.valueOf(scope.toUpperCase());
curator.setTransactionScope(txScope);
}
curator.addParameters(parameters);
// we are operating in batch mode, if anyone cares.
curator.setInvoked(Curator.Invoked.BATCH);
// load curation tasks
if (taskName != null) {
if (verbose) { if (verbose) {
System.out.println("Adding task: " + taskName); handler.logInfo("Adding task: " + this.task);
} }
curator.addTask(taskName); curator.addTask(this.task);
if (verbose && !curator.hasTask(taskName)) { if (verbose && !curator.hasTask(this.task)) {
System.out.println("Task: " + taskName + " not resolved"); handler.logInfo("Task: " + this.task + " not resolved");
} }
} else if (taskQueueName == null) { } else if (commandLine.hasOption('T')) {
// load taskFile // load taskFile
BufferedReader reader = null; BufferedReader reader = null;
try { try {
reader = new BufferedReader(new FileReader(taskFileName)); reader = new BufferedReader(new FileReader(this.taskFile));
while ((taskName = reader.readLine()) != null) { while ((taskName = reader.readLine()) != null) {
if (verbose) { if (verbose) {
System.out.println("Adding task: " + taskName); super.handler.logInfo("Adding task: " + taskName);
} }
curator.addTask(taskName); curator.addTask(taskName);
} }
@@ -217,59 +121,242 @@ public class CurationCli {
} }
} }
// run tasks against object // run tasks against object
long start = System.currentTimeMillis();
if (verbose) { if (verbose) {
System.out.println("Starting curation"); super.handler.logInfo("Starting curation");
super.handler.logInfo("Curating id: " + this.id);
} }
if (idName != null) { if ("all".equals(this.id)) {
// run on whole Site
curator.curate(context,
ContentServiceFactory.getInstance().getSiteService().findSite(context).getHandle());
} else {
curator.curate(context, this.id);
}
}
/**
* Runs task queue (-q set)
*
* @param queue The task queue
* @param curator The curator
* @return Time when queue started
*/
private long runQueue(TaskQueue queue, Curator curator) throws SQLException, AuthorizeException, IOException {
// use current time as our reader 'ticket'
long ticket = System.currentTimeMillis();
Iterator<TaskQueueEntry> entryIter = queue.dequeue(this.queue, ticket).iterator();
while (entryIter.hasNext()) {
TaskQueueEntry entry = entryIter.next();
if (verbose) { if (verbose) {
System.out.println("Curating id: " + idName); super.handler.logInfo("Curating id: " + entry.getObjectId());
} }
if ("all".equals(idName)) { curator.clear();
// run on whole Site // does entry relate to a DSO or workflow object?
curator.curate(c, ContentServiceFactory.getInstance().getSiteService().findSite(c).getHandle()); if (entry.getObjectId().indexOf('/') > 0) {
for (String taskName : entry.getTaskNames()) {
curator.addTask(taskName);
}
curator.curate(context, entry.getObjectId());
} else { } else {
curator.curate(c, idName); // make eperson who queued task the effective user
EPerson agent = ePersonService.findByEmail(context, entry.getEpersonId());
if (agent != null) {
context.setCurrentUser(agent);
}
CurateServiceFactory.getInstance().getWorkflowCuratorService()
.curate(curator, context, entry.getObjectId());
}
}
queue.release(this.queue, ticket, true);
return ticket;
}
/**
* End of curation script; logs script time if -v verbose is set
*
* @param timeRun Time script was started
* @throws SQLException If DSpace context can't complete
*/
private void endScript(long timeRun) throws SQLException {
context.complete();
if (verbose) {
long elapsed = System.currentTimeMillis() - timeRun;
this.handler.logInfo("Ending curation. Elapsed time: " + elapsed);
}
}
/**
* Initialize the curator with command line variables
*
* @return Initialised curator
* @throws FileNotFoundException If file of command line variable -r reporter is not found
*/
private Curator initCurator() throws FileNotFoundException {
Curator curator = new Curator();
OutputStream reporterStream;
if (null == this.reporter) {
reporterStream = new NullOutputStream();
} else if ("-".equals(this.reporter)) {
reporterStream = System.out;
} else {
reporterStream = new PrintStream(this.reporter);
}
Writer reportWriter = new OutputStreamWriter(reporterStream);
curator.setReporter(reportWriter);
if (this.scope != null) {
Curator.TxScope txScope = Curator.TxScope.valueOf(this.scope.toUpperCase());
curator.setTransactionScope(txScope);
}
curator.addParameters(parameters);
// we are operating in batch mode, if anyone cares.
curator.setInvoked(Curator.Invoked.BATCH);
return curator;
}
@Override
public void printHelp() {
super.printHelp();
super.handler.logInfo("\nwhole repo: CurationCli -t estimate -i all");
super.handler.logInfo("single item: CurationCli -t generate -i itemId");
super.handler.logInfo("task queue: CurationCli -q monthly");
}
@Override
public CurationScriptConfiguration getScriptConfiguration() {
return new DSpace().getServiceManager().getServiceByName("curate", CurationScriptConfiguration.class);
}
@Override
public void setup() {
if (this.commandLine.hasOption('e')) {
String ePersonEmail = this.commandLine.getOptionValue('e');
this.context = new Context(Context.Mode.BATCH_EDIT);
try {
EPerson ePerson = ePersonService.findByEmail(this.context, ePersonEmail);
if (ePerson == null) {
super.handler.logError("EPerson not found: " + ePersonEmail);
throw new IllegalArgumentException("Unable to find a user with email: " + ePersonEmail);
}
this.context.setCurrentUser(ePerson);
} catch (SQLException e) {
throw new IllegalArgumentException("SQLException trying to find user with email: " + ePersonEmail);
} }
} else { } else {
// process the task queue throw new IllegalArgumentException("Needs an -e to set eperson (admin)");
TaskQueue queue = (TaskQueue) CoreServiceFactory.getInstance().getPluginService()
.getSinglePlugin(TaskQueue.class);
if (queue == null) {
System.out.println("No implementation configured for queue");
throw new UnsupportedOperationException("No queue service available");
}
// use current time as our reader 'ticket'
long ticket = System.currentTimeMillis();
Iterator<TaskQueueEntry> entryIter = queue.dequeue(taskQueueName, ticket).iterator();
while (entryIter.hasNext()) {
TaskQueueEntry entry = entryIter.next();
if (verbose) {
System.out.println("Curating id: " + entry.getObjectId());
}
curator.clear();
// does entry relate to a DSO or workflow object?
if (entry.getObjectId().indexOf("/") > 0) {
for (String task : entry.getTaskNames()) {
curator.addTask(task);
}
curator.curate(c, entry.getObjectId());
} else {
// make eperson who queued task the effective user
EPerson agent = ePersonService.findByEmail(c, entry.getEpersonId());
if (agent != null) {
c.setCurrentUser(agent);
}
CurateServiceFactory.getInstance().getWorkflowCuratorService()
.curate(curator, c, entry.getObjectId());
}
}
queue.release(taskQueueName, ticket, true);
} }
c.complete(); this.curationClientOptions = CurationClientOptions.getClientOption(commandLine);
if (verbose) {
long elapsed = System.currentTimeMillis() - start; if (this.curationClientOptions != null) {
System.out.println("Ending curation. Elapsed time: " + elapsed); this.initGeneralLineOptionsAndCheckIfValid();
if (curationClientOptions == CurationClientOptions.TASK) {
this.initTaskLineOptionsAndCheckIfValid();
} else if (curationClientOptions == CurationClientOptions.QUEUE) {
this.queue = this.commandLine.getOptionValue('q');
}
} else {
throw new IllegalArgumentException("[--help || --task|--taskfile <> -identifier <> || -queue <> ] must be" +
" specified");
}
}
/**
* Fills in some optional command line options.
* Checks if there are missing required options or invalid values for options.
*/
private void initGeneralLineOptionsAndCheckIfValid() {
// report file
if (this.commandLine.hasOption('r')) {
this.reporter = this.commandLine.getOptionValue('r');
}
// parameters
this.parameters = new HashMap<>();
if (this.commandLine.hasOption('p')) {
for (String parameter : this.commandLine.getOptionValues('p')) {
String[] parts = parameter.split("=", 2);
String name = parts[0].trim();
String value;
if (parts.length > 1) {
value = parts[1].trim();
} else {
value = "true";
}
this.parameters.put(name, value);
}
}
// verbose
verbose = false;
if (commandLine.hasOption('v')) {
verbose = true;
}
// scope
if (this.commandLine.getOptionValue('s') != null) {
this.scope = this.commandLine.getOptionValue('s');
if (this.scope != null && Curator.TxScope.valueOf(this.scope.toUpperCase()) == null) {
this.handler.logError("Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " +
"'open' recognized");
throw new IllegalArgumentException(
"Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " +
"'open' recognized");
}
}
}
/**
* Fills in required command line options for the task or taskFile option.
* Checks if there is a missing required -i option and if -i is either 'all' or a valid dso handle.
* Checks if -t task has a valid task option.
* Checks if -T taskfile is a valid file.
*/
private void initTaskLineOptionsAndCheckIfValid() {
// task or taskFile
if (this.commandLine.hasOption('t')) {
this.task = this.commandLine.getOptionValue('t');
if (!CurationClientOptions.getTaskOptions().contains(this.task)) {
super.handler
.logError("-t task must be one of: " + CurationClientOptions.getTaskOptions());
throw new IllegalArgumentException(
"-t task must be one of: " + CurationClientOptions.getTaskOptions());
}
} else if (this.commandLine.hasOption('T')) {
this.taskFile = this.commandLine.getOptionValue('T');
if (!(new File(this.taskFile).isFile())) {
super.handler
.logError("-T taskFile must be valid file: " + this.taskFile);
throw new IllegalArgumentException("-T taskFile must be valid file: " + this.taskFile);
}
}
if (this.commandLine.hasOption('i')) {
this.id = this.commandLine.getOptionValue('i').toLowerCase();
if (!this.id.equalsIgnoreCase("all")) {
HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
DSpaceObject dso;
try {
dso = handleService.resolveToObject(this.context, id);
} catch (SQLException e) {
super.handler.logError("SQLException trying to resolve handle " + id + " to a valid dso");
throw new IllegalArgumentException(
"SQLException trying to resolve handle " + id + " to a valid dso");
}
if (dso == null) {
super.handler.logError("Id must be specified: a valid dso handle or 'all'; " + this.id + " could " +
"not be resolved to valid dso handle");
throw new IllegalArgumentException(
"Id must be specified: a valid dso handle or 'all'; " + this.id + " could " +
"not be resolved to valid dso handle");
}
}
} else {
super.handler.logError("Id must be specified: a handle, 'all', or no -i and a -q task queue (-h for " +
"help)");
throw new IllegalArgumentException(
"Id must be specified: a handle, 'all', or no -i and a -q task queue (-h for " +
"help)");
} }
} }
} }


@@ -0,0 +1,85 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.lang3.StringUtils;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
* This Enum holds all the possible options and combinations for the Curation script
*
* @author Maria Verdonck (Atmire) on 23/06/2020
*/
public enum CurationClientOptions {
TASK,
QUEUE,
HELP;
private static List<String> taskOptions;
/**
* This method resolves the CommandLine parameters to figure out which action the curation script should perform
*
* @param commandLine The relevant CommandLine for the curation script
* @return The curation option to be run, parsed from the CommandLine
*/
protected static CurationClientOptions getClientOption(CommandLine commandLine) {
if (commandLine.hasOption("h")) {
return CurationClientOptions.HELP;
} else if (commandLine.hasOption("t") || commandLine.hasOption("T")) {
return CurationClientOptions.TASK;
} else if (commandLine.hasOption("q")) {
return CurationClientOptions.QUEUE;
}
return null;
}
protected static Options constructOptions() {
Options options = new Options();
options.addOption("t", "task", true, "curation task name; options: " + getTaskOptions());
options.addOption("T", "taskfile", true, "file containing curation task names");
options.addOption("i", "id", true,
"Id (handle) of object to perform task on, or 'all' to perform on whole repository");
options.addOption("p", "parameter", true, "a task parameter 'NAME=VALUE'");
options.addOption("q", "queue", true, "name of task queue to process");
options.addOption("e", "eperson", true, "email address of curating eperson");
options.addOption("r", "reporter", true,
"relative or absolute path to the desired report file. Use '-' to report to console. If absent, no " +
"reporting");
options.addOption("s", "scope", true,
"transaction scope to impose: use 'object', 'curation', or 'open'. If absent, 'open' applies");
options.addOption("v", "verbose", false, "report activity to stdout");
options.addOption("h", "help", false, "help");
return options;
}
/**
* Creates list of the taskOptions' keys from the configs of plugin.named.org.dspace.curate.CurationTask
*
* @return List of the taskOptions' keys from the configs of plugin.named.org.dspace.curate.CurationTask
*/
public static List<String> getTaskOptions() {
if (taskOptions == null) {
ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
String[] taskConfigs = configurationService.getArrayProperty("plugin.named.org.dspace.curate.CurationTask");
taskOptions = new ArrayList<>();
for (String taskConfig : taskConfigs) {
taskOptions.add(StringUtils.substringAfterLast(taskConfig, "=").trim());
}
}
return taskOptions;
}
}


@@ -0,0 +1,61 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.sql.SQLException;
import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;
/**
* The {@link ScriptConfiguration} for the {@link CurationCli} script
*
* @author Maria Verdonck (Atmire) on 23/06/2020
*/
public class CurationScriptConfiguration<T extends CurationCli> extends ScriptConfiguration<T> {
@Autowired
private AuthorizeService authorizeService;
private Class<T> dspaceRunnableClass;
@Override
public Class<T> getDspaceRunnableClass() {
return this.dspaceRunnableClass;
}
@Override
public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
this.dspaceRunnableClass = dspaceRunnableClass;
}
/**
* Only admin can run Curation script via the scripts and processes endpoints.
* @param context The relevant DSpace context
* @return True if currentUser is admin, otherwise false
*/
@Override
public boolean isAllowedToExecute(Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}
@Override
public Options getOptions() {
if (options == null) {
super.options = CurationClientOptions.constructOptions();
}
return options;
}
}

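Taken together, CurationCli (now a DSpaceRunnable) and the new CurationScriptConfiguration split the work: the configuration supplies the commons-cli Options and the admin-only authorization check, while the runnable reads `commandLine` in `setup()` and performs the curation in `internalRun()`. A self-contained sketch of that split using stand-in classes (these are not the DSpace base classes, only the shape of the pattern):

```java
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;

/** Stand-in for the runnable base class: parse options, then setup(), then internalRun(). */
abstract class MiniRunnable {
    protected CommandLine commandLine;

    void initialize(String[] args, Options options) throws Exception {
        this.commandLine = new DefaultParser().parse(options, args);
        setup();
        internalRun();
    }

    abstract void setup();

    abstract void internalRun() throws Exception;
}

public class MiniCuration extends MiniRunnable {
    private String task;

    @Override
    void setup() {
        // Validation lives in setup(), mirroring CurationCli above.
        if (!commandLine.hasOption('t') && !commandLine.hasOption('q')) {
            throw new IllegalArgumentException("a task (-t) or queue (-q) must be specified");
        }
        this.task = commandLine.getOptionValue('t');
    }

    @Override
    void internalRun() {
        System.out.println("Would run curation task: " + task);
    }

    public static void main(String[] args) throws Exception {
        // The Options would come from the script configuration in DSpace.
        Options options = new Options();
        options.addOption("t", "task", true, "curation task name");
        options.addOption("q", "queue", true, "name of task queue to process");
        new MiniCuration().initialize(new String[] {"-t", "estimate"}, options);
    }
}
```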

@@ -98,6 +98,7 @@ public class Curator {
communityService = ContentServiceFactory.getInstance().getCommunityService(); communityService = ContentServiceFactory.getInstance().getCommunityService();
itemService = ContentServiceFactory.getInstance().getItemService(); itemService = ContentServiceFactory.getInstance().getItemService();
handleService = HandleServiceFactory.getInstance().getHandleService(); handleService = HandleServiceFactory.getInstance().getHandleService();
resolver = new TaskResolver();
} }
/** /**
@@ -142,10 +143,10 @@ public class Curator {
// performance order currently FIFO - to be revisited // performance order currently FIFO - to be revisited
perfList.add(taskName); perfList.add(taskName);
} catch (IOException ioE) { } catch (IOException ioE) {
log.error("Task: '" + taskName + "' initialization failure: " + ioE.getMessage()); System.out.println("Task: '" + taskName + "' initialization failure: " + ioE.getMessage());
} }
} else { } else {
log.error("Task: '" + taskName + "' does not resolve"); System.out.println("Task: '" + taskName + "' does not resolve");
} }
return this; return this;
} }
@@ -259,13 +260,6 @@ public class Curator {
/** /**
* Performs all configured tasks upon DSpace object * Performs all configured tasks upon DSpace object
* (Community, Collection or Item). * (Community, Collection or Item).
* <P>
* Note: Site-wide tasks will default to running as
* an Anonymous User unless you call the Site-wide task
* via the {@link curate(Context,String)} or
* {@link #curate(Context, DSpaceObject)} method with an
* authenticated Context object.
*
* @param dso the DSpace object * @param dso the DSpace object
* @throws IOException if IO error * @throws IOException if IO error
*/ */
@@ -325,7 +319,7 @@ public class Curator {
taskQ.enqueue(queueId, new TaskQueueEntry(c.getCurrentUser().getName(), taskQ.enqueue(queueId, new TaskQueueEntry(c.getCurrentUser().getName(),
System.currentTimeMillis(), perfList, id)); System.currentTimeMillis(), perfList, id));
} else { } else {
log.error("curate - no TaskQueue implemented"); System.out.println("curate - no TaskQueue implemented");
} }
} }
@@ -346,7 +340,7 @@ public class Curator {
try { try {
reporter.append(message); reporter.append(message);
} catch (IOException ex) { } catch (IOException ex) {
log.error("Task reporting failure", ex); System.out.println("Task reporting failure: " + ex);
} }
} }
@@ -552,7 +546,7 @@ public class Curator {
return !suspend(statusCode); return !suspend(statusCode);
} catch (IOException ioe) { } catch (IOException ioe) {
//log error & pass exception upwards //log error & pass exception upwards
log.error("Error executing curation task '" + task.getName() + "'", ioe); System.out.println("Error executing curation task '" + task.getName() + "'; " + ioe);
throw ioe; throw ioe;
} }
} }
@@ -568,7 +562,7 @@ public class Curator {
return !suspend(statusCode); return !suspend(statusCode);
} catch (IOException ioe) { } catch (IOException ioe) {
//log error & pass exception upwards //log error & pass exception upwards
log.error("Error executing curation task '" + task.getName() + "'", ioe); System.out.println("Error executing curation task '" + task.getName() + "'; " + ioe);
throw ioe; throw ioe;
} }
} }


@@ -7,6 +7,9 @@
*/ */
package org.dspace.discovery; package org.dspace.discovery;
import static java.util.Collections.singletonList;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
@@ -31,7 +34,7 @@ public class DiscoverQuery {
**/ **/
private String query; private String query;
private List<String> filterQueries; private List<String> filterQueries;
private String DSpaceObjectFilter = null; private List<String> dspaceObjectFilters = new ArrayList<>();
private List<String> fieldPresentQueries; private List<String> fieldPresentQueries;
private boolean spellCheck; private boolean spellCheck;
@@ -118,20 +121,33 @@ public class DiscoverQuery {
* Sets the DSpace object filter, must be an DSpace Object type integer * Sets the DSpace object filter, must be an DSpace Object type integer
* can be used to only return objects from a certain DSpace Object type * can be used to only return objects from a certain DSpace Object type
* *
* @param DSpaceObjectFilter the DSpace object filer * @param dspaceObjectFilter the DSpace object filter
*/ */
public void setDSpaceObjectFilter(String DSpaceObjectFilter) { public void setDSpaceObjectFilter(String dspaceObjectFilter) {
this.DSpaceObjectFilter = DSpaceObjectFilter; this.dspaceObjectFilters = singletonList(dspaceObjectFilter);
} }
/** /**
* Gets the DSpace object filter * Adds a DSpace object filter, must be an DSpace Object type integer.
* can be used to only return objects from a certain DSpace Object type * Can be used to also return objects from a certain DSpace Object type.
* *
* @return the DSpace object filer * @param dspaceObjectFilter the DSpace object filter
*/ */
public String getDSpaceObjectFilter() { public void addDSpaceObjectFilter(String dspaceObjectFilter) {
return DSpaceObjectFilter;
if (isNotBlank(dspaceObjectFilter)) {
this.dspaceObjectFilters.add(dspaceObjectFilter);
}
}
/**
* Gets the DSpace object filters
* can be used to only return objects from certain DSpace Object types
*
* @return the DSpace object filters
*/
public List<String> getDSpaceObjectFilters() {
return dspaceObjectFilters;
} }
/** /**


@@ -7,6 +7,8 @@
*/ */
package org.dspace.discovery; package org.dspace.discovery;
import static java.util.stream.Collectors.joining;
import java.io.IOException; import java.io.IOException;
import java.io.PrintWriter; import java.io.PrintWriter;
import java.io.StringWriter; import java.io.StringWriter;
@@ -751,8 +753,13 @@ public class SolrServiceImpl implements SearchService, IndexingService {
String filterQuery = discoveryQuery.getFilterQueries().get(i); String filterQuery = discoveryQuery.getFilterQueries().get(i);
solrQuery.addFilterQuery(filterQuery); solrQuery.addFilterQuery(filterQuery);
} }
if (discoveryQuery.getDSpaceObjectFilter() != null) { if (discoveryQuery.getDSpaceObjectFilters() != null) {
solrQuery.addFilterQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":" + discoveryQuery.getDSpaceObjectFilter()); solrQuery.addFilterQuery(
discoveryQuery.getDSpaceObjectFilters()
.stream()
.map(filter -> SearchUtils.RESOURCE_TYPE_FIELD + ":" + filter)
.collect(joining(" OR "))
);
} }
for (int i = 0; i < discoveryQuery.getFieldPresentQueries().size(); i++) { for (int i = 0; i < discoveryQuery.getFieldPresentQueries().size(); i++) {

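Because DiscoverQuery now carries a list of DSpace object filters instead of a single value, SolrServiceImpl joins them into one filter query. A small sketch of that join in isolation (the literal field name stands in for SearchUtils.RESOURCE_TYPE_FIELD):

```java
import static java.util.stream.Collectors.joining;

import java.util.Arrays;
import java.util.List;

public final class DsoFilterQuery {
    public static void main(String[] args) {
        List<String> dspaceObjectFilters = Arrays.asList("Item", "Collection");

        // Mirrors the stream above: each filter becomes a resource-type clause
        // and the clauses are OR-ed into a single Solr filter query.
        String fq = dspaceObjectFilters.stream()
                .map(filter -> "search.resourcetype:" + filter)
                .collect(joining(" OR "));

        System.out.println(fq); // search.resourcetype:Item OR search.resourcetype:Collection
    }
}
```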

@@ -134,11 +134,13 @@ public class HarvestScheduler implements Runnable {
if (maxActiveThreads == 0) { if (maxActiveThreads == 0) {
maxActiveThreads = 3; maxActiveThreads = 3;
} }
minHeartbeat = ConfigurationManager.getIntProperty("oai", "harvester.minHeartbeat") * 1000; minHeartbeat = ConfigurationManager.getIntProperty("oai", "harvester.minHeartbeat");
minHeartbeat = minHeartbeat * 1000; // multiply by 1000 to convert seconds to ms
if (minHeartbeat == 0) { if (minHeartbeat == 0) {
minHeartbeat = 30000; minHeartbeat = 30000;
} }
maxHeartbeat = ConfigurationManager.getIntProperty("oai", "harvester.maxHeartbeat") * 1000; maxHeartbeat = ConfigurationManager.getIntProperty("oai", "harvester.maxHeartbeat");
maxHeartbeat = maxHeartbeat * 1000; // multiply by 1000 to convert seconds to ms
if (maxHeartbeat == 0) { if (maxHeartbeat == 0) {
maxHeartbeat = 3600000; maxHeartbeat = 3600000;
} }


@@ -75,6 +75,10 @@ public class CCLicenseConnectorServiceImpl implements CCLicenseConnectorService,
.disableAutomaticRetries() .disableAutomaticRetries()
.setMaxConnTotal(5) .setMaxConnTotal(5)
.build(); .build();
// disallow DTD parsing to ensure no XXE attacks can occur.
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
parser.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
} }
/** /**


@@ -15,6 +15,7 @@ import java.util.Iterator;
import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.commons.validator.routines.UrlValidator;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.rdf.RDFUtil; import org.dspace.rdf.RDFUtil;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
@@ -197,6 +198,7 @@ public class Negotiator {
if (extraPathInfo == null) { if (extraPathInfo == null) {
extraPathInfo = ""; extraPathInfo = "";
} }
UrlValidator urlValidator = new UrlValidator(UrlValidator.ALLOW_LOCAL_URLS);
StringBuilder urlBuilder = new StringBuilder(); StringBuilder urlBuilder = new StringBuilder();
String lang = null; String lang = null;
@@ -256,12 +258,15 @@ public class Negotiator {
urlBuilder.append(handle).append("/").append(extraPathInfo); urlBuilder.append(handle).append("/").append(extraPathInfo);
} }
String url = urlBuilder.toString(); String url = urlBuilder.toString();
if (urlValidator.isValid(url)) {
log.debug("Will forward to '" + url + "'."); log.debug("Will forward to '" + url + "'.");
response.setStatus(HttpServletResponse.SC_SEE_OTHER); response.setStatus(HttpServletResponse.SC_SEE_OTHER);
response.setHeader("Location", url); response.setHeader("Location", url);
response.flushBuffer(); response.flushBuffer();
return true; return true;
} else {
throw new IOException("Invalid URL '" + url + "', cannot redirect.");
}
} }
// currently we cannot serve statistics as rdf // currently we cannot serve statistics as rdf
@@ -287,10 +292,14 @@ public class Negotiator {
urlBuilder.append("/handle/").append(handle); urlBuilder.append("/handle/").append(handle);
urlBuilder.append("/").append(lang); urlBuilder.append("/").append(lang);
String url = urlBuilder.toString(); String url = urlBuilder.toString();
log.debug("Will forward to '" + url + "'."); if (urlValidator.isValid(url)) {
response.setStatus(HttpServletResponse.SC_SEE_OTHER); log.debug("Will forward to '" + url + "'.");
response.setHeader("Location", url); response.setStatus(HttpServletResponse.SC_SEE_OTHER);
response.flushBuffer(); response.setHeader("Location", url);
return true; response.flushBuffer();
return true;
} else {
throw new IOException("Invalid URL '" + url + "', cannot redirect.");
}
} }
} }
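
Both redirect paths above now funnel through the same guard: build the URL, validate it with Commons Validator, and only then set the `Location` header. A self-contained sketch of that guard (the class and method names here are illustrative, not part of the Negotiator):

```
import java.io.IOException;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.validator.routines.UrlValidator;

// Sketch only: redirect to a URL only after it passes validation.
// ALLOW_LOCAL_URLS permits hosts such as "localhost" that are common
// in development setups; invalid URLs fail fast instead of redirecting.
public final class SafeRedirect {
    private static final UrlValidator URL_VALIDATOR =
            new UrlValidator(UrlValidator.ALLOW_LOCAL_URLS);

    public static void sendSeeOther(HttpServletResponse response, String url)
            throws IOException {
        if (!URL_VALIDATOR.isValid(url)) {
            throw new IOException("Invalid URL '" + url + "', cannot redirect.");
        }
        response.setStatus(HttpServletResponse.SC_SEE_OTHER);
        response.setHeader("Location", url);
        response.flushBuffer();
    }
}
```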

View File

@@ -113,6 +113,9 @@ public class ArXivService {
factory.setValidating(false); factory.setValidating(false);
factory.setIgnoringComments(true); factory.setIgnoringComments(true);
factory.setIgnoringElementContentWhitespace(true); factory.setIgnoringElementContentWhitespace(true);
// disallow DTD parsing to ensure no XXE attacks can occur.
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
DocumentBuilder db = factory.newDocumentBuilder(); DocumentBuilder db = factory.newDocumentBuilder();
Document inDoc = db.parse(response.getEntity().getContent()); Document inDoc = db.parse(response.getEntity().getContent());
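
The same three-line hardening is applied to every `DocumentBuilderFactory` in these metadata import services (ArXiv above, and CiNii, CrossRef and PubMed below). A small sketch of the shared pattern, with an illustrative helper name:

```
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

// Sketch only: keeps the existing factory settings and adds the DOCTYPE
// ban that prevents XXE attacks when parsing responses from remote services.
public final class SecureDocumentBuilders {
    public static DocumentBuilder newSecureBuilder() throws ParserConfigurationException {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setValidating(false);
        factory.setIgnoringComments(true);
        factory.setIgnoringElementContentWhitespace(true);
        factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        return factory.newDocumentBuilder();
    }
}
```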

View File

@@ -102,6 +102,9 @@ public class CiNiiService {
factory.setValidating(false); factory.setValidating(false);
factory.setIgnoringComments(true); factory.setIgnoringComments(true);
factory.setIgnoringElementContentWhitespace(true); factory.setIgnoringElementContentWhitespace(true);
// disallow DTD parsing to ensure no XXE attacks can occur.
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
DocumentBuilder db = factory.newDocumentBuilder(); DocumentBuilder db = factory.newDocumentBuilder();
Document inDoc = db.parse(response.getEntity().getContent()); Document inDoc = db.parse(response.getEntity().getContent());
@@ -178,6 +181,9 @@ public class CiNiiService {
factory.setValidating(false); factory.setValidating(false);
factory.setIgnoringComments(true); factory.setIgnoringComments(true);
factory.setIgnoringElementContentWhitespace(true); factory.setIgnoringElementContentWhitespace(true);
// disallow DTD parsing to ensure no XXE attacks can occur.
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
DocumentBuilder db = factory.newDocumentBuilder(); DocumentBuilder db = factory.newDocumentBuilder();
Document inDoc = db.parse(response.getEntity().getContent()); Document inDoc = db.parse(response.getEntity().getContent());

View File

@@ -99,6 +99,9 @@ public class CrossRefService {
factory.setValidating(false); factory.setValidating(false);
factory.setIgnoringComments(true); factory.setIgnoringComments(true);
factory.setIgnoringElementContentWhitespace(true); factory.setIgnoringElementContentWhitespace(true);
// disallow DTD parsing to ensure no XXE attacks can occur.
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
DocumentBuilder db = factory DocumentBuilder db = factory
.newDocumentBuilder(); .newDocumentBuilder();

View File

@@ -119,6 +119,9 @@ public class PubmedService {
factory.setValidating(false); factory.setValidating(false);
factory.setIgnoringComments(true); factory.setIgnoringComments(true);
factory.setIgnoringElementContentWhitespace(true); factory.setIgnoringElementContentWhitespace(true);
// disallow DTD parsing to ensure no XXE attacks can occur.
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
DocumentBuilder builder; DocumentBuilder builder;
try { try {
@@ -156,6 +159,9 @@ public class PubmedService {
factory.setValidating(false); factory.setValidating(false);
factory.setIgnoringComments(true); factory.setIgnoringComments(true);
factory.setIgnoringElementContentWhitespace(true); factory.setIgnoringElementContentWhitespace(true);
// disallow DTD parsing to ensure no XXE attacks can occur.
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
DocumentBuilder builder = factory.newDocumentBuilder(); DocumentBuilder builder = factory.newDocumentBuilder();
Document inDoc = builder.parse(stream); Document inDoc = builder.parse(stream);
@@ -216,6 +222,9 @@ public class PubmedService {
factory.setValidating(false); factory.setValidating(false);
factory.setIgnoringComments(true); factory.setIgnoringComments(true);
factory.setIgnoringElementContentWhitespace(true); factory.setIgnoringElementContentWhitespace(true);
// disallow DTD parsing to ensure no XXE attacks can occur.
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
DocumentBuilder builder = factory.newDocumentBuilder(); DocumentBuilder builder = factory.newDocumentBuilder();
Document inDoc = builder Document inDoc = builder

View File

@@ -135,12 +135,12 @@ public class Version implements ReloadableEntity<Integer> {
return true; return true;
} }
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(o); Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(o);
if (getClass() != objClass) { if (!getClass().equals(objClass)) {
return false; return false;
} }
final Version that = (Version) o; final Version that = (Version) o;
if (this.getID() != that.getID()) { if (!this.getID().equals(that.getID())) {
return false; return false;
} }
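
The switch from `!=` to `equals()` matters because `getID()` returns a boxed `Integer`: reference comparison only happens to work for values inside the JVM's small-integer cache. A tiny standalone illustration (not DSpace code); the same fix is applied to `VersionHistory` below:

```
public final class BoxedIdComparison {
    public static void main(String[] args) {
        Integer a = 1000;
        Integer b = 1000;
        System.out.println(a == b);      // usually false: compares object identity
        System.out.println(a.equals(b)); // true: compares the value
    }
}
```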

View File

@@ -93,12 +93,12 @@ public class VersionHistory implements ReloadableEntity<Integer> {
return true; return true;
} }
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(o); Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(o);
if (getClass() != objClass) { if (!getClass().equals(objClass)) {
return false; return false;
} }
final VersionHistory that = (VersionHistory) o; final VersionHistory that = (VersionHistory) o;
if (this.getID() != that.getID()) { if (!this.getID().equals(that.getID())) {
return false; return false;
} }

View File

@@ -0,0 +1,2 @@
checklinks
requiredmetadata

View File

@@ -19,6 +19,12 @@
<property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataExport"/> <property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataExport"/>
</bean> </bean>
<bean id="curate" class="org.dspace.curate.CurationScriptConfiguration">
<property name="description" value="Curation tasks"/>
<property name="dspaceRunnableClass" value="org.dspace.curate.CurationCli"/>
</bean>
<!-- Keep as last script; for test ScriptRestRepository#findOneScriptByNameTest -->
<bean id="mock-script" class="org.dspace.scripts.MockDSpaceRunnableScriptConfiguration" scope="prototype"> <bean id="mock-script" class="org.dspace.scripts.MockDSpaceRunnableScriptConfiguration" scope="prototype">
<property name="description" value="Mocking a script for testing purposes" /> <property name="description" value="Mocking a script for testing purposes" />
<property name="dspaceRunnableClass" value="org.dspace.scripts.impl.MockDSpaceRunnableScript"/> <property name="dspaceRunnableClass" value="org.dspace.scripts.impl.MockDSpaceRunnableScript"/>

View File

@@ -19,19 +19,29 @@
<context:annotation-config/> <!-- allows us to use spring annotations in beans --> <context:annotation-config/> <!-- allows us to use spring annotations in beans -->
<bean class="org.dspace.discovery.SolrServiceImpl" id="org.dspace.discovery.SearchService"/> <bean class="org.dspace.discovery.SolrServiceImpl"
id="org.dspace.discovery.SearchService"/>
<alias name="org.dspace.discovery.SearchService" alias="org.dspace.discovery.IndexingService"/> <alias name="org.dspace.discovery.SearchService"
alias="org.dspace.discovery.IndexingService"/>
<!-- These beans have been added so that we can mock our AuthoritySearchService in the tests--> <!-- These beans have been added so that we can mock our AuthoritySearchService in the tests-->
<bean class="org.dspace.authority.MockAuthoritySolrServiceImpl" id="org.dspace.authority.AuthoritySearchService"/> <bean class="org.dspace.authority.MockAuthoritySolrServiceImpl"
<alias name="org.dspace.authority.AuthoritySearchService" alias="org.dspace.authority.indexer.AuthorityIndexingService"/> id="org.dspace.authority.AuthoritySearchService"/>
<alias name="org.dspace.authority.AuthoritySearchService"
alias="org.dspace.authority.indexer.AuthorityIndexingService"/>
<bean id="org.dspace.discovery.SolrSearchCore" class="org.dspace.discovery.MockSolrSearchCore" autowire-candidate="true"/> <bean id="org.dspace.discovery.MockSolrSearchCore"
class="org.dspace.discovery.MockSolrSearchCore"
autowire-candidate="true"/>
<!--<bean class="org.dspace.discovery.SolrServiceIndexOutputPlugin" id="solrServiceIndexOutputPlugin"/>--> <!--<bean class="org.dspace.discovery.SolrServiceIndexOutputPlugin"
id="solrServiceIndexOutputPlugin"/>-->
<!-- Statistics services are both lazy loaded (by name), as you are likely just using ONE of them and not both --> <!-- Statistics services are both lazy loaded (by name), as you are likely
<bean id="solrLoggerService" class="org.dspace.statistics.MockSolrLoggerServiceImpl" lazy-init="true"/> just using ONE of them and not both -->
<bean id="solrLoggerService"
class="org.dspace.statistics.MockSolrLoggerServiceImpl"
lazy-init="true"/>
</beans> </beans>

View File

@@ -237,7 +237,7 @@ it, please enter the types and the actual numbers or codes.</hint>
<form name="journalVolumeStep"> <form name="journalVolumeStep">
<row> <row>
<relation-field> <relation-field>
<relationship-type>isVolumeOfJournal</relationship-type> <relationship-type>isJournalOfVolume</relationship-type>
<search-configuration>periodical</search-configuration> <search-configuration>periodical</search-configuration>
<filter>creativework.publisher:somepublishername</filter> <filter>creativework.publisher:somepublishername</filter>
<label>Journal</label> <label>Journal</label>

View File

@@ -0,0 +1,3 @@
<?xml version='1.0'?>
<!-- This empty configuration is required to start EmbeddedSolrServer for Integration Tests (see MockSolrServer) -->
<solr/>

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.test; package org.dspace;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;
@@ -17,7 +17,7 @@ import java.util.TimeZone;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.builder.AbstractBuilder; import org.dspace.builder.AbstractBuilder;
import org.dspace.servicemanager.DSpaceKernelImpl; import org.dspace.servicemanager.DSpaceKernelImpl;
import org.dspace.servicemanager.DSpaceKernelInit; import org.dspace.servicemanager.DSpaceKernelInit;
import org.junit.AfterClass; import org.junit.AfterClass;
@@ -90,8 +90,9 @@ public class AbstractDSpaceIntegrationTest {
} }
/** /**
* This method will be run after all tests finish as per @AfterClass. It * This method will be run after all tests finish as per @AfterClass. It
* will clean resources initialized by the @BeforeClass methods. * will clean resources initialized by the @BeforeClass methods.
* @throws java.sql.SQLException
*/ */
@AfterClass @AfterClass
public static void destroyTestEnvironment() throws SQLException { public static void destroyTestEnvironment() throws SQLException {

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.test; package org.dspace;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;
@@ -14,21 +14,20 @@ import java.sql.SQLException;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.app.launcher.ScriptLauncher; import org.dspace.app.launcher.ScriptLauncher;
import org.dspace.app.rest.builder.AbstractBuilder;
import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
import org.dspace.authority.AuthoritySearchService;
import org.dspace.authority.MockAuthoritySolrServiceImpl; import org.dspace.authority.MockAuthoritySolrServiceImpl;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.builder.AbstractBuilder;
import org.dspace.content.Community; import org.dspace.content.Community;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.I18nUtil; import org.dspace.core.I18nUtil;
import org.dspace.discovery.MockSolrSearchCore; import org.dspace.discovery.MockSolrSearchCore;
import org.dspace.discovery.SolrSearchCore;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group; import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.EPersonService;
import org.dspace.eperson.service.GroupService; import org.dspace.eperson.service.GroupService;
import org.dspace.kernel.ServiceManager;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.statistics.MockSolrLoggerServiceImpl; import org.dspace.statistics.MockSolrLoggerServiceImpl;
import org.dspace.storage.rdbms.DatabaseUtils; import org.dspace.storage.rdbms.DatabaseUtils;
@@ -181,21 +180,20 @@ public class AbstractIntegrationTestWithDatabase extends AbstractDSpaceIntegrati
parentCommunity = null; parentCommunity = null;
cleanupContext(); cleanupContext();
ServiceManager serviceManager = DSpaceServicesFactory.getInstance().getServiceManager();
// Clear the search core. // Clear the search core.
MockSolrSearchCore searchService = DSpaceServicesFactory.getInstance() MockSolrSearchCore searchService = serviceManager
.getServiceManager() .getServiceByName(null, MockSolrSearchCore.class);
.getServiceByName(SolrSearchCore.class.getName(), MockSolrSearchCore.class);
searchService.reset(); searchService.reset();
MockSolrLoggerServiceImpl statisticsService = DSpaceServicesFactory.getInstance() MockSolrLoggerServiceImpl statisticsService = serviceManager
.getServiceManager() .getServiceByName(null, MockSolrLoggerServiceImpl.class);
.getServiceByName("solrLoggerService", MockSolrLoggerServiceImpl.class);
statisticsService.reset(); statisticsService.reset();
MockAuthoritySolrServiceImpl authorityService = DSpaceServicesFactory.getInstance() MockAuthoritySolrServiceImpl authorityService = serviceManager
.getServiceManager() .getServiceByName(null, MockAuthoritySolrServiceImpl.class);
.getServiceByName(AuthoritySearchService.class.getName(), MockAuthoritySolrServiceImpl.class);
authorityService.reset(); authorityService.reset();
// Reload our ConfigurationService (to reset configs to defaults again) // Reload our ConfigurationService (to reset configs to defaults again)
DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig();
@@ -209,6 +207,7 @@ public class AbstractIntegrationTestWithDatabase extends AbstractDSpaceIntegrati
/** /**
* Utility method to cleanup a created Context object (to save memory). * Utility method to cleanup a created Context object (to save memory).
* This can also be used by individual tests to cleanup context objects they create. * This can also be used by individual tests to cleanup context objects they create.
* @throws java.sql.SQLException passed through.
*/ */
protected void cleanupContext() throws SQLException { protected void cleanupContext() throws SQLException {
// If context still valid, flush all database changes and close it // If context still valid, flush all database changes and close it
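
The cleanup refactor above resolves the `ServiceManager` once and then looks each mock service up by type, passing `null` as the bean name. A compact sketch of that lookup style (the wrapper class is illustrative; the service types and `reset()` methods are the test mocks changed elsewhere in this commit):

```
import org.dspace.discovery.MockSolrSearchCore;
import org.dspace.kernel.ServiceManager;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.statistics.MockSolrLoggerServiceImpl;

// Sketch only: resolve the ServiceManager once, then fetch each mock by its
// class with a null bean name, as the refactored test cleanup above does.
public final class MockServiceLookup {
    public static void resetSearchAndStatistics() {
        ServiceManager serviceManager =
                DSpaceServicesFactory.getInstance().getServiceManager();
        serviceManager.getServiceByName(null, MockSolrSearchCore.class).reset();
        serviceManager.getServiceByName(null, MockSolrLoggerServiceImpl.class).reset();
    }
}
```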

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.test; package org.dspace;
public class ExitException extends SecurityException { public class ExitException extends SecurityException {
private final int status; private final int status;

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.test; package org.dspace;
import java.security.Permission; import java.security.Permission;

View File

@@ -0,0 +1,62 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import static junit.framework.TestCase.assertTrue;
import java.io.File;
import java.io.FileInputStream;
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.IOUtils;
import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.app.launcher.ScriptLauncher;
import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.junit.Test;
public class MetadataExportIT
extends AbstractIntegrationTestWithDatabase {
private final ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
@Test
public void metadataExportToCsvTest() throws Exception {
context.turnOffAuthorisationSystem();
Community community = CommunityBuilder.createCommunity(context)
.build();
Collection collection = CollectionBuilder.createCollection(context, community)
.build();
Item item = ItemBuilder.createItem(context, collection)
.withAuthor("Donald, Smith")
.build();
context.restoreAuthSystemState();
String fileLocation = configurationService.getProperty("dspace.dir")
+ testProps.get("test.exportcsv").toString();
String[] args = new String[] {"metadata-export",
"-i", String.valueOf(item.getHandle()),
"-f", fileLocation};
TestDSpaceRunnableHandler testDSpaceRunnableHandler
= new TestDSpaceRunnableHandler();
ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl),
testDSpaceRunnableHandler, kernelImpl);
File file = new File(fileLocation);
String fileContent = IOUtils.toString(new FileInputStream(file), StandardCharsets.UTF_8);
assertTrue(fileContent.contains("Donald, Smith"));
assertTrue(fileContent.contains(String.valueOf(item.getID())));
}
}

View File

@@ -1,71 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import static junit.framework.TestCase.assertTrue;
import java.io.File;
import java.io.FileInputStream;
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.IOUtils;
import org.dspace.AbstractIntegrationTest;
import org.dspace.app.launcher.ScriptLauncher;
import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.InstallItemService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.WorkspaceItemService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.junit.Test;
public class MetadataExportTest extends AbstractIntegrationTest {
private ItemService itemService = ContentServiceFactory.getInstance().getItemService();
private CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
private CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
private WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService();
private InstallItemService installItemService = ContentServiceFactory.getInstance().getInstallItemService();
private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
@Test
public void metadataExportToCsvTest() throws Exception {
context.turnOffAuthorisationSystem();
Community community = communityService.create(null, context);
Collection collection = collectionService.create(context, community);
WorkspaceItem wi = workspaceItemService.create(context, collection, true);
Item item = wi.getItem();
itemService.addMetadata(context, item, "dc", "contributor", "author", null, "Donald, Smith");
item = installItemService.installItem(context, wi);
context.restoreAuthSystemState();
String fileLocation = configurationService.getProperty("dspace.dir") + testProps.get("test.exportcsv")
.toString();
String[] args = new String[] {"metadata-export", "-i", String.valueOf(item.getHandle()), "-f", fileLocation};
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);
File file = new File(fileLocation);
String fileContent = IOUtils.toString(new FileInputStream(file), StandardCharsets.UTF_8);
assertTrue(fileContent.contains("Donald, Smith"));
assertTrue(fileContent.contains(String.valueOf(item.getID())));
context.turnOffAuthorisationSystem();
itemService.delete(context, itemService.find(context, item.getID()));
collectionService.delete(context, collectionService.find(context, collection.getID()));
communityService.delete(context, communityService.find(context, community.getID()));
context.restoreAuthSystemState();
}
}

View File

@@ -22,16 +22,16 @@ import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService; import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService; import org.dspace.content.service.CommunityService;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.junit.Test; import org.junit.Test;
public class MetadataImportTest extends AbstractIntegrationTest { public class MetadataImportTest extends AbstractIntegrationTest {
private ItemService itemService = ContentServiceFactory.getInstance().getItemService(); private final ItemService itemService
private CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); = ContentServiceFactory.getInstance().getItemService();
private CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); private final CollectionService collectionService
private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); = ContentServiceFactory.getInstance().getCollectionService();
private final CommunityService communityService
= ContentServiceFactory.getInstance().getCommunityService();
@Test @Test
public void metadataImportTest() throws Exception { public void metadataImportTest() throws Exception {

View File

@@ -21,4 +21,8 @@ public class MockAuthoritySolrServiceImpl extends AuthoritySolrServiceImpl imple
//We don't use SOLR in the tests of this module //We don't use SOLR in the tests of this module
solr = null; solr = null;
} }
public void reset() {
// This method intentionally left blank.
}
} }

View File

@@ -20,7 +20,7 @@ import org.junit.Test;
* @author Andrea Bollini (andrea.bollini at 4science.it) * @author Andrea Bollini (andrea.bollini at 4science.it)
* *
*/ */
public class AuthorizeConfigIntegrationTest extends AbstractIntegrationTest { public class AuthorizeConfigIT extends AbstractIntegrationTest {
@Test @Test
public void testReloadConfiguration() { public void testReloadConfiguration() {

View File

@@ -5,18 +5,18 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder; package org.dspace.builder;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.List; import java.util.List;
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.CollectionUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.builder.util.AbstractBuilderCleanupUtil;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.factory.AuthorizeServiceFactory; import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService; import org.dspace.authorize.service.AuthorizeService;
import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.builder.util.AbstractBuilderCleanupUtil;
import org.dspace.content.Bitstream; import org.dspace.content.Bitstream;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamFormatService; import org.dspace.content.service.BitstreamFormatService;
@@ -55,8 +55,8 @@ import org.dspace.xmlworkflow.storedcomponents.service.XmlWorkflowItemService;
/** /**
* Abstract builder class that holds references to all available services * Abstract builder class that holds references to all available services
* *
* @param <T> This param represents the Model object for the Builder * @param <T> This parameter represents the Model object for the Builder
* @param <S> This param represents the Service object for the builder * @param <S> This parameter represents the Service object for the builder
* @author Jonas Van Goolen - (jonas@atmire.com) * @author Jonas Van Goolen - (jonas@atmire.com)
*/ */
public abstract class AbstractBuilder<T, S> { public abstract class AbstractBuilder<T, S> {
@@ -96,7 +96,8 @@ public abstract class AbstractBuilder<T, S> {
* This static class will make sure that the objects built with the builders are disposed of in a foreign-key * This static class will make sure that the objects built with the builders are disposed of in a foreign-key
* constraint safe manner by predefining an order * constraint safe manner by predefining an order
*/ */
private static AbstractBuilderCleanupUtil abstractBuilderCleanupUtil = new AbstractBuilderCleanupUtil(); private static final AbstractBuilderCleanupUtil abstractBuilderCleanupUtil
= new AbstractBuilderCleanupUtil();
/** /**
* log4j category * log4j category
*/ */

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder; package org.dspace.builder;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.ReloadableEntity; import org.dspace.core.ReloadableEntity;
@@ -13,6 +13,8 @@ import org.dspace.service.DSpaceCRUDService;
/** /**
* @author Jonas Van Goolen - (jonas@atmire.com) * @author Jonas Van Goolen - (jonas@atmire.com)
*
* @param <T> A specific kind of ReloadableEntity.
*/ */
public abstract class AbstractCRUDBuilder<T extends ReloadableEntity> extends AbstractBuilder<T, DSpaceCRUDService> { public abstract class AbstractCRUDBuilder<T extends ReloadableEntity> extends AbstractBuilder<T, DSpaceCRUDService> {
@@ -20,8 +22,10 @@ public abstract class AbstractCRUDBuilder<T extends ReloadableEntity> extends Ab
super(context); super(context);
} }
@Override
protected abstract DSpaceCRUDService getService(); protected abstract DSpaceCRUDService getService();
@Override
public abstract T build(); public abstract T build();
public void delete(T dso) throws Exception { public void delete(T dso) throws Exception {

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder; package org.dspace.builder;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.Date; import java.util.Date;
@@ -43,12 +43,15 @@ public abstract class AbstractDSpaceObjectBuilder<T extends DSpaceObject>
this.context = context; this.context = context;
} }
@Override
public abstract void cleanup() throws Exception; public abstract void cleanup() throws Exception;
@Override
protected abstract DSpaceObjectService<T> getService(); protected abstract DSpaceObjectService<T> getService();
@Override
protected <B> B handleException(final Exception e) { protected <B> B handleException(final Exception e) {
log.error(e.getMessage(), e); log.error(e.getMessage(), e);
return null; return null;
@@ -231,13 +234,15 @@ public abstract class AbstractDSpaceObjectBuilder<T extends DSpaceObject>
return (B) this; return (B) this;
} }
@Override
public abstract T build() throws SQLException, AuthorizeException; public abstract T build() throws SQLException, AuthorizeException;
@Override
public void delete(Context c, T dso) throws Exception { public void delete(Context c, T dso) throws Exception {
if (dso != null) { if (dso != null) {
getService().delete(c, dso); getService().delete(c, dso);
} }
c.complete(); c.complete();
indexingService.commit(); indexingService.commit();
} }
} }

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder; package org.dspace.builder;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
@@ -129,6 +129,7 @@ public class BitstreamBuilder extends AbstractDSpaceObjectBuilder<Bitstream> {
return this; return this;
} }
@Override
public Bitstream build() { public Bitstream build() {
try { try {
bitstreamService.update(context, bitstream); bitstreamService.update(context, bitstream);
@@ -152,7 +153,7 @@ public class BitstreamBuilder extends AbstractDSpaceObjectBuilder<Bitstream> {
@Override @Override
public void cleanup() throws Exception { public void cleanup() throws Exception {
try (Context c = new Context()) { try (Context c = new Context()) {
c.turnOffAuthorisationSystem(); c.turnOffAuthorisationSystem();
// Ensure object and any related objects are reloaded before checking to see what needs cleanup // Ensure object and any related objects are reloaded before checking to see what needs cleanup
bitstream = c.reloadEntity(bitstream); bitstream = c.reloadEntity(bitstream);
@@ -163,6 +164,7 @@ public class BitstreamBuilder extends AbstractDSpaceObjectBuilder<Bitstream> {
} }
} }
@Override
protected DSpaceObjectService<Bitstream> getService() { protected DSpaceObjectService<Bitstream> getService() {
return bitstreamService; return bitstreamService;
} }

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder; package org.dspace.builder;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
@@ -71,7 +71,6 @@ public class BitstreamFormatBuilder extends AbstractCRUDBuilder<BitstreamFormat>
log.error(e); log.error(e);
} catch (AuthorizeException e) { } catch (AuthorizeException e) {
log.error(e); log.error(e);
;
} }
return bitstreamFormat; return bitstreamFormat;
} }

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder; package org.dspace.builder;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
@@ -25,7 +25,7 @@ public class BundleBuilder extends AbstractDSpaceObjectBuilder<Bundle> {
private Bundle bundle; private Bundle bundle;
private Item item; private Item item;
private String name; private String name;
private List<Bitstream> bitstreams = new ArrayList<>(); private final List<Bitstream> bitstreams = new ArrayList<>();
protected BundleBuilder(Context context) { protected BundleBuilder(Context context) {
super(context); super(context);
@@ -52,6 +52,7 @@ public class BundleBuilder extends AbstractDSpaceObjectBuilder<Bundle> {
return this; return this;
} }
@Override
public void cleanup() throws Exception { public void cleanup() throws Exception {
try (Context c = new Context()) { try (Context c = new Context()) {
c.turnOffAuthorisationSystem(); c.turnOffAuthorisationSystem();
@@ -64,10 +65,12 @@ public class BundleBuilder extends AbstractDSpaceObjectBuilder<Bundle> {
} }
} }
@Override
protected DSpaceObjectService<Bundle> getService() { protected DSpaceObjectService<Bundle> getService() {
return bundleService; return bundleService;
} }
@Override
public Bundle build() throws SQLException, AuthorizeException { public Bundle build() throws SQLException, AuthorizeException {
bundle = bundleService.create(context, item, name); bundle = bundleService.create(context, item, name);

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder; package org.dspace.builder;
import java.io.InputStream; import java.io.InputStream;

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder; package org.dspace.builder;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder; package org.dspace.builder;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;

View File

@@ -5,12 +5,14 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder; package org.dspace.builder;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.UUID; import java.util.UUID;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.service.DSpaceObjectService; import org.dspace.content.service.DSpaceObjectService;
import org.dspace.core.Context; import org.dspace.core.Context;
@@ -19,6 +21,7 @@ import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group; import org.dspace.eperson.Group;
public class EPersonBuilder extends AbstractDSpaceObjectBuilder<EPerson> { public class EPersonBuilder extends AbstractDSpaceObjectBuilder<EPerson> {
private static final Logger LOG = LogManager.getLogger(EPersonBuilder.class);
private EPerson ePerson; private EPerson ePerson;
@@ -28,7 +31,7 @@ public class EPersonBuilder extends AbstractDSpaceObjectBuilder<EPerson> {
@Override @Override
public void cleanup() throws Exception { public void cleanup() throws Exception {
try (Context c = new Context()) { try (Context c = new Context()) {
c.turnOffAuthorisationSystem(); c.turnOffAuthorisationSystem();
// Ensure object and any related objects are reloaded before checking to see what needs cleanup // Ensure object and any related objects are reloaded before checking to see what needs cleanup
ePerson = c.reloadEntity(ePerson); ePerson = c.reloadEntity(ePerson);
@@ -36,23 +39,21 @@ public class EPersonBuilder extends AbstractDSpaceObjectBuilder<EPerson> {
delete(c, ePerson); delete(c, ePerson);
c.complete(); c.complete();
} }
} }
} }
@Override
protected DSpaceObjectService<EPerson> getService() { protected DSpaceObjectService<EPerson> getService() {
return ePersonService; return ePersonService;
} }
@Override
public EPerson build() { public EPerson build() {
try { try {
ePersonService.update(context, ePerson); ePersonService.update(context, ePerson);
indexingService.commit(); indexingService.commit();
} catch (SearchServiceException e) { } catch (SearchServiceException | SQLException | AuthorizeException e) {
e.printStackTrace(); LOG.warn("Failed to complete the EPerson", e);
} catch (SQLException e) {
e.printStackTrace();
} catch (AuthorizeException e) {
e.printStackTrace();
} }
return ePerson; return ePerson;
} }
@@ -65,10 +66,8 @@ public class EPersonBuilder extends AbstractDSpaceObjectBuilder<EPerson> {
private EPersonBuilder create() { private EPersonBuilder create() {
try { try {
ePerson = ePersonService.create(context); ePerson = ePersonService.create(context);
} catch (SQLException e) { } catch (SQLException | AuthorizeException e) {
e.printStackTrace(); LOG.warn("Failed to create the EPerson", e);
} catch (AuthorizeException e) {
e.printStackTrace();
} }
return this; return this;
} }

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder; package org.dspace.builder;
import java.sql.SQLException; import java.sql.SQLException;
@@ -53,6 +53,7 @@ public class EntityTypeBuilder extends AbstractBuilder<EntityType, EntityTypeSer
} }
} }
@Override
public EntityType build() { public EntityType build() {
try { try {
@@ -91,7 +92,7 @@ public class EntityTypeBuilder extends AbstractBuilder<EntityType, EntityTypeSer
this.entityType = entityTypeService.create(context, entityType); this.entityType = entityTypeService.create(context, entityType);
} catch (SQLException | AuthorizeException e) { } catch (SQLException | AuthorizeException e) {
e.printStackTrace(); log.warn("Failed to create the EntityType", e);
} }
return this; return this;

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder; package org.dspace.builder;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
@@ -34,7 +34,7 @@ public class GroupBuilder extends AbstractDSpaceObjectBuilder<Group> {
@Override @Override
public void cleanup() throws Exception { public void cleanup() throws Exception {
try (Context c = new Context()) { try (Context c = new Context()) {
c.turnOffAuthorisationSystem(); c.turnOffAuthorisationSystem();
// Ensure object and any related objects are reloaded before checking to see what needs cleanup // Ensure object and any related objects are reloaded before checking to see what needs cleanup
group = c.reloadEntity(group); group = c.reloadEntity(group);
@@ -42,7 +42,7 @@ public class GroupBuilder extends AbstractDSpaceObjectBuilder<Group> {
delete(c, group); delete(c, group);
c.complete(); c.complete();
} }
} }
} }
public static GroupBuilder createGroup(final Context context) { public static GroupBuilder createGroup(final Context context) {

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder; package org.dspace.builder;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder; package org.dspace.builder;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
@@ -64,17 +64,9 @@ public class MetadataFieldBuilder extends AbstractBuilder<MetadataField, Metadat
context.dispatchEvents(); context.dispatchEvents();
indexingService.commit(); indexingService.commit();
} catch (SearchServiceException e) { } catch (SearchServiceException | SQLException | AuthorizeException
log.error(e); | NonUniqueMetadataException | IOException e) {
} catch (SQLException e) { log.error("Failed to complete MetadataField", e);
log.error(e);
} catch (AuthorizeException e) {
log.error(e);
;
} catch (NonUniqueMetadataException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} }
return metadataField; return metadataField;
} }
@@ -104,7 +96,7 @@ public class MetadataFieldBuilder extends AbstractBuilder<MetadataField, Metadat
MetadataField metadataField = metadataFieldService.find(c, id); MetadataField metadataField = metadataFieldService.find(c, id);
if (metadataField != null) { if (metadataField != null) {
try { try {
metadataFieldService.delete(c, metadataField); metadataFieldService.delete(c, metadataField);
} catch (AuthorizeException e) { } catch (AuthorizeException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
@@ -141,7 +133,7 @@ public class MetadataFieldBuilder extends AbstractBuilder<MetadataField, Metadat
metadataField = metadataFieldService metadataField = metadataFieldService
.create(context, schema, element, qualifier, scopeNote); .create(context, schema, element, qualifier, scopeNote);
} catch (NonUniqueMetadataException e) { } catch (NonUniqueMetadataException e) {
e.printStackTrace(); log.error("Failed to create MetadataField", e);
} }
return this; return this;

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder; package org.dspace.builder;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
@@ -63,15 +63,10 @@ public class MetadataSchemaBuilder extends AbstractBuilder<MetadataSchema, Metad
context.dispatchEvents(); context.dispatchEvents();
indexingService.commit(); indexingService.commit();
} catch (SearchServiceException e) { } catch (SearchServiceException | SQLException | AuthorizeException e) {
log.error(e); log.error(e);
} catch (SQLException e) {
log.error(e);
} catch (AuthorizeException e) {
log.error(e);
;
} catch (NonUniqueMetadataException e) { } catch (NonUniqueMetadataException e) {
e.printStackTrace(); log.error("Failed to complete MetadataSchema", e);
} }
return metadataSchema; return metadataSchema;
} }
@@ -101,7 +96,7 @@ public class MetadataSchemaBuilder extends AbstractBuilder<MetadataSchema, Metad
MetadataSchema metadataSchema = metadataSchemaService.find(c, id); MetadataSchema metadataSchema = metadataSchemaService.find(c, id);
if (metadataSchema != null) { if (metadataSchema != null) {
try { try {
metadataSchemaService.delete(c, metadataSchema); metadataSchemaService.delete(c, metadataSchema);
} catch (AuthorizeException e) { } catch (AuthorizeException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
@@ -123,7 +118,7 @@ public class MetadataSchemaBuilder extends AbstractBuilder<MetadataSchema, Metad
try { try {
metadataSchema = metadataSchemaService.create(context, name, namespace); metadataSchema = metadataSchemaService.create(context, name, namespace);
} catch (NonUniqueMetadataException e) { } catch (NonUniqueMetadataException e) {
e.printStackTrace(); log.error("Failed to create MetadataSchema", e);
} }
return this; return this;

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder; package org.dspace.builder;
import java.io.InputStream; import java.io.InputStream;

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder; package org.dspace.builder;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
@@ -57,6 +57,7 @@ public class ProcessBuilder extends AbstractBuilder<Process, ProcessService> {
} }
} }
@Override
public Process build() { public Process build() {
try { try {
processService.update(context, process); processService.update(context, process);
@@ -68,6 +69,7 @@ public class ProcessBuilder extends AbstractBuilder<Process, ProcessService> {
return process; return process;
} }
@Override
protected ProcessService getService() { protected ProcessService getService() {
return processService; return processService;
} }

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder; package org.dspace.builder;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
@@ -56,6 +56,7 @@ public class RelationshipBuilder extends AbstractBuilder<Relationship, Relations
} }
} }
@Override
public Relationship build() { public Relationship build() {
try { try {
@@ -117,7 +118,7 @@ public class RelationshipBuilder extends AbstractBuilder<Relationship, Relations
try { try {
relationship = relationshipService.create(context, leftItem, rightItem, relationshipType, 0, 0); relationship = relationshipService.create(context, leftItem, rightItem, relationshipType, 0, 0);
} catch (SQLException | AuthorizeException e) { } catch (SQLException | AuthorizeException e) {
e.printStackTrace(); log.warn("Failed to create relationship", e);
} }
return this; return this;

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder; package org.dspace.builder;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.List; import java.util.List;
@@ -62,6 +62,7 @@ public class RelationshipTypeBuilder extends AbstractBuilder<RelationshipType, R
} }
} }
@Override
public RelationshipType build() { public RelationshipType build() {
try { try {
@@ -116,7 +117,7 @@ public class RelationshipTypeBuilder extends AbstractBuilder<RelationshipType, R
leftCardinalityMax, rightCardinalityMin, rightCardinalityMax); leftCardinalityMax, rightCardinalityMin, rightCardinalityMax);
} catch (SQLException | AuthorizeException e) { } catch (SQLException | AuthorizeException e) {
e.printStackTrace(); log.error("Failed to create RelationshipType", e);
} }
return this; return this;

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder; package org.dspace.builder;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder; package org.dspace.builder;
import org.dspace.content.Site; import org.dspace.content.Site;
import org.dspace.content.service.DSpaceObjectService; import org.dspace.content.service.DSpaceObjectService;

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder; package org.dspace.builder;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder; package org.dspace.builder;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;

View File

@@ -5,32 +5,32 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.builder.util; package org.dspace.builder.util;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import org.dspace.app.rest.builder.AbstractBuilder; import org.dspace.builder.AbstractBuilder;
import org.dspace.app.rest.builder.BitstreamBuilder; import org.dspace.builder.BitstreamBuilder;
import org.dspace.app.rest.builder.BitstreamFormatBuilder; import org.dspace.builder.BitstreamFormatBuilder;
import org.dspace.app.rest.builder.ClaimedTaskBuilder; import org.dspace.builder.ClaimedTaskBuilder;
import org.dspace.app.rest.builder.CollectionBuilder; import org.dspace.builder.CollectionBuilder;
import org.dspace.app.rest.builder.CommunityBuilder; import org.dspace.builder.CommunityBuilder;
import org.dspace.app.rest.builder.EPersonBuilder; import org.dspace.builder.EPersonBuilder;
import org.dspace.app.rest.builder.EntityTypeBuilder; import org.dspace.builder.EntityTypeBuilder;
import org.dspace.app.rest.builder.GroupBuilder; import org.dspace.builder.GroupBuilder;
import org.dspace.app.rest.builder.ItemBuilder; import org.dspace.builder.ItemBuilder;
import org.dspace.app.rest.builder.MetadataFieldBuilder; import org.dspace.builder.MetadataFieldBuilder;
import org.dspace.app.rest.builder.MetadataSchemaBuilder; import org.dspace.builder.MetadataSchemaBuilder;
import org.dspace.app.rest.builder.PoolTaskBuilder; import org.dspace.builder.PoolTaskBuilder;
import org.dspace.app.rest.builder.ProcessBuilder; import org.dspace.builder.ProcessBuilder;
import org.dspace.app.rest.builder.RelationshipBuilder; import org.dspace.builder.RelationshipBuilder;
import org.dspace.app.rest.builder.RelationshipTypeBuilder; import org.dspace.builder.RelationshipTypeBuilder;
import org.dspace.app.rest.builder.SiteBuilder; import org.dspace.builder.SiteBuilder;
import org.dspace.app.rest.builder.WorkflowItemBuilder; import org.dspace.builder.WorkflowItemBuilder;
import org.dspace.app.rest.builder.WorkspaceItemBuilder; import org.dspace.builder.WorkspaceItemBuilder;
/** /**
* This class will ensure that all the builders that are registered will be cleaned up in the order as defined * This class will ensure that all the builders that are registered will be cleaned up in the order as defined
@@ -39,7 +39,8 @@ import org.dspace.app.rest.builder.WorkspaceItemBuilder;
*/ */
public class AbstractBuilderCleanupUtil { public class AbstractBuilderCleanupUtil {
private LinkedHashMap<String, List<AbstractBuilder>> map = new LinkedHashMap<>(); private final LinkedHashMap<String, List<AbstractBuilder>> map
= new LinkedHashMap<>();
/** /**
* Constructor that will initialize the Map with a predefined order for deletion * Constructor that will initialize the Map with a predefined order for deletion

View File

@@ -8,23 +8,27 @@
package org.dspace.curate; package org.dspace.curate;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import org.dspace.AbstractUnitTest; import org.dspace.AbstractUnitTest;
import org.dspace.content.DSpaceObject; import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.SiteService; import org.dspace.content.service.SiteService;
import org.dspace.core.factory.CoreServiceFactory;
import org.dspace.ctask.general.NoOpCurationTask;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.junit.Test; import org.junit.Test;
/** /**
*
* @author mhwood * @author mhwood
*/ */
public class CuratorTest public class CuratorTest extends AbstractUnitTest {
extends AbstractUnitTest {
private static final SiteService SITE_SERVICE = ContentServiceFactory.getInstance().getSiteService(); private static final SiteService SITE_SERVICE = ContentServiceFactory.getInstance().getSiteService();
static final String RUN_PARAMETER_NAME = "runParameter"; static final String RUN_PARAMETER_NAME = "runParameter";
@@ -32,20 +36,24 @@ public class CuratorTest
static final String TASK_PROPERTY_NAME = "taskProperty"; static final String TASK_PROPERTY_NAME = "taskProperty";
static final String TASK_PROPERTY_VALUE = "a property"; static final String TASK_PROPERTY_VALUE = "a property";
/** Value of a known runtime parameter, if any. */ /**
* Value of a known runtime parameter, if any.
*/
static String runParameter; static String runParameter;
/** Value of a known task property, if any. */ /**
* Value of a known task property, if any.
*/
static String taskProperty; static String taskProperty;
/** /**
* Test of curate method, of class Curator. * Test of curate method, of class Curator.
* Currently this just tests task properties and run parameters. * Currently this just tests task properties and run parameters.
*
* @throws java.lang.Exception passed through. * @throws java.lang.Exception passed through.
*/ */
@Test @Test
public void testCurate_DSpaceObject() public void testCurate_DSpaceObject() throws Exception {
throws Exception {
System.out.println("curate"); System.out.println("curate");
final String TASK_NAME = "dummyTask"; final String TASK_NAME = "dummyTask";
@@ -53,7 +61,7 @@ public class CuratorTest
// Configure the task to be run. // Configure the task to be run.
ConfigurationService cfg = kernelImpl.getConfigurationService(); ConfigurationService cfg = kernelImpl.getConfigurationService();
cfg.setProperty("plugin.named.org.dspace.curate.CurationTask", cfg.setProperty("plugin.named.org.dspace.curate.CurationTask",
DummyTask.class.getName() + " = " + TASK_NAME); DummyTask.class.getName() + " = " + TASK_NAME);
cfg.setProperty(TASK_NAME + '.' + TASK_PROPERTY_NAME, TASK_PROPERTY_VALUE); cfg.setProperty(TASK_NAME + '.' + TASK_PROPERTY_NAME, TASK_PROPERTY_VALUE);
// Get and configure a Curator. // Get and configure a Curator.
@@ -72,12 +80,40 @@ public class CuratorTest
// Check the result. // Check the result.
System.out.format("Task %s result was '%s'%n", System.out.format("Task %s result was '%s'%n",
TASK_NAME, instance.getResult(TASK_NAME)); TASK_NAME, instance.getResult(TASK_NAME));
System.out.format("Task %s status was %d%n", System.out.format("Task %s status was %d%n",
TASK_NAME, instance.getStatus(TASK_NAME)); TASK_NAME, instance.getStatus(TASK_NAME));
assertEquals("Unexpected task status", assertEquals("Unexpected task status",
Curator.CURATE_SUCCESS, instance.getStatus(TASK_NAME)); Curator.CURATE_SUCCESS, instance.getStatus(TASK_NAME));
assertEquals("Wrong run parameter", RUN_PARAMETER_VALUE, runParameter); assertEquals("Wrong run parameter", RUN_PARAMETER_VALUE, runParameter);
assertEquals("Wrong task property", TASK_PROPERTY_VALUE, taskProperty); assertEquals("Wrong task property", TASK_PROPERTY_VALUE, taskProperty);
} }
@Test
public void testCurate_NoOpTask() throws Exception {
CoreServiceFactory.getInstance().getPluginService().clearNamedPluginClasses();
final String TASK_NAME = "noop";
// Configure the noop task to be run.
ConfigurationService cfg = kernelImpl.getConfigurationService();
cfg.setProperty("plugin.named.org.dspace.curate.CurationTask",
NoOpCurationTask.class.getName() + " = " + TASK_NAME);
// Get and configure a Curator.
Curator curator = new Curator();
StringBuilder reporterOutput = new StringBuilder();
curator.setReporter(reporterOutput); // Send any report to our StringBuilder.
curator.addTask(TASK_NAME);
Item item = mock(Item.class);
when(item.getType()).thenReturn(2);
when(item.getHandle()).thenReturn("testHandle");
curator.curate(context, item);
assertEquals(Curator.CURATE_SUCCESS, curator.getStatus(TASK_NAME));
assertEquals(reporterOutput.toString(), "No operation performed on testHandle");
}
} }

View File

@@ -7,19 +7,35 @@
*/ */
package org.dspace.discovery; package org.dspace.discovery;
import org.dspace.solr.MockSolrServer;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.InitializingBean;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
/** /**
* Mock SOLR service for the Search Core * Mock SOLR service for the Search Core. Manages an in-process Solr server
* with an in-memory "search" core.
*/ */
@Service @Service
public class MockSolrSearchCore extends SolrSearchCore implements InitializingBean { public class MockSolrSearchCore extends SolrSearchCore
implements InitializingBean, DisposableBean {
private MockSolrServer mockSolrServer;
@Override @Override
public void afterPropertiesSet() throws Exception { public void afterPropertiesSet() throws Exception {
//We don't use SOLR in the tests of this module mockSolrServer = new MockSolrServer("search");
solr = null; solr = mockSolrServer.getSolrServer();
} }
/**
* Reset the core for the next test. See {@link MockSolrServer#reset()}.
*/
public void reset() {
mockSolrServer.reset();
}
@Override
public void destroy() throws Exception {
mockSolrServer.destroy();
}
} }
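Instead of setting `solr = null`, the mock now manages a real embedded core through the standard Spring lifecycle callbacks. Stripped of the DSpace specifics, this is the usual InitializingBean/DisposableBean pairing; a minimal generic sketch (not DSpace code):

```
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;

/**
 * Generic illustration of the lifecycle pattern used by the mock above:
 * afterPropertiesSet() builds the embedded resource once dependencies are
 * injected, and destroy() releases it when the Spring context shuts down.
 */
public class EmbeddedResourceBean implements InitializingBean, DisposableBean {

    private AutoCloseable resource;

    @Override
    public void afterPropertiesSet() throws Exception {
        // Stand-in for "new MockSolrServer(...)" in the real mock.
        resource = () -> System.out.println("embedded resource closed");
    }

    @Override
    public void destroy() throws Exception {
        resource.close();
    }
}
```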
View File
@@ -29,6 +29,7 @@ public class MockCCLicenseConnectorServiceImpl extends CCLicenseConnectorService
* @param language - the language * @param language - the language
* @return a map of mocked licenses with the id and the license * @return a map of mocked licenses with the id and the license
*/ */
@Override
public Map<String, CCLicense> retrieveLicenses(String language) { public Map<String, CCLicense> retrieveLicenses(String language) {
Map<String, CCLicense> ccLicenses = new HashMap<>(); Map<String, CCLicense> ccLicenses = new HashMap<>();
CCLicense mockLicense1 = createMockLicense(1, new int[]{3, 2, 3}); CCLicense mockLicense1 = createMockLicense(1, new int[]{3, 2, 3});
@@ -89,6 +90,7 @@ public class MockCCLicenseConnectorServiceImpl extends CCLicenseConnectorService
* @param answerMap - the answers to the different field questions * @param answerMap - the answers to the different field questions
* @return the CC License URI * @return the CC License URI
*/ */
@Override
public String retrieveRightsByQuestion(final String licenseId, public String retrieveRightsByQuestion(final String licenseId,
final String language, final String language,
final Map<String, String> answerMap) { final Map<String, String> answerMap) {
@@ -105,6 +107,7 @@ public class MockCCLicenseConnectorServiceImpl extends CCLicenseConnectorService
* @return a mock license RDF document or null when the URI contains invalid * @return a mock license RDF document or null when the URI contains invalid
* @throws IOException * @throws IOException
*/ */
@Override
public Document retrieveLicenseRDFDoc(String licenseURI) throws IOException { public Document retrieveLicenseRDFDoc(String licenseURI) throws IOException {
if (!StringUtils.contains(licenseURI, "invalid")) { if (!StringUtils.contains(licenseURI, "invalid")) {
InputStream cclicense = null; InputStream cclicense = null;
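The hunks above only add the missing @Override annotations to the mocked methods. The benefit is compile-time safety rather than any behavioural change; a small generic illustration (not DSpace code):

```
/**
 * Why @Override is worth adding on the mock methods above: if the overridden
 * method's signature in the superclass ever changes, the compiler reports the
 * mismatch instead of silently leaving behind an overload that is never called.
 */
class ConnectorBase {
    protected String retrieve(String language) {
        return "real service";
    }
}

class MockConnector extends ConnectorBase {
    @Override
    protected String retrieve(String language) { // compile error if ConnectorBase.retrieve changes
        return "mocked response";
    }
}
```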
View File
@@ -19,7 +19,7 @@ import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer; import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
import org.apache.solr.core.CoreContainer; import org.apache.solr.core.CoreContainer;
import org.dspace.app.rest.test.AbstractDSpaceIntegrationTest; import org.dspace.AbstractDSpaceIntegrationTest;
/** /**
* Factory of connections to an in-process embedded Solr service. * Factory of connections to an in-process embedded Solr service.
@@ -110,7 +110,7 @@ public class MockSolrServer {
server.deleteByQuery("*:*"); server.deleteByQuery("*:*");
server.commit(); server.commit();
} catch (SolrServerException | IOException e) { } catch (SolrServerException | IOException e) {
e.printStackTrace(System.err); log.error("Failed to empty Solr index: {}", e.getMessage(), e);
} }
loadedCores.put(coreName, server); loadedCores.put(coreName, server);
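The `printStackTrace` call is replaced by a proper logger call. With Log4j2 (as with SLF4J), `{}` placeholders are filled from the leading arguments, and a Throwable passed as the final argument is still logged with its stack trace. A small standalone illustration of that idiom:

```
import java.io.IOException;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

/**
 * Standalone illustration of the logging idiom adopted above: the {}
 * placeholder is substituted with the message argument, and the exception
 * passed last is logged together with its stack trace.
 */
public class LoggingIdiomExample {

    private static final Logger log = LogManager.getLogger(LoggingIdiomExample.class);

    public static void main(String[] args) {
        try {
            throw new IOException("simulated Solr failure");
        } catch (IOException e) {
            log.error("Failed to empty Solr index: {}", e.getMessage(), e);
        }
    }
}
```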
View File
@@ -27,27 +27,29 @@ import com.maxmind.geoip2.record.MaxMind;
import com.maxmind.geoip2.record.Postal; import com.maxmind.geoip2.record.Postal;
import com.maxmind.geoip2.record.RepresentedCountry; import com.maxmind.geoip2.record.RepresentedCountry;
import com.maxmind.geoip2.record.Traits; import com.maxmind.geoip2.record.Traits;
import org.dspace.solr.MockSolrServer;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.InitializingBean;
import org.springframework.stereotype.Service;
/** /**
* Mock service that uses an embedded SOLR server for the statistics core. * Mock service that uses an embedded SOLR server for the statistics core.
* <p>
* <strong>NOTE:</strong> this class is overridden by one <em>of the same name</em>
* defined in dspace-server-webapp and declared as a bean there.
* See {@code test/data/dspaceFolder/config/spring/api/solr-services.xml}. Some kind of classpath
* magic makes this work.
*/ */
@Service
public class MockSolrLoggerServiceImpl public class MockSolrLoggerServiceImpl
extends SolrLoggerServiceImpl extends SolrLoggerServiceImpl
implements InitializingBean { implements InitializingBean, DisposableBean {
private MockSolrServer mockSolrServer;
public MockSolrLoggerServiceImpl() { public MockSolrLoggerServiceImpl() {
} }
@Override @Override
public void afterPropertiesSet() throws Exception { public void afterPropertiesSet() throws Exception {
//We don't use SOLR in the tests of this module // Initialize our service with a Mock Solr statistics core
solr = null; mockSolrServer = new MockSolrServer("statistics");
solr = mockSolrServer.getSolrServer();
// Mock GeoIP's DatabaseReader // Mock GeoIP's DatabaseReader
DatabaseReader reader = mock(DatabaseReader.class); DatabaseReader reader = mock(DatabaseReader.class);
@@ -58,14 +60,16 @@ public class MockSolrLoggerServiceImpl
} }
/** /**
* A mock/fake GeoIP CityResponse, which will be used for *all* test statistical requests * A mock/fake GeoIP CityResponse, which will be used for *all* test
* statistical requests.
*
* @return faked CityResponse * @return faked CityResponse
*/ */
private CityResponse mockCityResponse() { private CityResponse mockCityResponse() {
List<String> cityNames = new ArrayList<String>(Collections.singleton("New York")); List<String> cityNames = new ArrayList<>(Collections.singleton("New York"));
City city = new City(cityNames, 1, 1, new HashMap()); City city = new City(cityNames, 1, 1, new HashMap());
List<String> countryNames = new ArrayList<String>(Collections.singleton("United States")); List<String> countryNames = new ArrayList<>(Collections.singleton("United States"));
Country country = new Country(countryNames, 1, 1, "US", new HashMap()); Country country = new Country(countryNames, 1, 1, "US", new HashMap());
Location location = new Location(1, 1, 40.760498D, -73.9933D, 501, 1, "EST"); Location location = new Location(1, 1, 40.760498D, -73.9933D, 501, 1, "EST");
@@ -73,7 +77,17 @@ public class MockSolrLoggerServiceImpl
Postal postal = new Postal("10036", 1); Postal postal = new Postal("10036", 1);
return new CityResponse(city, new Continent(), country, location, new MaxMind(), postal, return new CityResponse(city, new Continent(), country, location, new MaxMind(), postal,
country, new RepresentedCountry(), new ArrayList<>(0), country, new RepresentedCountry(), new ArrayList<>(0),
new Traits()); new Traits());
}
/** Reset the core for the next test. See {@link MockSolrServer#reset()}. */
public void reset() {
mockSolrServer.reset();
}
@Override
public void destroy() throws Exception {
mockSolrServer.destroy();
} }
} }
View File
@@ -10,8 +10,10 @@ package org.dspace.xmlworkflow;
import static junit.framework.TestCase.assertEquals; import static junit.framework.TestCase.assertEquals;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;
import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.AbstractUnitTest; import org.dspace.AbstractUnitTest;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
@@ -35,9 +37,11 @@ import org.junit.Test;
*/ */
public class XmlWorkflowFactoryTest extends AbstractUnitTest { public class XmlWorkflowFactoryTest extends AbstractUnitTest {
private CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); private final CollectionService collectionService
private CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); = ContentServiceFactory.getInstance().getCollectionService();
private XmlWorkflowFactory xmlWorkflowFactory private final CommunityService communityService
= ContentServiceFactory.getInstance().getCommunityService();
private final XmlWorkflowFactory xmlWorkflowFactory
= new DSpace().getServiceManager().getServiceByName("xmlWorkflowFactory", = new DSpace().getServiceManager().getServiceByName("xmlWorkflowFactory",
XmlWorkflowFactoryImpl.class); XmlWorkflowFactoryImpl.class);
private Community owningCommunity; private Community owningCommunity;
@@ -47,7 +51,7 @@ public class XmlWorkflowFactoryTest extends AbstractUnitTest {
/** /**
* log4j category * log4j category
*/ */
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(XmlWorkflowFactoryTest.class); private static final Logger log = LogManager.getLogger(XmlWorkflowFactoryTest.class);
/** /**
* This method will be run before every test as per @Before. It will * This method will be run before every test as per @Before. It will
@@ -94,7 +98,7 @@ public class XmlWorkflowFactoryTest extends AbstractUnitTest {
this.collectionService.delete(context, this.nonMappedCollection); this.collectionService.delete(context, this.nonMappedCollection);
this.collectionService.delete(context, this.mappedCollection); this.collectionService.delete(context, this.mappedCollection);
this.communityService.delete(context, this.owningCommunity); this.communityService.delete(context, this.owningCommunity);
} catch (Exception e) { } catch (IOException | SQLException | AuthorizeException e) {
log.error("Error in destroy", e); log.error("Error in destroy", e);
} }
@@ -112,12 +116,12 @@ public class XmlWorkflowFactoryTest extends AbstractUnitTest {
@Test @Test
public void workflowMapping_NonMappedCollection() throws WorkflowConfigurationException { public void workflowMapping_NonMappedCollection() throws WorkflowConfigurationException {
Workflow workflow = xmlWorkflowFactory.getWorkflow(this.nonMappedCollection); Workflow workflow = xmlWorkflowFactory.getWorkflow(this.nonMappedCollection);
assertEquals(workflow.getID(), "defaultWorkflow"); assertEquals("defaultWorkflow", workflow.getID());
} }
@Test @Test
public void workflowMapping_MappedCollection() throws WorkflowConfigurationException { public void workflowMapping_MappedCollection() throws WorkflowConfigurationException {
Workflow workflow = xmlWorkflowFactory.getWorkflow(this.mappedCollection); Workflow workflow = xmlWorkflowFactory.getWorkflow(this.mappedCollection);
assertEquals(workflow.getID(), "selectSingleReviewer"); assertEquals("selectSingleReviewer", workflow.getID());
} }
} }
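Besides narrowing the caught exception types and making the service fields final, this hunk swaps the `assertEquals` arguments into the correct (expected, actual) order. A tiny self-contained reminder of why the order matters:

```
import static org.junit.Assert.assertEquals;

import org.junit.Test;

/**
 * JUnit's assertEquals(expected, actual): reversing the arguments does not
 * change whether the assertion passes, but a failure message then reads
 * backwards, e.g. "expected:<defaultWorkflow> but was:<selectSingleReviewer>"
 * when it is really the configured workflow that was wrong.
 */
public class AssertOrderExampleTest {

    @Test
    public void expectedValueComesFirst() {
        String actualWorkflowId = "defaultWorkflow";
        assertEquals("defaultWorkflow", actualWorkflowId);
    }
}
```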
View File
@@ -86,7 +86,8 @@ public class LocalURIRedirectionServlet extends HttpServlet {
response.sendError(HttpServletResponse.SC_NOT_FOUND); response.sendError(HttpServletResponse.SC_NOT_FOUND);
return; return;
} }
// use object's reported handle for redirect (just in case user provided handle had odd characters)
handle = dso.getHandle();
// close the context and send forward. // close the context and send forward.
context.abort(); context.abort();
Negotiator.sendRedirect(response, handle, "", requestedMimeType, true); Negotiator.sendRedirect(response, handle, "", requestedMimeType, true);
View File
@@ -15,7 +15,7 @@ var Report = function() {
this.ROOTPATH = "/xmlui/handle/" this.ROOTPATH = "/xmlui/handle/"
//this.ROOTPATH = "/jspui/handle/" //this.ROOTPATH = "/jspui/handle/"
//this.ROOTPATH = "/handle/" //this.ROOTPATH = "/handle/"
//Indicate if Password Authentication is supported //Indicate if Password Authentication is supported
this.makeAuthLink = function(){return false;}; this.makeAuthLink = function(){return false;};
@@ -27,34 +27,34 @@ var Report = function() {
this.getId = function(obj) { this.getId = function(obj) {
return obj.uuid; return obj.uuid;
} }
//Override this method if sortable.js has been included //Override this method if sortable.js has been included
this.hasSorttable = function() { this.hasSorttable = function() {
return false; return false;
} }
this.getDefaultParameters = function(){ this.getDefaultParameters = function(){
return {}; return {};
} }
this.getCurrentParameters = function(){ this.getCurrentParameters = function(){
return {}; return {};
} }
this.saveUrl = function() { this.saveUrl = function() {
this.myReportParameters.saveAsUrl(this.getCurrentParameters()); this.myReportParameters.saveAsUrl(this.getCurrentParameters());
} }
this.getLoginPayload = function() { this.getLoginPayload = function() {
//Placeholder to allow a customized report to prompt for email/password //Placeholder to allow a customized report to prompt for email/password
//If not enabled, the authentication callback will be called immediately //If not enabled, the authentication callback will be called immediately
var email = $("#restemail").val(); var email = $("#restemail").val();
var pass = $("#restpass").val(); var pass = $("#restpass").val();
if (email == "" || pass == "") { if (email == "" || pass == "") {
return undefined; return undefined;
} else if (email == null || pass == null) { } else if (email == null || pass == null) {
return undefined; return undefined;
} else { } else {
return {email: email, password: pass}; return {email: email, password: pass};
} }
} }
this.getLangSuffix = function(){ this.getLangSuffix = function(){
@@ -82,15 +82,15 @@ var Report = function() {
className: 'spinner', // The CSS class to assign to the spinner className: 'spinner', // The CSS class to assign to the spinner
zIndex: 2e9, // The z-index (defaults to 2000000000) zIndex: 2e9, // The z-index (defaults to 2000000000)
top: '400px', // Top position relative to parent top: '400px', // Top position relative to parent
left: '600px' // Left position relative to parent left: '600px' // Left position relative to parent
}); });
this.displayItems = function(itemsTitle, offset, limit, total, funcdec, funcinc) { this.displayItems = function(itemsTitle, offset, limit, total, funcdec, funcinc) {
var count = $("#itemtable tr.data").length; var count = $("#itemtable tr.data").length;
var last = offset + limit; var last = offset + limit;
var suff = ""; var suff = "";
if (total == null) { if (total == null) {
last = offset + count; last = offset + count;
suff = (count == limit) ? " of " + last + "+ " : " of " + last; suff = (count == limit) ? " of " + last + "+ " : " of " + last;
@@ -102,7 +102,7 @@ var Report = function() {
suff = " of " + total; suff = " of " + total;
} }
suff += " unfiltered; displaying " + count + " filtered" ; suff += " unfiltered; displaying " + count + " filtered" ;
itemsTitle += " (" + (offset+1) + " - " + last + suff + ")"; itemsTitle += " (" + (offset+1) + " - " + last + suff + ")";
$("#prev,#next").attr("disabled",true); $("#prev,#next").attr("disabled",true);
$("#itemdiv h3").text(itemsTitle); $("#itemdiv h3").text(itemsTitle);
@@ -110,34 +110,34 @@ var Report = function() {
if (offset > 0) $("#prev").attr("disabled", false); if (offset > 0) $("#prev").attr("disabled", false);
$("#prev").off("click").on("click", funcdec); $("#prev").off("click").on("click", funcdec);
//in case of filters, always allow next //in case of filters, always allow next
if (total == null) { if (total == null) {
$("#next").attr("disabled", false); $("#next").attr("disabled", false);
} else if (offset + limit < total) { } else if (offset + limit < total) {
$("#next").attr("disabled", false); $("#next").attr("disabled", false);
$("#exlimit").addClass("red"); $("#exlimit").addClass("red");
} else if (limit == total) { } else if (limit == total) {
//total may only be accurate to one page //total may only be accurate to one page
$("#next").attr("disabled", false); $("#next").attr("disabled", false);
$("#exlimit").addClass("red"); $("#exlimit").addClass("red");
} }
$("#next").off("click").on("click", funcinc); $("#next").off("click").on("click", funcinc);
} }
this.myReportParameters = undefined; this.myReportParameters = undefined;
this.myFilters = undefined; this.myFilters = undefined;
this.myMetadataFields = undefined; this.myMetadataFields = undefined;
this.initMetadataFields = function() { this.initMetadataFields = function() {
this.myMetadataFields = new MetadataFields(self); this.myMetadataFields = new MetadataFields(self);
this.myMetadataFields.load(); this.myMetadataFields.load();
} }
this.initBitstreamFields = function() { this.initBitstreamFields = function() {
this.myBitstreamFields = new BitstreamFields(self); this.myBitstreamFields = new BitstreamFields(self);
this.myBitstreamFields.load(); this.myBitstreamFields.load();
} }
this.baseInit = function() { this.baseInit = function() {
this.myReportParameters = new ReportParameters( this.myReportParameters = new ReportParameters(
this.getDefaultParameters(), this.getDefaultParameters(),
@@ -173,13 +173,13 @@ var Report = function() {
}); });
return itemdata; return itemdata;
} }
this.export = function(rows) { this.export = function(rows) {
var itemdata = "data:text/csv;charset=utf-8," + this.makeCsv(rows); var itemdata = "data:text/csv;charset=utf-8," + this.makeCsv(rows);
var encodedUri = encodeURI(itemdata); var encodedUri = encodeURI(itemdata);
window.open(encodedUri); window.open(encodedUri);
} }
//this is meant to be overridden for each report //this is meant to be overridden for each report
this.exportCol = function(colnum, col) { this.exportCol = function(colnum, col) {
var data = ""; var data = "";
@@ -187,7 +187,7 @@ var Report = function() {
data += self.exportCell(col); data += self.exportCell(col);
return data; return data;
} }
this.exportCell = function(col) { this.exportCell = function(col) {
data = "\""; data = "\"";
$(col).contents().each(function(i, node){ $(col).contents().each(function(i, node){
@@ -198,16 +198,16 @@ var Report = function() {
if ($(node).is("div:not(:last-child)")) { if ($(node).is("div:not(:last-child)")) {
data += "||"; data += "||";
} }
} }
}); });
data += "\""; data += "\"";
return data; return data;
} }
this.init = function() { this.init = function() {
this.baseInit(); this.baseInit();
} }
} }
var Auth = function(report) { var Auth = function(report) {
@@ -242,17 +242,17 @@ var Auth = function(report) {
self.authStat(); self.authStat();
self.callback(); self.callback();
} }
}); });
} }
this.verifyShibLogin = function() { this.verifyShibLogin = function() {
var self = this; var self = this;
$.ajax({ $.ajax({
url: "/rest/shibboleth-login", url: "/rest/shibboleth-login",
success: self.authStat success: self.authStat
}); });
} }
this.authStat = function() { this.authStat = function() {
var self = this; var self = this;
$.ajax({ $.ajax({
@@ -264,7 +264,7 @@ var Auth = function(report) {
success: function(data) { success: function(data) {
var user = ""; var user = "";
if (data.email != undefined) { if (data.email != undefined) {
user = data.email; user = data.email;
} else { } else {
user = "You are not logged in. Some items may be excluded from reports."; user = "You are not logged in. Some items may be excluded from reports.";
} }
@@ -279,10 +279,10 @@ var Auth = function(report) {
if (data.email == undefined && self.report.makeShibLink()) { if (data.email == undefined && self.report.makeShibLink()) {
self.verifyShibLogin(); self.verifyShibLogin();
} }
} }
}); });
} }
this.logout = function() { this.logout = function() {
var self = this; var self = this;
$.ajax({ $.ajax({
@@ -293,7 +293,7 @@ var Auth = function(report) {
complete: function(xhr, status) { complete: function(xhr, status) {
self.authStat(); self.authStat();
} }
}); });
} }
this.getHeaders = function() { this.getHeaders = function() {
var HEADERS = {}; var HEADERS = {};
@@ -314,14 +314,14 @@ var ReportParameters = function(defaultParams, prmstr) {
var field = tmparr[0]; var field = tmparr[0];
var val = decodeURIComponent(tmparr[1]); var val = decodeURIComponent(tmparr[1]);
var pval = this.params[field]; var pval = this.params[field];
if ($.isArray(pval)) { if ($.isArray(pval)) {
pval[pval.length] = val; pval[pval.length] = val;
} else { } else {
this.params[field] = val; this.params[field] = val;
} }
} }
$("#limit").val(this.params.limit); $("#limit").val(this.params.limit);
$("#offset").val(this.params.offset); $("#offset").val(this.params.offset);
this.limit = this.params.limit; this.limit = this.params.limit;
this.offset = this.params.offset; this.offset = this.params.offset;
@@ -350,11 +350,11 @@ var ReportParameters = function(defaultParams, prmstr) {
var lim = $("#limit").val(); var lim = $("#limit").val();
if ($.isNumeric(val) && $.isNumeric(lim)) { if ($.isNumeric(val) && $.isNumeric(lim)) {
if (increment) { if (increment) {
$("#offset").val(this.getNextOffset()); $("#offset").val(this.getNextOffset());
} else { } else {
$("#offset").val(this.getPrevOffset()); $("#offset").val(this.getPrevOffset());
} }
} }
} }
this.saveAsUrl = function(params) { this.saveAsUrl = function(params) {
@@ -381,7 +381,7 @@ var Filters = function() {
$("#filter-reload").attr("disabled", false); $("#filter-reload").attr("disabled", false);
} }
); );
$.getJSON( $.getJSON(
"/rest/filters", "/rest/filters",
function(data){ function(data){
@@ -444,13 +444,13 @@ var Filters = function() {
list = "none"; list = "none";
} }
return list; return list;
} }
} }
var MetadataFields = function(report) { var MetadataFields = function(report) {
this.metadataSchemas = undefined; this.metadataSchemas = undefined;
var self = this; var self = this;
this.load = function(){ this.load = function(){
$.ajax({ $.ajax({
url: "/rest/registries/schema", url: "/rest/registries/schema",
@@ -463,15 +463,15 @@ var MetadataFields = function(report) {
}, },
complete: function(xhr, status) { complete: function(xhr, status) {
} }
}); });
} }
this.initFields = function(data, report) { this.initFields = function(data, report) {
var params = report.myReportParameters.params; var params = report.myReportParameters.params;
self.metadataSchemas = data; self.metadataSchemas = data;
self.drawShowFields(params["show_fields[]"]); self.drawShowFields(params["show_fields[]"]);
} }
this.getShowFields = function(){ this.getShowFields = function(){
var val = $("#show-fields select").val(); var val = $("#show-fields select").val();
return val == null ? Array() : val; return val == null ? Array() : val;
@@ -497,7 +497,7 @@ var MetadataFields = function(report) {
}); });
}); });
} }
this.initQueries = function(){}; this.initQueries = function(){};
} }
@@ -508,15 +508,15 @@ var BitstreamFields = function(report) {
} }
this.map = [ this.map = [
{ {
key: "original-file-names", key: "original-file-names",
name: "Original File Names", name: "Original File Names",
ftest: self.isOriginal, ftest: self.isOriginal,
fval: function(bit) { fval: function(bit) {
return bit.name; return bit.name;
} }
}, },
{ {
key: "mime-type", key: "mime-type",
name: "Mime Type", name: "Mime Type",
ftest: self.isOriginal, ftest: self.isOriginal,
fval: function(bit) { fval: function(bit) {
@@ -524,7 +524,7 @@ var BitstreamFields = function(report) {
} }
}, },
{ {
key: "bitstream-format", key: "bitstream-format",
name: "Bitstream Format", name: "Bitstream Format",
ftest: self.isOriginal, ftest: self.isOriginal,
fval: function(bit) { fval: function(bit) {
@@ -532,7 +532,7 @@ var BitstreamFields = function(report) {
} }
}, },
{ {
key: "bitstream-description", key: "bitstream-description",
name: "Bitstream Description", name: "Bitstream Description",
ftest: self.isOriginal, ftest: self.isOriginal,
fval: function(bit) { fval: function(bit) {
@@ -540,7 +540,7 @@ var BitstreamFields = function(report) {
} }
}, },
{ {
key: "bitstream-size", key: "bitstream-size",
name: "Bitstream Size", name: "Bitstream Size",
ftest: self.isOriginal, ftest: self.isOriginal,
fval: function(bit) { fval: function(bit) {
@@ -548,18 +548,18 @@ var BitstreamFields = function(report) {
} }
}, },
{ {
key: "bitstream-checksum", key: "bitstream-checksum",
name: "MD5 Checksum", name: "MD5 Checksum",
ftest: self.isOriginal, ftest: self.isOriginal,
fval: function(bit) { fval: function(bit) {
if (bit.checkSum.checkSumAlgorithm === "MD5") { if (bit.checkSum.checkSumAlgorithm === "MD5") {
return bit.checkSum.value; return bit.checkSum.value;
} }
return ""; return "";
} }
}, },
]; ];
this.load = function(){ this.load = function(){
self.initFields(report); self.initFields(report);
} }
@@ -568,7 +568,7 @@ var BitstreamFields = function(report) {
var params = report.myReportParameters.params; var params = report.myReportParameters.params;
self.drawShowFieldsBits(params["show_fields_bits[]"]); self.drawShowFieldsBits(params["show_fields_bits[]"]);
}; };
this.hasBitstreamFields = function() { this.hasBitstreamFields = function() {
return self.getShowFieldsBits() != null; return self.getShowFieldsBits() != null;
} }
@@ -576,20 +576,20 @@ var BitstreamFields = function(report) {
var val = $("#show-fields-bits select").val(); var val = $("#show-fields-bits select").val();
return val == null ? Array() : val; return val == null ? Array() : val;
} }
this.drawShowFieldsBits = function(pfieldsBits) { this.drawShowFieldsBits = function(pfieldsBits) {
var sel = $("<select name='show_fields_bits'/>"); var sel = $("<select name='show_fields_bits'/>");
sel.attr("multiple","true").attr("size","8").appendTo("#show-fields-bits"); sel.attr("multiple","true").attr("size","8").appendTo("#show-fields-bits");
for(var i=0; i<this.map.length; i++) { for(var i=0; i<this.map.length; i++) {
var opt = report.myHtmlUtil.addOpt(sel, this.map[i].name, this.map[i].key); var opt = report.myHtmlUtil.addOpt(sel, this.map[i].name, this.map[i].key);
if (pfieldsBits != null) { if (pfieldsBits != null) {
opt.attr("selected", pfieldsBits[this.map[i].key] != undefined ? "Y" : null); opt.attr("selected", pfieldsBits[this.map[i].key] != undefined ? "Y" : null);
} }
sel.append(opt); sel.append(opt);
} }
} }
this.getKeyText = function(key, item, bitfields) { this.getKeyText = function(key, item, bitfields) {
var ret = []; var ret = [];
if (bitfields == null || item.bitstreams == null) { if (bitfields == null || item.bitstreams == null) {
@@ -608,20 +608,20 @@ var BitstreamFields = function(report) {
if (mapval == null) { if (mapval == null) {
return ret; return ret;
} }
$.each(item.bitstreams, function(colindex, bitstream) { $.each(item.bitstreams, function(colindex, bitstream) {
if (mapval.ftest(bitstream)) { if (mapval.ftest(bitstream)) {
var val = mapval.fval(bitstream); var val = mapval.fval(bitstream);
if (val != null) { if (val != null) {
if (isNaN(val) || ret.length == 0) { if (isNaN(val) || ret.length == 0) {
ret.push(val); ret.push(val);
} else { } else {
ret[0] += val; ret[0] += val;
} }
} }
} }
}); });
return ret; return ret;
} }
} }
@@ -661,6 +661,7 @@ var HtmlUtil = function() {
a.append(val); a.append(val);
a.attr("href", href); a.attr("href", href);
a.attr("target", "_blank"); a.attr("target", "_blank");
a.attr("rel", "noopener noreferrer");
return a; return a;
} }
@@ -704,7 +705,7 @@ var CommunitySelector = function(report, parent, paramCollSel) {
var collSel = $("<select/>").attr("id","collSel").attr("name","collSel").attr("multiple", true).attr("size",15); var collSel = $("<select/>").attr("id","collSel").attr("name","collSel").attr("multiple", true).attr("size",15);
parent.append(collSel); parent.append(collSel);
report.myHtmlUtil.addOpt(collSel, "Whole Repository", ""); report.myHtmlUtil.addOpt(collSel, "Whole Repository", "");
$.ajax({ $.ajax({
url: "/rest/hierarchy", url: "/rest/hierarchy",
dataType: "json", dataType: "json",
@@ -722,7 +723,7 @@ var CommunitySelector = function(report, parent, paramCollSel) {
}, },
complete: function(xhr, status) { complete: function(xhr, status) {
} }
}); });
this.addCommLabel = function(collSel, comm, indent, paramCollSel) { this.addCommLabel = function(collSel, comm, indent, paramCollSel) {
var prefix = ""; var prefix = "";
@@ -738,12 +739,12 @@ var CommunitySelector = function(report, parent, paramCollSel) {
opt.attr("selected", true); opt.attr("selected", true);
} }
}); });
}); });
} }
if (comm.community != null) { if (comm.community != null) {
$.each(comm.community, function(index, scomm) { $.each(comm.community, function(index, scomm) {
self.addCommLabel(collSel, scomm, indent + 1, paramCollSel); self.addCommLabel(collSel, scomm, indent + 1, paramCollSel);
}); });
} }
} }
} }
View File
@@ -37,7 +37,7 @@
<activation> <activation>
<activeByDefault>false</activeByDefault> <activeByDefault>false</activeByDefault>
<property> <property>
<name>maven.test.skip</name> <name>skipTests</name>
<value>false</value> <value>false</value>
</property> </property>
</activation> </activation>
@@ -307,6 +307,13 @@
<artifactId>dspace-api</artifactId> <artifactId>dspace-api</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.dspace</groupId>
<artifactId>dspace-api</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency> <dependency>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-services</artifactId> <artifactId>dspace-services</artifactId>
@@ -460,6 +467,14 @@
<artifactId>solr-cell</artifactId> <artifactId>solr-cell</artifactId>
<scope>test</scope> <scope>test</scope>
<exclusions> <exclusions>
<exclusion>
<groupId>org.bouncycastle</groupId>
<artifactId>bcpkix-jdk15on</artifactId>
</exclusion>
<exclusion>
<groupId>org.bouncycastle</groupId>
<artifactId>bcprov-jdk15on</artifactId>
</exclusion>
<exclusion> <exclusion>
<groupId>org.eclipse.jetty</groupId> <groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-continuation</artifactId> <artifactId>jetty-continuation</artifactId>
@@ -524,13 +539,11 @@
<dependency> <dependency>
<groupId>org.apache.lucene</groupId> <groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-smartcn</artifactId> <artifactId>lucene-analyzers-smartcn</artifactId>
<version>${solr.client.version}</version>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.lucene</groupId> <groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-stempel</artifactId> <artifactId>lucene-analyzers-stempel</artifactId>
<version>${solr.client.version}</version>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>

View File
*/ */
package org.dspace.app.rest; package org.dspace.app.rest;
import static org.apache.commons.collections4.ListUtils.emptyIfNull;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
@@ -100,51 +102,55 @@ public class DiscoveryRestController implements InitializingBean {
@RequestMapping(method = RequestMethod.GET, value = "/search/facets") @RequestMapping(method = RequestMethod.GET, value = "/search/facets")
public FacetsResource getFacets(@RequestParam(name = "query", required = false) String query, public FacetsResource getFacets(@RequestParam(name = "query", required = false) String query,
@RequestParam(name = "dsoType", required = false) String dsoType, @RequestParam(name = "dsoType", required = false) List<String> dsoTypes,
@RequestParam(name = "scope", required = false) String dsoScope, @RequestParam(name = "scope", required = false) String dsoScope,
@RequestParam(name = "configuration", required = false) String configuration, @RequestParam(name = "configuration", required = false) String configuration,
List<SearchFilter> searchFilters, List<SearchFilter> searchFilters,
Pageable page) throws Exception { Pageable page) throws Exception {
dsoTypes = emptyIfNull(dsoTypes);
if (log.isTraceEnabled()) { if (log.isTraceEnabled()) {
log.trace("Searching with scope: " + StringUtils.trimToEmpty(dsoScope) log.trace("Searching with scope: " + StringUtils.trimToEmpty(dsoScope)
+ ", configuration name: " + StringUtils.trimToEmpty(configuration) + ", configuration name: " + StringUtils.trimToEmpty(configuration)
+ ", dsoType: " + StringUtils.trimToEmpty(dsoType) + ", dsoTypes: " + String.join(", ", dsoTypes)
+ ", query: " + StringUtils.trimToEmpty(query) + ", query: " + StringUtils.trimToEmpty(query)
+ ", filters: " + Objects.toString(searchFilters)); + ", filters: " + Objects.toString(searchFilters));
} }
SearchResultsRest searchResultsRest = discoveryRestRepository SearchResultsRest searchResultsRest = discoveryRestRepository
.getAllFacets(query, dsoType, dsoScope, configuration, searchFilters); .getAllFacets(query, dsoTypes, dsoScope, configuration, searchFilters);
FacetsResource facetsResource = new FacetsResource(searchResultsRest, page); FacetsResource facetsResource = new FacetsResource(searchResultsRest, page);
halLinkService.addLinks(facetsResource, page); halLinkService.addLinks(facetsResource, page);
return facetsResource; return facetsResource;
} }
@RequestMapping(method = RequestMethod.GET, value = "/search/objects") @RequestMapping(method = RequestMethod.GET, value = "/search/objects")
public SearchResultsResource getSearchObjects(@RequestParam(name = "query", required = false) String query, public SearchResultsResource getSearchObjects(@RequestParam(name = "query", required = false) String query,
@RequestParam(name = "dsoType", required = false) String dsoType, @RequestParam(name = "dsoType", required = false)
List<String> dsoTypes,
@RequestParam(name = "scope", required = false) String dsoScope, @RequestParam(name = "scope", required = false) String dsoScope,
@RequestParam(name = "configuration", required = false) String @RequestParam(name = "configuration", required = false) String
configuration, configuration,
List<SearchFilter> searchFilters, List<SearchFilter> searchFilters,
Pageable page) throws Exception { Pageable page) throws Exception {
dsoTypes = emptyIfNull(dsoTypes);
if (log.isTraceEnabled()) { if (log.isTraceEnabled()) {
log.trace("Searching with scope: " + StringUtils.trimToEmpty(dsoScope) log.trace("Searching with scope: " + StringUtils.trimToEmpty(dsoScope)
+ ", configuration name: " + StringUtils.trimToEmpty(configuration) + ", configuration name: " + StringUtils.trimToEmpty(configuration)
+ ", dsoType: " + StringUtils.trimToEmpty(dsoType) + ", dsoTypes: " + String.join(", ", dsoTypes)
+ ", query: " + StringUtils.trimToEmpty(query) + ", query: " + StringUtils.trimToEmpty(query)
+ ", filters: " + Objects.toString(searchFilters) + ", filters: " + Objects.toString(searchFilters)
+ ", page: " + Objects.toString(page)); + ", page: " + Objects.toString(page));
} }
//Get the Search results in JSON format //Get the Search results in JSON format
SearchResultsRest searchResultsRest = discoveryRestRepository SearchResultsRest searchResultsRest = discoveryRestRepository
.getSearchObjects(query, dsoType, dsoScope, configuration, searchFilters, page, utils.obtainProjection()); .getSearchObjects(query, dsoTypes, dsoScope, configuration, searchFilters, page, utils.obtainProjection());
//Convert the Search JSON results to paginated HAL resources //Convert the Search JSON results to paginated HAL resources
SearchResultsResource searchResultsResource = new SearchResultsResource(searchResultsRest, utils, page); SearchResultsResource searchResultsResource = new SearchResultsResource(searchResultsRest, utils, page);
@@ -174,15 +180,18 @@ public class DiscoveryRestController implements InitializingBean {
public RepresentationModel getFacetValues(@PathVariable("name") String facetName, public RepresentationModel getFacetValues(@PathVariable("name") String facetName,
@RequestParam(name = "prefix", required = false) String prefix, @RequestParam(name = "prefix", required = false) String prefix,
@RequestParam(name = "query", required = false) String query, @RequestParam(name = "query", required = false) String query,
@RequestParam(name = "dsoType", required = false) String dsoType, @RequestParam(name = "dsoType", required = false) List<String> dsoTypes,
@RequestParam(name = "scope", required = false) String dsoScope, @RequestParam(name = "scope", required = false) String dsoScope,
@RequestParam(name = "configuration", required = false) String @RequestParam(name = "configuration", required = false) String
configuration, configuration,
List<SearchFilter> searchFilters, List<SearchFilter> searchFilters,
Pageable page) throws Exception { Pageable page) throws Exception {
dsoTypes = emptyIfNull(dsoTypes);
if (log.isTraceEnabled()) { if (log.isTraceEnabled()) {
log.trace("Facetting on facet " + facetName + " with scope: " + StringUtils.trimToEmpty(dsoScope) log.trace("Facetting on facet " + facetName + " with scope: " + StringUtils.trimToEmpty(dsoScope)
+ ", dsoType: " + StringUtils.trimToEmpty(dsoType) + ", dsoTypes: " + String.join(", ", dsoTypes)
+ ", prefix: " + StringUtils.trimToEmpty(prefix) + ", prefix: " + StringUtils.trimToEmpty(prefix)
+ ", query: " + StringUtils.trimToEmpty(query) + ", query: " + StringUtils.trimToEmpty(query)
+ ", filters: " + Objects.toString(searchFilters) + ", filters: " + Objects.toString(searchFilters)
@@ -190,7 +199,7 @@ public class DiscoveryRestController implements InitializingBean {
} }
FacetResultsRest facetResultsRest = discoveryRestRepository FacetResultsRest facetResultsRest = discoveryRestRepository
.getFacetObjects(facetName, prefix, query, dsoType, dsoScope, configuration, searchFilters, page); .getFacetObjects(facetName, prefix, query, dsoTypes, dsoScope, configuration, searchFilters, page);
FacetResultsResource facetResultsResource = converter.toResource(facetResultsRest); FacetResultsResource facetResultsResource = converter.toResource(facetResultsRest);
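The `dsoType` request parameter becomes a repeatable `dsoTypes` list, and `ListUtils.emptyIfNull()` guards against Spring binding it to `null` when the parameter is absent. A minimal standalone illustration of that guard:

```
import static org.apache.commons.collections4.ListUtils.emptyIfNull;

import java.util.List;

/**
 * Illustration of the emptyIfNull() guard used above: when no dsoType
 * parameter is sent, the bound list is null; emptyIfNull() substitutes an
 * empty list so String.join() and iteration never throw a NullPointerException.
 */
public class EmptyIfNullExample {

    public static void main(String[] args) {
        List<String> dsoTypes = emptyIfNull(null);
        System.out.println("dsoTypes: [" + String.join(", ", dsoTypes) + "]"); // prints "dsoTypes: []"
    }
}
```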
View File
@@ -34,6 +34,7 @@ import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService; import org.dspace.content.service.CommunityService;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.LogManager; import org.dspace.core.LogManager;
import org.dspace.core.Utils;
import org.dspace.discovery.DiscoverQuery; import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.DiscoverResult; import org.dspace.discovery.DiscoverResult;
import org.dspace.discovery.IndexableObject; import org.dspace.discovery.IndexableObject;
@@ -103,7 +104,8 @@ public class OpenSearchController {
// do some sanity checking // do some sanity checking
if (!openSearchService.getFormats().contains(format)) { if (!openSearchService.getFormats().contains(format)) {
String err = "Format " + format + " is not supported."; // Since we are returning error response as HTML, escape any HTML in "format" param
String err = "Format " + Utils.addEntities(format) + " is not supported.";
response.setContentType("text/html"); response.setContentType("text/html");
response.setContentLength(err.length()); response.setContentLength(err.length());
response.getWriter().write(err); response.getWriter().write(err);
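Because this error message is returned as `text/html`, the user-supplied `format` value is now HTML-escaped with `Utils.addEntities()` before being reflected back, closing a reflected-XSS hole. A short illustration using a hypothetical malicious value:

```
import org.dspace.core.Utils;

/**
 * Illustration of the escaping added above. The "format" value here is a
 * hypothetical malicious input; after Utils.addEntities() it is reflected as
 * inert, entity-encoded text instead of executable markup.
 */
public class EscapeFormatExample {

    public static void main(String[] args) {
        String format = "<script>alert(1)</script>";
        String err = "Format " + Utils.addEntities(format) + " is not supported.";
        System.out.println(err);
    }
}
```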
View File
@@ -11,7 +11,9 @@ import java.io.IOException;
import java.util.Arrays; import java.util.Arrays;
import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.rest.model.AuthnRest; import org.dspace.app.rest.model.AuthnRest;
import org.dspace.core.Utils;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -47,14 +49,29 @@ public class ShibbolethRestController implements InitializingBean {
.register(this, Arrays.asList(new Link("/api/" + AuthnRest.CATEGORY, "shibboleth"))); .register(this, Arrays.asList(new Link("/api/" + AuthnRest.CATEGORY, "shibboleth")));
} }
// LGTM.com thinks this method has an unvalidated URL redirect (https://lgtm.com/rules/4840088/) in `redirectUrl`,
// even though we are clearly validating the hostname of `redirectUrl` and test it in ShibbolethRestControllerIT
@SuppressWarnings("lgtm[java/unvalidated-url-redirection]")
@RequestMapping(method = RequestMethod.GET) @RequestMapping(method = RequestMethod.GET)
public void shibboleth(HttpServletResponse response, public void shibboleth(HttpServletResponse response,
@RequestParam(name = "redirectUrl", required = false) String redirectUrl) throws IOException { @RequestParam(name = "redirectUrl", required = false) String redirectUrl) throws IOException {
if (redirectUrl == null) { if (redirectUrl == null) {
redirectUrl = configurationService.getProperty("dspace.ui.url"); redirectUrl = configurationService.getProperty("dspace.ui.url");
} }
log.info("Redirecting to " + redirectUrl);
response.sendRedirect(redirectUrl); // Validate that the redirectURL matches either the server or UI hostname. It *cannot* be an arbitrary URL.
String redirectHostName = Utils.getHostName(redirectUrl);
String serverHostName = Utils.getHostName(configurationService.getProperty("dspace.server.url"));
String clientHostName = Utils.getHostName(configurationService.getProperty("dspace.ui.url"));
if (StringUtils.equalsAnyIgnoreCase(redirectHostName, serverHostName, clientHostName)) {
log.debug("Shibboleth redirecting to " + redirectUrl);
response.sendRedirect(redirectUrl);
} else {
log.error("Invalid Shibboleth redirectURL=" + redirectUrl +
". URL doesn't match hostname of server or UI!");
response.sendError(HttpServletResponse.SC_BAD_REQUEST,
"Invalid redirectURL! Must match server or ui hostname.");
}
} }
} }
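The Shibboleth endpoint now only follows `redirectUrl` when its host matches the configured server or UI host, closing an open-redirect vector. The sketch below shows the same idea in isolation; `hostOf()` is a hypothetical stand-in for `org.dspace.core.Utils.getHostName()`, and the URLs are example values only.

```
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.commons.lang3.StringUtils;

/**
 * Standalone sketch of the open-redirect guard above. hostOf() is a
 * hypothetical stand-in for org.dspace.core.Utils.getHostName(); only targets
 * whose host matches the configured server or UI host are allowed.
 */
public class RedirectGuardExample {

    static String hostOf(String url) {
        try {
            return new URI(url).getHost();
        } catch (URISyntaxException e) {
            return null;
        }
    }

    static boolean isAllowedRedirect(String redirectUrl, String serverUrl, String uiUrl) {
        return StringUtils.equalsAnyIgnoreCase(hostOf(redirectUrl), hostOf(serverUrl), hostOf(uiUrl));
    }

    public static void main(String[] args) {
        String serverUrl = "https://demo.dspace.org/server";
        String uiUrl = "https://demo.dspace.org";
        System.out.println(isAllowedRedirect("https://demo.dspace.org/home", serverUrl, uiUrl)); // true
        System.out.println(isAllowedRedirect("https://evil.example.com/", serverUrl, uiUrl));    // false
    }
}
```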
View File
@@ -35,13 +35,14 @@ public class DiscoverFacetResultsConverter {
@Autowired @Autowired
private SearchFilterToAppliedFilterConverter searchFilterToAppliedFilterConverter; private SearchFilterToAppliedFilterConverter searchFilterToAppliedFilterConverter;
public FacetResultsRest convert(Context context, String facetName, String prefix, String query, String dsoType, public FacetResultsRest convert(Context context, String facetName, String prefix, String query,
String dsoScope, List<SearchFilter> searchFilters, DiscoverResult searchResult, List<String> dsoTypes, String dsoScope, List<SearchFilter> searchFilters,
DiscoveryConfiguration configuration, Pageable page, Projection projection) { DiscoverResult searchResult, DiscoveryConfiguration configuration, Pageable page,
Projection projection) {
FacetResultsRest facetResultsRest = new FacetResultsRest(); FacetResultsRest facetResultsRest = new FacetResultsRest();
facetResultsRest.setProjection(projection); facetResultsRest.setProjection(projection);
setRequestInformation(context, facetName, prefix, query, dsoType, dsoScope, searchFilters, searchResult, setRequestInformation(context, facetName, prefix, query, dsoTypes, dsoScope, searchFilters, searchResult,
configuration, facetResultsRest, page, projection); configuration, facetResultsRest, page, projection);
addToFacetResultList(facetName, searchResult, facetResultsRest, configuration, page, projection); addToFacetResultList(facetName, searchResult, facetResultsRest, configuration, page, projection);
@@ -72,14 +73,14 @@ public class DiscoverFacetResultsConverter {
return facetValueConverter.convert(value, projection); return facetValueConverter.convert(value, projection);
} }
private void setRequestInformation(Context context, String facetName, String prefix, String query, String dsoType, private void setRequestInformation(Context context, String facetName, String prefix, String query,
String dsoScope, List<SearchFilter> searchFilters, DiscoverResult searchResult, List<String> dsoTypes, String dsoScope, List<SearchFilter> searchFilters,
DiscoveryConfiguration configuration, FacetResultsRest facetResultsRest, DiscoverResult searchResult, DiscoveryConfiguration configuration,
Pageable page, Projection projection) { FacetResultsRest facetResultsRest, Pageable page, Projection projection) {
facetResultsRest.setQuery(query); facetResultsRest.setQuery(query);
facetResultsRest.setPrefix(prefix); facetResultsRest.setPrefix(prefix);
facetResultsRest.setScope(dsoScope); facetResultsRest.setScope(dsoScope);
facetResultsRest.setDsoType(dsoType); facetResultsRest.setDsoTypes(dsoTypes);
facetResultsRest.setFacetEntry(convertFacetEntry(facetName, searchResult, configuration, page, projection)); facetResultsRest.setFacetEntry(convertFacetEntry(facetName, searchResult, configuration, page, projection));
View File
@@ -38,7 +38,7 @@ public class DiscoverFacetsConverter {
@Autowired @Autowired
private SearchService searchService; private SearchService searchService;
public SearchResultsRest convert(Context context, String query, String dsoType, String configurationName, public SearchResultsRest convert(Context context, String query, List<String> dsoTypes, String configurationName,
String dsoScope, List<SearchFilter> searchFilters, final Pageable page, String dsoScope, List<SearchFilter> searchFilters, final Pageable page,
DiscoveryConfiguration configuration, DiscoverResult searchResult, DiscoveryConfiguration configuration, DiscoverResult searchResult,
Projection projection) { Projection projection) {
@@ -46,7 +46,7 @@ public class DiscoverFacetsConverter {
SearchResultsRest searchResultsRest = new SearchResultsRest(); SearchResultsRest searchResultsRest = new SearchResultsRest();
searchResultsRest.setProjection(projection); searchResultsRest.setProjection(projection);
setRequestInformation(context, query, dsoType, configurationName, dsoScope, searchFilters, page, setRequestInformation(context, query, dsoTypes, configurationName, dsoScope, searchFilters, page,
searchResultsRest); searchResultsRest);
addFacetValues(context, searchResult, searchResultsRest, configuration, projection); addFacetValues(context, searchResult, searchResultsRest, configuration, projection);
@@ -129,13 +129,13 @@ public class DiscoverFacetsConverter {
} }
} }
private void setRequestInformation(final Context context, final String query, final String dsoType, private void setRequestInformation(final Context context, final String query, final List<String> dsoTypes,
final String configurationName, final String scope, final String configurationName, final String scope,
final List<SearchFilter> searchFilters, final Pageable page, final List<SearchFilter> searchFilters, final Pageable page,
final SearchResultsRest resultsRest) { final SearchResultsRest resultsRest) {
resultsRest.setQuery(query); resultsRest.setQuery(query);
resultsRest.setConfiguration(configurationName); resultsRest.setConfiguration(configurationName);
resultsRest.setDsoType(dsoType); resultsRest.setDsoTypes(dsoTypes);
resultsRest.setSort(SearchResultsRest.Sorting.fromPage(page)); resultsRest.setSort(SearchResultsRest.Sorting.fromPage(page));
resultsRest.setScope(scope); resultsRest.setScope(scope);
View File
@@ -43,7 +43,7 @@ public class DiscoverResultConverter {
@Autowired @Autowired
private SearchFilterToAppliedFilterConverter searchFilterToAppliedFilterConverter; private SearchFilterToAppliedFilterConverter searchFilterToAppliedFilterConverter;
public SearchResultsRest convert(final Context context, final String query, final String dsoType, public SearchResultsRest convert(final Context context, final String query, final List<String> dsoTypes,
final String configurationName, final String scope, final String configurationName, final String scope,
final List<SearchFilter> searchFilters, final Pageable page, final List<SearchFilter> searchFilters, final Pageable page,
final DiscoverResult searchResult, final DiscoveryConfiguration configuration, final DiscoverResult searchResult, final DiscoveryConfiguration configuration,
@@ -52,7 +52,7 @@ public class DiscoverResultConverter {
SearchResultsRest resultsRest = new SearchResultsRest(); SearchResultsRest resultsRest = new SearchResultsRest();
resultsRest.setProjection(projection); resultsRest.setProjection(projection);
setRequestInformation(context, query, dsoType, configurationName, scope, searchFilters, page, resultsRest); setRequestInformation(context, query, dsoTypes, configurationName, scope, searchFilters, page, resultsRest);
addSearchResults(searchResult, resultsRest, projection); addSearchResults(searchResult, resultsRest, projection);
@@ -101,13 +101,13 @@ public class DiscoverResultConverter {
return null; return null;
} }
private void setRequestInformation(final Context context, final String query, final String dsoType, private void setRequestInformation(final Context context, final String query, final List<String> dsoTypes,
final String configurationName, final String scope, final String configurationName, final String scope,
final List<SearchFilter> searchFilters, final Pageable page, final List<SearchFilter> searchFilters, final Pageable page,
final SearchResultsRest resultsRest) { final SearchResultsRest resultsRest) {
resultsRest.setQuery(query); resultsRest.setQuery(query);
resultsRest.setConfiguration(configurationName); resultsRest.setConfiguration(configurationName);
resultsRest.setDsoType(dsoType); resultsRest.setDsoTypes(dsoTypes);
resultsRest.setScope(scope); resultsRest.setScope(scope);
View File
@@ -14,6 +14,8 @@ import java.sql.SQLException;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponse;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.security.RestAuthenticationService; import org.dspace.app.rest.security.RestAuthenticationService;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.springframework.beans.TypeMismatchException; import org.springframework.beans.TypeMismatchException;
@@ -41,6 +43,7 @@ import org.springframework.web.servlet.mvc.method.annotation.ResponseEntityExcep
*/ */
@ControllerAdvice @ControllerAdvice
public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionHandler { public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionHandler {
private static final Logger log = LogManager.getLogger(DSpaceApiExceptionControllerAdvice.class);
@Autowired @Autowired
private RestAuthenticationService restAuthenticationService; private RestAuthenticationService restAuthenticationService;
@@ -49,16 +52,16 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
protected void handleAuthorizeException(HttpServletRequest request, HttpServletResponse response, Exception ex) protected void handleAuthorizeException(HttpServletRequest request, HttpServletResponse response, Exception ex)
throws IOException { throws IOException {
if (restAuthenticationService.hasAuthenticationData(request)) { if (restAuthenticationService.hasAuthenticationData(request)) {
sendErrorResponse(request, response, ex, ex.getMessage(), HttpServletResponse.SC_FORBIDDEN); sendErrorResponse(request, response, ex, "Access is denied", HttpServletResponse.SC_FORBIDDEN);
} else { } else {
sendErrorResponse(request, response, ex, ex.getMessage(), HttpServletResponse.SC_UNAUTHORIZED); sendErrorResponse(request, response, ex, "Authentication is required", HttpServletResponse.SC_UNAUTHORIZED);
} }
} }
@ExceptionHandler({IllegalArgumentException.class, MultipartException.class}) @ExceptionHandler({IllegalArgumentException.class, MultipartException.class})
protected void handleWrongRequestException(HttpServletRequest request, HttpServletResponse response, protected void handleWrongRequestException(HttpServletRequest request, HttpServletResponse response,
Exception ex) throws IOException { Exception ex) throws IOException {
sendErrorResponse(request, response, ex, ex.getMessage(), HttpServletResponse.SC_BAD_REQUEST); sendErrorResponse(request, response, ex, "Request is invalid or incorrect", HttpServletResponse.SC_BAD_REQUEST);
} }
@ExceptionHandler(SQLException.class) @ExceptionHandler(SQLException.class)
@@ -72,24 +75,24 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
protected void handleIOException(HttpServletRequest request, HttpServletResponse response, Exception ex) protected void handleIOException(HttpServletRequest request, HttpServletResponse response, Exception ex)
throws IOException { throws IOException {
sendErrorResponse(request, response, ex, sendErrorResponse(request, response, ex,
"An internal read or write operation failed (IO Exception)", "An internal read or write operation failed",
HttpServletResponse.SC_INTERNAL_SERVER_ERROR); HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
} }
@ExceptionHandler(MethodNotAllowedException.class) @ExceptionHandler(MethodNotAllowedException.class)
protected void methodNotAllowedException(HttpServletRequest request, HttpServletResponse response, protected void methodNotAllowedException(HttpServletRequest request, HttpServletResponse response,
Exception ex) throws IOException { Exception ex) throws IOException {
sendErrorResponse(request, response, ex, ex.getMessage(), HttpServletResponse.SC_METHOD_NOT_ALLOWED); sendErrorResponse(request, response, ex, "Method is not allowed or supported",
HttpServletResponse.SC_METHOD_NOT_ALLOWED);
} }
@ExceptionHandler( {UnprocessableEntityException.class}) @ExceptionHandler( {UnprocessableEntityException.class})
protected void handleUnprocessableEntityException(HttpServletRequest request, HttpServletResponse response, protected void handleUnprocessableEntityException(HttpServletRequest request, HttpServletResponse response,
Exception ex) throws IOException { Exception ex) throws IOException {
//422 is not defined in HttpServletResponse. Its meaning is "Unprocessable Entity". //422 is not defined in HttpServletResponse. Its meaning is "Unprocessable Entity".
//Using the value from HttpStatus. //Using the value from HttpStatus.
sendErrorResponse(request, response, null, sendErrorResponse(request, response, null,
ex.getMessage(), "Unprocessable or invalid entity",
HttpStatus.UNPROCESSABLE_ENTITY.value()); HttpStatus.UNPROCESSABLE_ENTITY.value());
} }
@@ -98,7 +101,7 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
throws IOException { throws IOException {
// we want the 400 status for missing parameters, see https://jira.lyrasis.org/browse/DS-4428 // we want the 400 status for missing parameters, see https://jira.lyrasis.org/browse/DS-4428
sendErrorResponse(request, response, null, sendErrorResponse(request, response, null,
ex.getMessage(), "A required parameter is invalid",
HttpStatus.BAD_REQUEST.value()); HttpStatus.BAD_REQUEST.value());
} }
@@ -107,7 +110,7 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
throws IOException { throws IOException {
// we want the 400 status for missing parameters, see https://jira.lyrasis.org/browse/DS-4428 // we want the 400 status for missing parameters, see https://jira.lyrasis.org/browse/DS-4428
sendErrorResponse(request, response, null, sendErrorResponse(request, response, null,
ex.getMessage(), "A required parameter is missing",
HttpStatus.BAD_REQUEST.value()); HttpStatus.BAD_REQUEST.value());
} }
@@ -137,7 +140,7 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
} else { } else {
returnCode = HttpServletResponse.SC_INTERNAL_SERVER_ERROR; returnCode = HttpServletResponse.SC_INTERNAL_SERVER_ERROR;
} }
sendErrorResponse(request, response, ex, "An Exception has occured", returnCode); sendErrorResponse(request, response, ex, "An exception has occurred", returnCode);
} }
@@ -147,6 +150,13 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
//Make sure Spring picks up this exception //Make sure Spring picks up this exception
request.setAttribute(EXCEPTION_ATTRIBUTE, ex); request.setAttribute(EXCEPTION_ATTRIBUTE, ex);
// For now, just logging server errors.
// We don't want to fill logs with bad/invalid REST API requests.
if (statusCode == HttpServletResponse.SC_INTERNAL_SERVER_ERROR) {
// Log the full error and status code
log.error("{} (status:{})", message, statusCode, ex);
}
//Exception properties will be set by org.springframework.boot.web.support.ErrorPageFilter //Exception properties will be set by org.springframework.boot.web.support.ErrorPageFilter
response.sendError(statusCode, message); response.sendError(statusCode, message);
} }
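The advice above stops echoing raw exception messages to clients and logs only server errors in full. A condensed sketch of that policy, not the real controller advice, is:

```
import java.io.IOException;

import javax.servlet.http.HttpServletResponse;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

/**
 * Condensed sketch of the policy adopted above: the client receives a fixed,
 * generic message so internal exception details are not leaked, while 5xx
 * responses are still logged with the full exception for operators.
 */
public class ErrorResponsePolicyExample {

    private static final Logger log = LogManager.getLogger(ErrorResponsePolicyExample.class);

    static void sendErrorResponse(HttpServletResponse response, Exception ex,
                                  String message, int statusCode) throws IOException {
        if (statusCode == HttpServletResponse.SC_INTERNAL_SERVER_ERROR) {
            log.error("{} (status:{})", message, statusCode, ex);
        }
        response.sendError(statusCode, message);
    }
}
```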
View File
@@ -28,7 +28,7 @@ public abstract class DiscoveryRestHalLinkFactory<T> extends HalLinkFactory<T, D
public UriComponentsBuilder buildSearchBaseLink(final DiscoveryResultsRest data) { public UriComponentsBuilder buildSearchBaseLink(final DiscoveryResultsRest data) {
try { try {
UriComponentsBuilder uriBuilder = uriBuilder(getMethodOn() UriComponentsBuilder uriBuilder = uriBuilder(getMethodOn()
.getSearchObjects(data.getQuery(), data.getDsoType(), .getSearchObjects(data.getQuery(), data.getDsoTypes(),
data.getScope(), data.getConfiguration(), data.getScope(), data.getConfiguration(),
null, null)); null, null));
@@ -43,7 +43,7 @@ public abstract class DiscoveryRestHalLinkFactory<T> extends HalLinkFactory<T, D
try { try {
UriComponentsBuilder uriBuilder = uriBuilder( UriComponentsBuilder uriBuilder = uriBuilder(
getMethodOn().getFacetValues(data.getFacetEntry().getName(), data.getPrefix(), data.getQuery(), getMethodOn().getFacetValues(data.getFacetEntry().getName(), data.getPrefix(), data.getQuery(),
data.getDsoType(), data.getScope(), data.getConfiguration(), null, null)); data.getDsoTypes(), data.getScope(), data.getConfiguration(), null, null));
return addFilterParams(uriBuilder, data); return addFilterParams(uriBuilder, data);
} catch (Exception ex) { } catch (Exception ex) {
@@ -54,7 +54,7 @@ public abstract class DiscoveryRestHalLinkFactory<T> extends HalLinkFactory<T, D
protected UriComponentsBuilder buildSearchFacetsBaseLink(final SearchResultsRest data) { protected UriComponentsBuilder buildSearchFacetsBaseLink(final SearchResultsRest data) {
try { try {
UriComponentsBuilder uriBuilder = uriBuilder(getMethodOn().getFacets(data.getQuery(), data.getDsoType(), UriComponentsBuilder uriBuilder = uriBuilder(getMethodOn().getFacets(data.getQuery(), data.getDsoTypes(),
data.getScope(), data.getConfiguration(), null, null)); data.getScope(), data.getConfiguration(), null, null));
uriBuilder = addSortingParms(uriBuilder, data); uriBuilder = addSortingParms(uriBuilder, data);

View File

@@ -8,6 +8,7 @@
package org.dspace.app.rest.link.search; package org.dspace.app.rest.link.search;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List;
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.BooleanUtils; import org.apache.commons.lang3.BooleanUtils;
@@ -39,7 +40,7 @@ public class SearchFacetEntryHalLinkFactory extends DiscoveryRestHalLinkFactory<
DiscoveryResultsRest searchData = halResource.getSearchData(); DiscoveryResultsRest searchData = halResource.getSearchData();
String query = searchData == null ? null : searchData.getQuery(); String query = searchData == null ? null : searchData.getQuery();
String dsoType = searchData == null ? null : searchData.getDsoType(); List<String> dsoType = searchData == null ? null : searchData.getDsoTypes();
String scope = searchData == null ? null : searchData.getScope(); String scope = searchData == null ? null : searchData.getScope();
String configuration = searchData == null ? null : searchData.getConfiguration(); String configuration = searchData == null ? null : searchData.getConfiguration();

View File

@@ -16,6 +16,10 @@ import com.fasterxml.jackson.annotation.JsonProperty;
* @author Jelle Pelgrims (jelle.pelgrims at atmire.com) * @author Jelle Pelgrims (jelle.pelgrims at atmire.com)
*/ */
@LinksRest(links = { @LinksRest(links = {
@LinkRest(
name = BundleRest.ITEM,
method = "getItem"
),
@LinkRest( @LinkRest(
name = BundleRest.BITSTREAMS, name = BundleRest.BITSTREAMS,
method = "getBitstreams" method = "getBitstreams"
@@ -30,6 +34,7 @@ public class BundleRest extends DSpaceObjectRest {
public static final String PLURAL_NAME = "bundles"; public static final String PLURAL_NAME = "bundles";
public static final String CATEGORY = RestAddressableModel.CORE; public static final String CATEGORY = RestAddressableModel.CORE;
public static final String ITEM = "item";
public static final String BITSTREAMS = "bitstreams"; public static final String BITSTREAMS = "bitstreams";
public static final String PRIMARY_BITSTREAM = "primaryBitstream"; public static final String PRIMARY_BITSTREAM = "primaryBitstream";

View File

@@ -27,7 +27,7 @@ public abstract class DiscoveryResultsRest extends BaseObjectRest<String> {
private List<SearchResultsRest.AppliedFilter> appliedFilters; private List<SearchResultsRest.AppliedFilter> appliedFilters;
private SearchResultsRest.Sorting sort; private SearchResultsRest.Sorting sort;
@JsonIgnore @JsonIgnore
private String dsoType; private List<String> dsoTypes;
@JsonIgnore @JsonIgnore
private List<SearchFilter> searchFilters; private List<SearchFilter> searchFilters;
private String configuration; private String configuration;
@@ -52,12 +52,12 @@ public abstract class DiscoveryResultsRest extends BaseObjectRest<String> {
this.query = query; this.query = query;
} }
public String getDsoType() { public List<String> getDsoTypes() {
return dsoType; return dsoTypes;
} }
public void setDsoType(final String dsoType) { public void setDsoTypes(final List<String> dsoTypes) {
this.dsoType = dsoType; this.dsoTypes = dsoTypes;
} }
public String getScope() { public String getScope() {

View File

@@ -27,7 +27,6 @@ public class RelationshipRest extends BaseObjectRest<Integer> {
@JsonIgnore @JsonIgnore
private UUID rightId; private UUID rightId;
private int relationshipTypeId;
private RelationshipTypeRest relationshipType; private RelationshipTypeRest relationshipType;
private int leftPlace; private int leftPlace;
private int rightPlace; private int rightPlace;
@@ -90,14 +89,6 @@ public class RelationshipRest extends BaseObjectRest<Integer> {
this.rightPlace = rightPlace; this.rightPlace = rightPlace;
} }
public int getRelationshipTypeId() {
return relationshipTypeId;
}
public void setRelationshipTypeId(int relationshipTypeId) {
this.relationshipTypeId = relationshipTypeId;
}
public String getRightwardValue() { public String getRightwardValue() {
return rightwardValue; return rightwardValue;
} }

View File

@@ -7,6 +7,10 @@
*/ */
package org.dspace.app.rest.repository; package org.dspace.app.rest.repository;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
@@ -145,9 +149,11 @@ public class AuthorizationRestRepository extends DSpaceRestRepository<Authorizat
@PreAuthorize("#epersonUuid==null || hasPermission(#epersonUuid, 'EPERSON', 'READ')") @PreAuthorize("#epersonUuid==null || hasPermission(#epersonUuid, 'EPERSON', 'READ')")
@SearchRestMethod(name = "object") @SearchRestMethod(name = "object")
public Page<AuthorizationRest> findByObject(@Parameter(value = "uri", required = true) String uri, public Page<AuthorizationRest> findByObject(@Parameter(value = "uri", required = true) String uri,
@Parameter(value = "eperson") UUID epersonUuid, @Parameter(value = "eperson") UUID epersonUuid, @Parameter(value = "feature") String featureName,
Pageable pageable) throws AuthorizeException, SQLException { Pageable pageable) throws AuthorizeException, SQLException {
Context context = obtainContext(); Context context = obtainContext();
BaseObjectRest obj = utils.getBaseObjectRestFromUri(context, uri); BaseObjectRest obj = utils.getBaseObjectRestFromUri(context, uri);
if (obj == null) { if (obj == null) {
return null; return null;
@@ -162,11 +168,16 @@ public class AuthorizationRestRepository extends DSpaceRestRepository<Authorizat
context.switchContextUser(user); context.switchContextUser(user);
} }
List<AuthorizationFeature> features = authorizationFeatureService.findByResourceType(obj.getUniqueType()); List<Authorization> authorizations;
List<Authorization> authorizations = new ArrayList<Authorization>(); if (isNotBlank(featureName)) {
for (AuthorizationFeature f : features) { authorizations = findByObjectAndFeature(context, user, obj, featureName);
if (authorizationFeatureService.isAuthorized(context, f, obj)) { } else {
authorizations.add(new Authorization(user, f, obj)); List<AuthorizationFeature> features = authorizationFeatureService.findByResourceType(obj.getUniqueType());
authorizations = new ArrayList<>();
for (AuthorizationFeature f : features) {
if (authorizationFeatureService.isAuthorized(context, f, obj)) {
authorizations.add(new Authorization(user, f, obj));
}
} }
} }
@@ -177,57 +188,17 @@ public class AuthorizationRestRepository extends DSpaceRestRepository<Authorizat
return converter.toRestPage(authorizations, pageable, utils.obtainProjection()); return converter.toRestPage(authorizations, pageable, utils.obtainProjection());
} }
/** private List<Authorization> findByObjectAndFeature(
* It returns the authorization related to the requested feature if granted to the specified eperson or to the Context context, EPerson user, BaseObjectRest obj, String featureName
* anonymous user. Only administrators and the user identified by the epersonUuid parameter can access this method ) throws SQLException {
*
* @param uri AuthorizationFeature feature = authorizationFeatureService.find(featureName);
* the uri of the object to check the authorization against
* @param epersonUuid if (!authorizationFeatureService.isAuthorized(context, feature, obj)) {
* the eperson uuid to use in the authorization evaluation return emptyList();
* @param featureName
* limit the authorization check to only the feature identified via its name
* @param pageable
* the pagination options
* @return the list of matching authorization available for the requested user and object, filtered by feature if
* provided
* @throws AuthorizeException
* @throws SQLException
*/
@PreAuthorize("#epersonUuid==null || hasPermission(#epersonUuid, 'EPERSON', 'READ')")
@SearchRestMethod(name = "objectAndFeature")
public AuthorizationRest findByObjectAndFeature(@Parameter(value = "uri", required = true) String uri,
@Parameter(value = "eperson") UUID epersonUuid,
@Parameter(value = "feature", required = true) String featureName,
Pageable pageable) throws AuthorizeException, SQLException {
Context context = obtainContext();
BaseObjectRest obj = utils.getBaseObjectRestFromUri(context, uri);
if (obj == null) {
return null;
} }
EPerson currUser = context.getCurrentUser(); return singletonList(new Authorization(user, feature, obj));
// get the user specified in the requested parameters, can be null for anonymous
EPerson user = getUserFromRequestParameter(context, epersonUuid);
if (currUser != user) {
// Temporarily change the Context's current user in order to retrieve
// authorizations based on that user
context.switchContextUser(user);
}
AuthorizationFeature feature = authorizationFeatureService.find(featureName);
AuthorizationRest authorizationRest = null;
if (authorizationFeatureService.isAuthorized(context, feature, obj)) {
Authorization authz = new Authorization();
authz.setEperson(user);
authz.setFeature(feature);
authz.setObject(obj);
authorizationRest = converter.toRest(authz, utils.obtainProjection());
}
if (currUser != user) {
// restore the real current user
context.restoreContextUser();
}
return authorizationRest;
} }
/** /**
@@ -242,25 +213,27 @@ public class AuthorizationRestRepository extends DSpaceRestRepository<Authorizat
*/ */
private EPerson getUserFromRequestParameter(Context context, UUID epersonUuid) private EPerson getUserFromRequestParameter(Context context, UUID epersonUuid)
throws AuthorizeException, SQLException { throws AuthorizeException, SQLException {
EPerson currUser = context.getCurrentUser(); EPerson currUser = context.getCurrentUser();
EPerson user = currUser;
if (epersonUuid != null) { if (epersonUuid == null) {
// no user is specified in the request parameters, check the permissions for the current user
return currUser;
} else {
// a user is specified in the request parameters
if (currUser == null) { if (currUser == null) {
throw new AuthorizeException("attempt to anonymously access the authorization of the eperson " throw new AuthorizeException("attempt to anonymously access the authorization of the eperson "
+ epersonUuid); + epersonUuid);
} else {
// an user is specified in the request parameters } else if (!authorizeService.isAdmin(context) && !epersonUuid.equals(currUser.getID())) {
if (!authorizeService.isAdmin(context) && !epersonUuid.equals(currUser.getID())) { throw new AuthorizeException("attempt to access the authorization of the eperson " + epersonUuid
throw new AuthorizeException("attempt to access the authorization of the eperson " + epersonUuid + " as a non-admin; only system administrators can see the authorization of other users");
+ " only system administrators can see the authorization of other users");
}
user = epersonService.find(context, epersonUuid);
} }
} else {
// the request asks to check the permission for the anonymous user return epersonService.find(context, epersonUuid);
user = null;
} }
return user;
} }
@Override @Override
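With the separate `objectAndFeature` search removed, the `object` search now accepts an optional `feature` request parameter and returns at most one authorization when it is set. A rough client-side sketch using only the JDK's `java.net.http` client; the base URL, object URI, token, and feature name are placeholders, and the search path is assumed to follow the usual `/api/{category}/{plural}/search/{name}` layout:

```
import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;

public class AuthorizationSearchClient {

    public static void main(String[] args) throws Exception {
        String base = "https://demo.dspace.org/server";           // placeholder server URL
        String objectUri = base + "/api/core/items/<item-uuid>";  // placeholder object URI
        String feature = "<feature-name>";                        // placeholder feature name

        String query = "uri=" + URLEncoder.encode(objectUri, StandardCharsets.UTF_8)
                + "&feature=" + URLEncoder.encode(feature, StandardCharsets.UTF_8);

        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(base + "/api/authz/authorizations/search/object?" + query))
                .header("Authorization", "Bearer <jwt>")          // placeholder token
                .GET()
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());

        // An empty result page means the feature is not granted for that user and object.
        System.out.println(response.statusCode());
        System.out.println(response.body());
    }
}
```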

View File

@@ -0,0 +1,62 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.repository;
import java.sql.SQLException;
import java.util.UUID;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;
import org.dspace.app.rest.model.BundleRest;
import org.dspace.app.rest.model.ItemRest;
import org.dspace.app.rest.projection.Projection;
import org.dspace.content.Bundle;
import org.dspace.content.Item;
import org.dspace.content.service.BundleService;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Pageable;
import org.springframework.data.rest.webmvc.ResourceNotFoundException;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Component;
/**
* Link repository for "item" subresource of an individual bundle.
*/
@Component(BundleRest.CATEGORY + "." + BundleRest.NAME + "." + BundleRest.ITEM)
public class BundleItemLinkRepository extends AbstractDSpaceRestRepository
implements LinkRestRepository {
@Autowired
BundleService bundleService;
/**
* Get the item in which the provided bundle resides
*/
@PreAuthorize("hasPermission(#bundleId, 'BUNDLE', 'READ')")
public ItemRest getItem(@Nullable HttpServletRequest request,
UUID bundleId,
@Nullable Pageable optionalPageable,
Projection projection) {
try {
Context context = obtainContext();
Bundle bundle = bundleService.find(context, bundleId);
if (bundle == null) {
throw new ResourceNotFoundException("No such bundle: " + bundleId);
}
Item item = bundle.getItems().get(0);
if (item == null) {
return null;
}
return converter.toRest(item, projection);
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
}
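This new link repository backs an `/item` subresource on every bundle, registered under the `core.bundle.item` component name. A sketch of how an integration test might exercise it, assuming DSpace's `AbstractControllerIntegrationTest` harness and a `BundleBuilder` alongside the other test builders (the builder names and signatures are assumptions, not taken from this diff):

```
import static org.hamcrest.Matchers.is;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.builder.BundleBuilder;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.junit.Test;

public class BundleItemLinkIT extends AbstractControllerIntegrationTest {

    @Test
    public void findOwningItemOfBundle() throws Exception {
        context.turnOffAuthorisationSystem();
        Community community = CommunityBuilder.createCommunity(context).withName("Community").build();
        Collection collection = CollectionBuilder.createCollection(context, community)
                                                 .withName("Collection").build();
        Item item = ItemBuilder.createItem(context, collection).withTitle("Test item").build();
        // BundleBuilder is assumed to exist; adjust to however bundles are created in the test suite.
        Bundle bundle = BundleBuilder.createBundle(context, item).withName("ORIGINAL").build();
        context.restoreAuthSystemState();

        String token = getAuthToken(admin.getEmail(), password);
        getClient(token).perform(get("/api/core/bundles/" + bundle.getID() + "/item"))
                        .andExpect(status().isOk())
                        .andExpect(jsonPath("$.uuid", is(item.getID().toString())));
    }
}
```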

View File

@@ -89,7 +89,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
return discoverConfigurationConverter.convert(discoveryConfiguration, utils.obtainProjection()); return discoverConfigurationConverter.convert(discoveryConfiguration, utils.obtainProjection());
} }
public SearchResultsRest getSearchObjects(final String query, final String dsoType, final String dsoScope, public SearchResultsRest getSearchObjects(final String query, final List<String> dsoTypes, final String dsoScope,
final String configuration, final String configuration,
final List<SearchFilter> searchFilters, final Pageable page, final List<SearchFilter> searchFilters, final Pageable page,
final Projection projection) { final Projection projection) {
@@ -103,7 +103,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
try { try {
discoverQuery = queryBuilder discoverQuery = queryBuilder
.buildQuery(context, scopeObject, discoveryConfiguration, query, searchFilters, dsoType, page); .buildQuery(context, scopeObject, discoveryConfiguration, query, searchFilters, dsoTypes, page);
searchResult = searchService.search(context, scopeObject, discoverQuery); searchResult = searchService.search(context, scopeObject, discoverQuery);
} catch (SearchServiceException e) { } catch (SearchServiceException e) {
@@ -112,7 +112,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
} }
return discoverResultConverter return discoverResultConverter
.convert(context, query, dsoType, configuration, dsoScope, searchFilters, page, searchResult, .convert(context, query, dsoTypes, configuration, dsoScope, searchFilters, page, searchResult,
discoveryConfiguration, projection); discoveryConfiguration, projection);
} }
@@ -130,7 +130,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
return discoverSearchSupportConverter.convert(); return discoverSearchSupportConverter.convert();
} }
public FacetResultsRest getFacetObjects(String facetName, String prefix, String query, String dsoType, public FacetResultsRest getFacetObjects(String facetName, String prefix, String query, List<String> dsoTypes,
String dsoScope, final String configuration, List<SearchFilter> searchFilters, Pageable page) { String dsoScope, final String configuration, List<SearchFilter> searchFilters, Pageable page) {
Context context = obtainContext(); Context context = obtainContext();
@@ -143,7 +143,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
DiscoverQuery discoverQuery = null; DiscoverQuery discoverQuery = null;
try { try {
discoverQuery = queryBuilder.buildFacetQuery(context, scopeObject, discoveryConfiguration, prefix, query, discoverQuery = queryBuilder.buildFacetQuery(context, scopeObject, discoveryConfiguration, prefix, query,
searchFilters, dsoType, page, facetName); searchFilters, dsoTypes, page, facetName);
searchResult = searchService.search(context, scopeObject, discoverQuery); searchResult = searchService.search(context, scopeObject, discoverQuery);
} catch (SearchServiceException e) { } catch (SearchServiceException e) {
@@ -152,12 +152,12 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
} }
FacetResultsRest facetResultsRest = discoverFacetResultsConverter.convert(context, facetName, prefix, query, FacetResultsRest facetResultsRest = discoverFacetResultsConverter.convert(context, facetName, prefix, query,
dsoType, dsoScope, searchFilters, searchResult, discoveryConfiguration, page, dsoTypes, dsoScope, searchFilters, searchResult, discoveryConfiguration, page,
utils.obtainProjection()); utils.obtainProjection());
return facetResultsRest; return facetResultsRest;
} }
public SearchResultsRest getAllFacets(String query, String dsoType, String dsoScope, String configuration, public SearchResultsRest getAllFacets(String query, List<String> dsoTypes, String dsoScope, String configuration,
List<SearchFilter> searchFilters) { List<SearchFilter> searchFilters) {
Context context = obtainContext(); Context context = obtainContext();
@@ -171,14 +171,14 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
try { try {
discoverQuery = queryBuilder discoverQuery = queryBuilder
.buildQuery(context, scopeObject, discoveryConfiguration, query, searchFilters, dsoType, page); .buildQuery(context, scopeObject, discoveryConfiguration, query, searchFilters, dsoTypes, page);
searchResult = searchService.search(context, scopeObject, discoverQuery); searchResult = searchService.search(context, scopeObject, discoverQuery);
} catch (SearchServiceException e) { } catch (SearchServiceException e) {
log.error("Error while searching with Discovery", e); log.error("Error while searching with Discovery", e);
} }
SearchResultsRest searchResultsRest = discoverFacetsConverter.convert(context, query, dsoType, SearchResultsRest searchResultsRest = discoverFacetsConverter.convert(context, query, dsoTypes,
configuration, dsoScope, searchFilters, page, discoveryConfiguration, searchResult, configuration, dsoScope, searchFilters, page, discoveryConfiguration, searchResult,
utils.obtainProjection()); utils.obtainProjection());

View File

@@ -35,7 +35,6 @@ public class DSpace401AuthenticationEntryPoint implements AuthenticationEntryPoi
response.setHeader("WWW-Authenticate", response.setHeader("WWW-Authenticate",
restAuthenticationService.getWwwAuthenticateHeaderValue(request, response)); restAuthenticationService.getWwwAuthenticateHeaderValue(request, response));
response.sendError(HttpServletResponse.SC_UNAUTHORIZED, response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Authentication is required");
authException.getMessage());
} }
} }
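The entry point still advertises the available authentication methods via `WWW-Authenticate`, but the body is now a fixed message rather than the exception text, so nothing internal leaks to unauthenticated callers. A minimal sketch of the same idea against Spring Security's `AuthenticationEntryPoint` contract; the header value is a placeholder, not the one DSpace computes:

```
import java.io.IOException;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.springframework.security.core.AuthenticationException;
import org.springframework.security.web.AuthenticationEntryPoint;

public class GenericMessage401EntryPoint implements AuthenticationEntryPoint {

    @Override
    public void commence(HttpServletRequest request, HttpServletResponse response,
                         AuthenticationException authException) throws IOException {
        // Tell the client how it can authenticate...
        response.setHeader("WWW-Authenticate", "Bearer realm=\"DSpace REST API\""); // placeholder value
        // ...but keep the body generic; authException.getMessage() stays server-side.
        response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Authentication is required");
    }
}
```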

View File

@@ -31,7 +31,7 @@ import org.springframework.stereotype.Component;
/** /**
* An authenticated user is allowed to interact with a pool task only if it is in his list. * An authenticated user is allowed to interact with a pool task only if it is in his list.
* *
* @author Andrea Bollini (andrea.bollini at 4science.it) * @author Andrea Bollini (andrea.bollini at 4science.it)
*/ */
@Component @Component
@@ -75,7 +75,7 @@ public class PoolTaskRestPermissionEvaluatorPlugin extends RestObjectPermissionE
XmlWorkflowItem workflowItem = poolTask.getWorkflowItem(); XmlWorkflowItem workflowItem = poolTask.getWorkflowItem();
PoolTask poolTask2 = poolTaskService.findByWorkflowIdAndEPerson(context, workflowItem, ePerson); PoolTask poolTask2 = poolTaskService.findByWorkflowIdAndEPerson(context, workflowItem, ePerson);
if (poolTask2 != null && poolTask2.getID() == poolTask.getID()) { if (poolTask2 != null && poolTask2.getID().equals(poolTask.getID())) {
return true; return true;
} }
} catch (SQLException | AuthorizeException | IOException e) { } catch (SQLException | AuthorizeException | IOException e) {
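The pool task comparison switches from `==` to `equals()` because the task IDs are boxed `Integer`s: `==` compares references and only appears to work for small values that sit in the JVM's integer cache. A self-contained illustration of the trap:

```
public class BoxedIdComparison {

    public static void main(String[] args) {
        Integer smallA = 127, smallB = 127;   // inside the default Integer cache (-128..127)
        Integer bigA = 1024, bigB = 1024;     // outside the cache: two distinct objects

        System.out.println(smallA == smallB);  // true, misleadingly
        System.out.println(bigA == bigB);      // false, even though the values match
        System.out.println(bigA.equals(bigB)); // true: compares values, not references
    }
}
```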

View File

@@ -77,20 +77,21 @@ public class StatelessAuthenticationFilter extends BasicAuthenticationFilter {
HttpServletResponse res, HttpServletResponse res,
FilterChain chain) throws IOException, ServletException { FilterChain chain) throws IOException, ServletException {
Authentication authentication = null; Authentication authentication;
try { try {
authentication = getAuthentication(req, res); authentication = getAuthentication(req, res);
} catch (AuthorizeException e) { } catch (AuthorizeException e) {
res.sendError(HttpServletResponse.SC_UNAUTHORIZED, e.getMessage()); // just return an error, but do not log
log.error(e.getMessage(), e); res.sendError(HttpServletResponse.SC_UNAUTHORIZED, "Authentication is required");
return; return;
} catch (IllegalArgumentException | SQLException e) { } catch (IllegalArgumentException | SQLException e) {
res.sendError(HttpServletResponse.SC_BAD_REQUEST, e.getMessage()); res.sendError(HttpServletResponse.SC_BAD_REQUEST, "Authentication request is invalid or incorrect");
log.error(e.getMessage(), e); log.error("Authentication request is invalid or incorrect (status:{})",
HttpServletResponse.SC_BAD_REQUEST, e);
return; return;
} catch (AccessDeniedException e) { } catch (AccessDeniedException e) {
res.sendError(HttpServletResponse.SC_FORBIDDEN, e.getMessage()); res.sendError(HttpServletResponse.SC_FORBIDDEN, "Access is denied");
log.error(e.getMessage(), e); log.error("Access is denied (status:{})", HttpServletResponse.SC_FORBIDDEN, e);
return; return;
} }
if (authentication != null) { if (authentication != null) {
@@ -134,7 +135,7 @@ public class StatelessAuthenticationFilter extends BasicAuthenticationFilter {
if (configurationService.getBooleanProperty("webui.user.assumelogin")) { if (configurationService.getBooleanProperty("webui.user.assumelogin")) {
return getOnBehalfOfAuthentication(context, onBehalfOfParameterValue, res); return getOnBehalfOfAuthentication(context, onBehalfOfParameterValue, res);
} else { } else {
throw new IllegalArgumentException("The login as feature is not allowed" + throw new IllegalArgumentException("The 'login as' feature is not allowed" +
" due to the current configuration"); " due to the current configuration");
} }
} }
@@ -146,7 +147,7 @@ public class StatelessAuthenticationFilter extends BasicAuthenticationFilter {
} }
} else { } else {
if (request.getHeader(ON_BEHALF_OF_REQUEST_PARAM) != null) { if (request.getHeader(ON_BEHALF_OF_REQUEST_PARAM) != null) {
throw new AuthorizeException("Only admins are allowed to use the login as feature"); throw new AuthorizeException("Must be logged in (as an admin) to use the 'login as' feature");
} }
} }

View File

@@ -154,6 +154,7 @@ public class JWTTokenRestAuthenticationServiceImpl implements RestAuthentication
Cookie cookie = new Cookie(AUTHORIZATION_COOKIE, ""); Cookie cookie = new Cookie(AUTHORIZATION_COOKIE, "");
cookie.setHttpOnly(true); cookie.setHttpOnly(true);
cookie.setMaxAge(0); cookie.setMaxAge(0);
cookie.setSecure(true);
response.addCookie(cookie); response.addCookie(cookie);
} }
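The expired replacement cookie is now flagged `Secure` as well, keeping it consistent with how the original authorization cookie is presumably issued over HTTPS. A small standalone sketch of the clearing pattern using the servlet API; the cookie name and path are placeholders:

```
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletResponse;

public final class CookieClearing {

    // Overwrite an HttpOnly, Secure cookie with an empty, already-expired copy.
    static void clearAuthCookie(HttpServletResponse response) {
        Cookie cookie = new Cookie("Authorization-cookie", ""); // placeholder cookie name
        cookie.setHttpOnly(true);  // not readable from JavaScript
        cookie.setSecure(true);    // only sent over HTTPS, matching the original cookie
        cookie.setMaxAge(0);       // expire immediately
        cookie.setPath("/");       // placeholder; should match the path of the original cookie
        response.addCookie(cookie);
    }
}
```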

View File

@@ -7,6 +7,10 @@
*/ */
package org.dspace.app.rest.utils; package org.dspace.app.rest.utils;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
@@ -65,14 +69,47 @@ public class DiscoverQueryBuilder implements InitializingBean {
pageSizeLimit = configurationService.getIntProperty("rest.search.max.results", 100); pageSizeLimit = configurationService.getIntProperty("rest.search.max.results", 100);
} }
/**
* Build a discovery query
*
* @param context the DSpace context
* @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query
* @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query
* @param dsoType only include search results with this type
* @param page the pageable for this discovery query
*/
public DiscoverQuery buildQuery(Context context, IndexableObject scope, public DiscoverQuery buildQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration, DiscoveryConfiguration discoveryConfiguration,
String query, List<SearchFilter> searchFilters, String query, List<SearchFilter> searchFilters,
String dsoType, Pageable page) String dsoType, Pageable page)
throws DSpaceBadRequestException { throws DSpaceBadRequestException {
List<String> dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList();
return buildQuery(context, scope, discoveryConfiguration, query, searchFilters, dsoTypes, page);
}
/**
* Build a discovery query
*
* @param context the DSpace context
* @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query
* @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query
* @param dsoTypes only include search results with one of these types
* @param page the pageable for this discovery query
*/
public DiscoverQuery buildQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration,
String query, List<SearchFilter> searchFilters,
List<String> dsoTypes, Pageable page)
throws DSpaceBadRequestException {
DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters, DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters,
dsoType); dsoTypes);
//When all search criteria are set, configure facet results //When all search criteria are set, configure facet results
addFaceting(context, scope, queryArgs, discoveryConfiguration); addFaceting(context, scope, queryArgs, discoveryConfiguration);
@@ -98,14 +135,52 @@ public class DiscoverQueryBuilder implements InitializingBean {
} }
} }
/**
* Create a discovery facet query.
*
* @param context the DSpace context
* @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query
* @param prefix                 limit the facet results to those starting with the given prefix.
* @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query
* @param dsoType only include search results with this type
* @param page the pageable for this discovery query
* @param facetName the facet field
*/
public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope, public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration, DiscoveryConfiguration discoveryConfiguration,
String prefix, String query, List<SearchFilter> searchFilters, String prefix, String query, List<SearchFilter> searchFilters,
String dsoType, Pageable page, String facetName) String dsoType, Pageable page, String facetName)
throws DSpaceBadRequestException { throws DSpaceBadRequestException {
List<String> dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList();
return buildFacetQuery(
context, scope, discoveryConfiguration, prefix, query, searchFilters, dsoTypes, page, facetName);
}
/**
* Create a discovery facet query.
*
* @param context the DSpace context
* @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query
* @param prefix                 limit the facet results to those starting with the given prefix.
* @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query
* @param dsoTypes only include search results with one of these types
* @param page the pageable for this discovery query
* @param facetName the facet field
*/
public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration,
String prefix, String query, List<SearchFilter> searchFilters,
List<String> dsoTypes, Pageable page, String facetName)
throws DSpaceBadRequestException {
DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters, DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters,
dsoType); dsoTypes);
//When all search criteria are set, configure facet results //When all search criteria are set, configure facet results
addFacetingForFacets(context, scope, prefix, queryArgs, discoveryConfiguration, facetName, page); addFacetingForFacets(context, scope, prefix, queryArgs, discoveryConfiguration, facetName, page);
@@ -170,7 +245,7 @@ public class DiscoverQueryBuilder implements InitializingBean {
private DiscoverQuery buildCommonDiscoverQuery(Context context, DiscoveryConfiguration discoveryConfiguration, private DiscoverQuery buildCommonDiscoverQuery(Context context, DiscoveryConfiguration discoveryConfiguration,
String query, String query,
List<SearchFilter> searchFilters, String dsoType) List<SearchFilter> searchFilters, List<String> dsoTypes)
throws DSpaceBadRequestException { throws DSpaceBadRequestException {
DiscoverQuery queryArgs = buildBaseQueryForConfiguration(discoveryConfiguration); DiscoverQuery queryArgs = buildBaseQueryForConfiguration(discoveryConfiguration);
@@ -182,10 +257,13 @@ public class DiscoverQueryBuilder implements InitializingBean {
queryArgs.setQuery(query); queryArgs.setQuery(query);
} }
//Limit results to DSO type //Limit results to DSO types
if (StringUtils.isNotBlank(dsoType)) { if (isNotEmpty(dsoTypes)) {
queryArgs.setDSpaceObjectFilter(getDsoType(dsoType)); dsoTypes.stream()
.map(this::getDsoType)
.forEach(queryArgs::addDSpaceObjectFilter);
} }
return queryArgs; return queryArgs;
} }
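`buildQuery` and `buildFacetQuery` keep their original single-type signatures and simply wrap the argument before delegating to the new list-based overloads, so existing callers compile unchanged while new callers can filter on several DSO types at once. A standalone sketch of that overload delegation plus the stream mapping used in `buildCommonDiscoverQuery`; the list of applied filters here is a stand-in for `DiscoverQuery.addDSpaceObjectFilter`:

```
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;

import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

public class TypeFilterExample {

    private final List<String> appliedFilters = new ArrayList<>();

    // Old single-value entry point: wrap and delegate.
    public void applyTypeFilter(String dsoType) {
        List<String> dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList();
        applyTypeFilters(dsoTypes);
    }

    // New list-based entry point: one filter per requested type.
    public void applyTypeFilters(List<String> dsoTypes) {
        if (dsoTypes != null && !dsoTypes.isEmpty()) {
            dsoTypes.stream()
                    .map(this::normalize)          // stands in for getDsoType(...)
                    .forEach(appliedFilters::add); // stands in for addDSpaceObjectFilter(...)
        }
    }

    private String normalize(String dsoType) {
        return dsoType.toUpperCase(Locale.ROOT);
    }

    public static void main(String[] args) {
        TypeFilterExample example = new TypeFilterExample();
        example.applyTypeFilter("Item");                          // old callers still work
        example.applyTypeFilters(List.of("Item", "Collection"));  // new multi-type callers
        System.out.println(example.appliedFilters);               // [ITEM, ITEM, COLLECTION]
    }
}
```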

View File

@@ -19,15 +19,22 @@
<context:annotation-config/> <!-- allows us to use spring annotations in beans --> <context:annotation-config/> <!-- allows us to use spring annotations in beans -->
<bean class="org.dspace.discovery.SolrServiceImpl" id="org.dspace.discovery.SearchService"/> <bean class="org.dspace.discovery.SolrServiceImpl"
id="org.dspace.discovery.SearchService"/>
<alias name="org.dspace.discovery.SearchService" alias="org.dspace.discovery.IndexingService"/> <alias name="org.dspace.discovery.SearchService"
alias="org.dspace.discovery.IndexingService"/>
<bean class="org.dspace.discovery.MockSolrSearchCore" autowire-candidate="true"/> <bean class="org.dspace.discovery.MockSolrSearchCore"
autowire-candidate="true"/>
<!--<bean class="org.dspace.discovery.SolrServiceIndexOutputPlugin" id="solrServiceIndexOutputPlugin"/>--> <!--<bean class="org.dspace.discovery.SolrServiceIndexOutputPlugin"
id="solrServiceIndexOutputPlugin"/>-->
<!-- Statistics services are both lazy loaded (by name), as you are likely just using ONE of them and not both --> <!-- Statistics services are both lazy loaded (by name), as you are likely
<bean id="solrLoggerService" class="org.dspace.statistics.MockSolrLoggerServiceImpl" lazy-init="true"/> just using ONE of them and not both -->
<bean id="solrLoggerService"
class="org.dspace.statistics.MockSolrLoggerServiceImpl"
lazy-init="true"/>
</beans> </beans>

View File

@@ -29,9 +29,9 @@ import com.lyncode.xoai.dataprovider.services.impl.BaseDateProvider;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.Configuration; import com.lyncode.xoai.dataprovider.xml.xoaiconfig.Configuration;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.ContextConfiguration; import com.lyncode.xoai.dataprovider.xml.xoaiconfig.ContextConfiguration;
import org.apache.commons.lang3.time.DateUtils; import org.apache.commons.lang3.time.DateUtils;
import org.dspace.app.rest.builder.CollectionBuilder;
import org.dspace.app.rest.builder.CommunityBuilder;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.content.Community; import org.dspace.content.Community;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.xoai.services.api.EarliestDateResolver; import org.dspace.xoai.services.api.EarliestDateResolver;
@@ -76,7 +76,7 @@ public class OAIpmhIT extends AbstractControllerIntegrationTest {
private EarliestDateResolver earliestDateResolver; private EarliestDateResolver earliestDateResolver;
// XOAI's BaseDateProvider (used for date-based testing below) // XOAI's BaseDateProvider (used for date-based testing below)
private static BaseDateProvider baseDateProvider = new BaseDateProvider(); private static final BaseDateProvider baseDateProvider = new BaseDateProvider();
// Spy on the current XOAIManagerResolver bean, to allow us to change behavior of XOAIManager in tests // Spy on the current XOAIManagerResolver bean, to allow us to change behavior of XOAIManager in tests
// See also: createMockXOAIManager() method // See also: createMockXOAIManager() method
@@ -278,6 +278,6 @@ public class OAIpmhIT extends AbstractControllerIntegrationTest {
* @throws ConfigurationException * @throws ConfigurationException
*/ */
private XOAIManager createMockXOAIManager(Configuration xoaiConfig) throws ConfigurationException { private XOAIManager createMockXOAIManager(Configuration xoaiConfig) throws ConfigurationException {
return new XOAIManager(filterResolver, resourceResolver, xoaiConfig); return new XOAIManager(filterResolver, resourceResolver, xoaiConfig);
} }
} }

View File

@@ -12,10 +12,10 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.xpath; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.xpath;
import org.dspace.app.rest.builder.CollectionBuilder;
import org.dspace.app.rest.builder.CommunityBuilder;
import org.dspace.app.rest.builder.ItemBuilder;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.Community; import org.dspace.content.Community;
import org.dspace.content.Item; import org.dspace.content.Item;
@@ -210,7 +210,7 @@ public class OpenSearchControllerIT extends AbstractControllerIntegrationTest {
.andExpect(xpath("OpenSearchDescription/LongName").string("DSpace at My University")) .andExpect(xpath("OpenSearchDescription/LongName").string("DSpace at My University"))
.andExpect(xpath("OpenSearchDescription/Description") .andExpect(xpath("OpenSearchDescription/Description")
.string("DSpace at My University DSpace repository") .string("DSpace at My University DSpace repository")
) )
; ;
/* Expected response for the service document is: /* Expected response for the service document is:
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>

View File

@@ -14,8 +14,8 @@ import static org.mockito.Mockito.doReturn;
import java.net.URI; import java.net.URI;
import org.dspace.app.rest.builder.CommunityBuilder;
import org.dspace.app.rest.test.AbstractWebClientIntegrationTest; import org.dspace.app.rest.test.AbstractWebClientIntegrationTest;
import org.dspace.builder.CommunityBuilder;
import org.dspace.content.Community; import org.dspace.content.Community;
import org.dspace.content.service.SiteService; import org.dspace.content.service.SiteService;
import org.dspace.rdf.RDFUtil; import org.dspace.rdf.RDFUtil;

Some files were not shown because too many files have changed in this diff