diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml
index cf37158fdc..194d84b446 100644
--- a/dspace-api/pom.xml
+++ b/dspace-api/pom.xml
@@ -548,17 +548,11 @@
             <groupId>com.ibm.icu</groupId>
             <artifactId>icu4j</artifactId>
         </dependency>
-
+
         <dependency>
             <groupId>org.dspace</groupId>
             <artifactId>oclc-harvester2</artifactId>
-            <exclusions>
-                <exclusion>
-                    <groupId>xalan</groupId>
-                    <artifactId>xalan</artifactId>
-                </exclusion>
-            </exclusions>
         </dependency>
         <dependency>
             <groupId>org.dspace</groupId>
             <artifactId>dspace-services</artifactId>
@@ -600,7 +594,7 @@
         <dependency>
             <groupId>org.jbibtex</groupId>
             <artifactId>jbibtex</artifactId>
-            <version>1.0.10</version>
+            <version>1.0.20</version>
         </dependency>
         <dependency>
             <groupId>org.apache.httpcomponents</groupId>
@@ -801,7 +795,7 @@
         <dependency>
             <groupId>com.amazonaws</groupId>
             <artifactId>aws-java-sdk-s3</artifactId>
-            <version>1.12.116</version>
+            <version>1.12.261</version>
         </dependency>
@@ -850,7 +844,7 @@
         <dependency>
             <groupId>com.opencsv</groupId>
             <artifactId>opencsv</artifactId>
-            <version>5.2</version>
+            <version>5.6</version>
         </dependency>
diff --git a/dspace-api/src/main/java/org/dspace/administer/CreateAdministrator.java b/dspace-api/src/main/java/org/dspace/administer/CreateAdministrator.java
index 80d69f3b66..81250e9c82 100644
--- a/dspace-api/src/main/java/org/dspace/administer/CreateAdministrator.java
+++ b/dspace-api/src/main/java/org/dspace/administer/CreateAdministrator.java
@@ -14,6 +14,7 @@ import java.util.Locale;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.lang3.StringUtils;
import org.dspace.core.Context;
@@ -54,14 +55,14 @@ public final class CreateAdministrator {
protected GroupService groupService;
/**
- * For invoking via the command line. If called with no command line arguments,
+ * For invoking via the command line. If called with no command line arguments,
* it will negotiate with the user for the administrator details
*
* @param argv the command line arguments given
* @throws Exception if error
*/
public static void main(String[] argv)
- throws Exception {
+ throws Exception {
CommandLineParser parser = new DefaultParser();
Options options = new Options();
@@ -69,19 +70,41 @@ public final class CreateAdministrator {
options.addOption("e", "email", true, "administrator email address");
options.addOption("f", "first", true, "administrator first name");
+ options.addOption("h", "help", false, "explain create-administrator options");
options.addOption("l", "last", true, "administrator last name");
options.addOption("c", "language", true, "administrator language");
options.addOption("p", "password", true, "administrator password");
- CommandLine line = parser.parse(options, argv);
+ CommandLine line = null;
+
+ try {
+
+ line = parser.parse(options, argv);
+
+ } catch (Exception e) {
+
+ System.out.println(e.getMessage() + "\nTry \"dspace create-administrator -h\" to print help information.");
+ System.exit(1);
+
+ }
if (line.hasOption("e") && line.hasOption("f") && line.hasOption("l") &&
- line.hasOption("c") && line.hasOption("p")) {
+ line.hasOption("c") && line.hasOption("p")) {
ca.createAdministrator(line.getOptionValue("e"),
- line.getOptionValue("f"), line.getOptionValue("l"),
- line.getOptionValue("c"), line.getOptionValue("p"));
+ line.getOptionValue("f"), line.getOptionValue("l"),
+ line.getOptionValue("c"), line.getOptionValue("p"));
+ } else if (line.hasOption("h")) {
+ String header = "\nA command-line tool for creating an initial administrator for setting up a" +
+ " DSpace site. Unless all the required parameters are passed it will" +
+ " prompt for an e-mail address, last name, first name and password from" +
+ " standard input.. An administrator group is then created and the data passed" +
+ " in used to create an e-person in that group.\n\n";
+ String footer = "\n";
+ HelpFormatter formatter = new HelpFormatter();
+ formatter.printHelp("dspace create-administrator", header, options, footer, true);
+ return;
} else {
- ca.negotiateAdministratorDetails();
+ ca.negotiateAdministratorDetails(line);
}
}
@@ -91,7 +114,7 @@ public final class CreateAdministrator {
* @throws Exception if error
*/
protected CreateAdministrator()
- throws Exception {
+ throws Exception {
context = new Context();
groupService = EPersonServiceFactory.getInstance().getGroupService();
ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
@@ -103,20 +126,20 @@ public final class CreateAdministrator {
*
* @throws Exception if error
*/
- protected void negotiateAdministratorDetails()
- throws Exception {
+ protected void negotiateAdministratorDetails(CommandLine line)
+ throws Exception {
Console console = System.console();
System.out.println("Creating an initial administrator account");
- boolean dataOK = false;
-
- String email = null;
- String firstName = null;
- String lastName = null;
- char[] password1 = null;
- char[] password2 = null;
+ String email = line.getOptionValue('e');
+ String firstName = line.getOptionValue('f');
+ String lastName = line.getOptionValue('l');
String language = I18nUtil.getDefaultLocale().getLanguage();
+ ConfigurationService cfg = DSpaceServicesFactory.getInstance().getConfigurationService();
+ boolean flag = line.hasOption('p');
+ char[] password = null;
+ boolean dataOK = line.hasOption('f') && line.hasOption('e') && line.hasOption('l');
while (!dataOK) {
System.out.print("E-mail address: ");
@@ -147,8 +170,6 @@ public final class CreateAdministrator {
if (lastName != null) {
lastName = lastName.trim();
}
-
- ConfigurationService cfg = DSpaceServicesFactory.getInstance().getConfigurationService();
if (cfg.hasProperty("webui.supported.locales")) {
System.out.println("Select one of the following languages: "
+ cfg.getProperty("webui.supported.locales"));
@@ -163,46 +184,59 @@ public final class CreateAdministrator {
}
}
- System.out.println("Password will not display on screen.");
- System.out.print("Password: ");
+ System.out.print("Is the above data correct? (y or n): ");
System.out.flush();
- password1 = console.readPassword();
+ String s = console.readLine();
- System.out.print("Again to confirm: ");
- System.out.flush();
-
- password2 = console.readPassword();
-
- //TODO real password validation
- if (password1.length > 1 && Arrays.equals(password1, password2)) {
- // password OK
- System.out.print("Is the above data correct? (y or n): ");
- System.out.flush();
-
- String s = console.readLine();
-
- if (s != null) {
- s = s.trim();
- if (s.toLowerCase().startsWith("y")) {
- dataOK = true;
- }
+ if (s != null) {
+ s = s.trim();
+ if (s.toLowerCase().startsWith("y")) {
+ dataOK = true;
}
- } else {
- System.out.println("Passwords don't match");
}
+
+ }
+ if (!flag) {
+ password = getPassword(console);
+ if (password == null) {
+ return;
+ }
+ } else {
+ password = line.getOptionValue("p").toCharArray();
}
-
// if we make it to here, we are ready to create an administrator
- createAdministrator(email, firstName, lastName, language, String.valueOf(password1));
+ createAdministrator(email, firstName, lastName, language, String.valueOf(password));
- //Cleaning arrays that held password
- Arrays.fill(password1, ' ');
- Arrays.fill(password2, ' ');
+ }
+
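+    /**
+     * Prompt for a new password on the console, reading it twice without echo,
+     * and verify that the two entries match.
+     *
+     * @param console the console to read from.
+     * @return the confirmed password, or {@code null} if the entries did not match.
+     */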
+ private char[] getPassword(Console console) {
+ char[] password1 = null;
+ char[] password2 = null;
+ System.out.println("Password will not display on screen.");
+ System.out.print("Password: ");
+ System.out.flush();
+
+ password1 = console.readPassword();
+
+ System.out.print("Again to confirm: ");
+ System.out.flush();
+
+ password2 = console.readPassword();
+
+ // TODO real password validation
+ if (password1.length > 1 && Arrays.equals(password1, password2)) {
+ // password OK
+ Arrays.fill(password2, ' ');
+ return password1;
+ } else {
+ System.out.println("Passwords don't match");
+ return null;
+ }
}
/**
- * Create the administrator with the given details. If the user
+ * Create the administrator with the given details. If the user
* already exists then they are simply upped to administrator status
*
* @param email the email for the user
@@ -213,8 +247,8 @@ public final class CreateAdministrator {
* @throws Exception if error
*/
protected void createAdministrator(String email, String first, String last,
- String language, String pw)
- throws Exception {
+ String language, String pw)
+ throws Exception {
// Of course we aren't an administrator yet so we need to
// circumvent authorisation
context.turnOffAuthorisationSystem();
diff --git a/dspace-api/src/main/java/org/dspace/administer/ProcessCleaner.java b/dspace-api/src/main/java/org/dspace/administer/ProcessCleaner.java
new file mode 100644
index 0000000000..ee6b8d08b0
--- /dev/null
+++ b/dspace-api/src/main/java/org/dspace/administer/ProcessCleaner.java
@@ -0,0 +1,140 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.administer;
+
+import java.io.IOException;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.lang.time.DateUtils;
+import org.dspace.authorize.AuthorizeException;
+import org.dspace.content.ProcessStatus;
+import org.dspace.core.Context;
+import org.dspace.scripts.DSpaceRunnable;
+import org.dspace.scripts.Process;
+import org.dspace.scripts.factory.ScriptServiceFactory;
+import org.dspace.scripts.service.ProcessService;
+import org.dspace.services.ConfigurationService;
+import org.dspace.services.factory.DSpaceServicesFactory;
+import org.dspace.utils.DSpace;
+
+/**
+ * Script to clean up old processes in the specified state(s).
+ *
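+ * <p>
+ * By default only COMPLETED processes are removed; the -f and -r options
+ * extend the cleanup to FAILED and RUNNING processes, and -c selects
+ * COMPLETED explicitly. Only processes older than the number of days given
+ * by the {@code process-cleaner.days} configuration property (14 by default)
+ * are deleted. The script is typically invoked through the DSpace launcher,
+ * e.g. {@code dspace process-cleaner -f}.
+ *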
+ * @author Luca Giamminonni (luca.giamminonni at 4science.it)
+ *
+ */
+public class ProcessCleaner extends DSpaceRunnable<ProcessCleanerConfiguration<ProcessCleaner>> {
+
+ private ConfigurationService configurationService;
+
+ private ProcessService processService;
+
+
+ private boolean cleanCompleted = false;
+
+ private boolean cleanFailed = false;
+
+ private boolean cleanRunning = false;
+
+ private boolean help = false;
+
+ private Integer days;
+
+
+ @Override
+ public void setup() throws ParseException {
+
+ this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
+ this.processService = ScriptServiceFactory.getInstance().getProcessService();
+
+ this.help = commandLine.hasOption('h');
+ this.cleanFailed = commandLine.hasOption('f');
+ this.cleanRunning = commandLine.hasOption('r');
+ this.cleanCompleted = commandLine.hasOption('c') || (!cleanFailed && !cleanRunning);
+
+ this.days = configurationService.getIntProperty("process-cleaner.days", 14);
+
+ if (this.days <= 0) {
+ throw new IllegalStateException("The number of days must be a positive integer.");
+ }
+
+ }
+
+ @Override
+ public void internalRun() throws Exception {
+
+ if (help) {
+ printHelp();
+ return;
+ }
+
+ Context context = new Context();
+
+ try {
+ context.turnOffAuthorisationSystem();
+ performDeletion(context);
+ } finally {
+ context.restoreAuthSystemState();
+ context.complete();
+ }
+
+ }
+
+ /**
+ * Delete the processes based on the specified statuses and the configured days
+ * from their creation.
+ */
+ private void performDeletion(Context context) throws SQLException, IOException, AuthorizeException {
+
+ List<ProcessStatus> statuses = getProcessToDeleteStatuses();
+ Date creationDate = calculateCreationDate();
+
+ handler.logInfo("Searching for processes with status: " + statuses);
+ List<Process> processes = processService.findByStatusAndCreationTimeOlderThan(context, statuses, creationDate);
+ handler.logInfo("Found " + processes.size() + " processes to be deleted");
+ for (Process process : processes) {
+ processService.delete(context, process);
+ }
+
+ handler.logInfo("Process cleanup completed");
+
+ }
+
+ /**
+ * Returns the list of Process statuses to be deleted.
+ */
+ private List<ProcessStatus> getProcessToDeleteStatuses() {
+ List<ProcessStatus> statuses = new ArrayList<>();
+ if (cleanCompleted) {
+ statuses.add(ProcessStatus.COMPLETED);
+ }
+ if (cleanFailed) {
+ statuses.add(ProcessStatus.FAILED);
+ }
+ if (cleanRunning) {
+ statuses.add(ProcessStatus.RUNNING);
+ }
+ return statuses;
+ }
+
+ private Date calculateCreationDate() {
+ return DateUtils.addDays(new Date(), -days);
+ }
+
+ @Override
+ @SuppressWarnings("unchecked")
+ public ProcessCleanerConfiguration<ProcessCleaner> getScriptConfiguration() {
+ return new DSpace().getServiceManager()
+ .getServiceByName("process-cleaner", ProcessCleanerConfiguration.class);
+ }
+
+}
diff --git a/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerCli.java b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerCli.java
new file mode 100644
index 0000000000..292c6c372e
--- /dev/null
+++ b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerCli.java
@@ -0,0 +1,18 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.administer;
+
+/**
+ * The {@link ProcessCleaner} for CLI.
+ *
+ * @author Luca Giamminonni (luca.giamminonni at 4science.it)
+ *
+ */
+public class ProcessCleanerCli extends ProcessCleaner {
+
+}
diff --git a/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerCliConfiguration.java b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerCliConfiguration.java
new file mode 100644
index 0000000000..043990156d
--- /dev/null
+++ b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerCliConfiguration.java
@@ -0,0 +1,18 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.administer;
+
+/**
+ * The {@link ProcessCleanerConfiguration} for CLI.
+ *
+ * @author Luca Giamminonni (luca.giamminonni at 4science.it)
+ *
+ */
+public class ProcessCleanerCliConfiguration extends ProcessCleanerConfiguration<ProcessCleanerCli> {
+
+}
diff --git a/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java
new file mode 100644
index 0000000000..8d189038d9
--- /dev/null
+++ b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java
@@ -0,0 +1,70 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.administer;
+
+import java.sql.SQLException;
+
+import org.apache.commons.cli.Options;
+import org.dspace.authorize.service.AuthorizeService;
+import org.dspace.core.Context;
+import org.dspace.scripts.configuration.ScriptConfiguration;
+import org.springframework.beans.factory.annotation.Autowired;
+
+/**
+ * The {@link ScriptConfiguration} for the {@link ProcessCleaner} script.
+ */
+public class ProcessCleanerConfiguration<T extends ProcessCleaner> extends ScriptConfiguration<T> {
+
+ @Autowired
+ private AuthorizeService authorizeService;
+
+ private Class<T> dspaceRunnableClass;
+
+ @Override
+ public boolean isAllowedToExecute(Context context) {
+ try {
+ return authorizeService.isAdmin(context);
+ } catch (SQLException e) {
+ throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
+ }
+ }
+
+ @Override
+ public Options getOptions() {
+ if (options == null) {
+
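+            // Build the option set only once and cache it in the superclass field.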
+ Options options = new Options();
+
+ options.addOption("h", "help", false, "help");
+
+ options.addOption("r", "running", false, "delete the process with RUNNING status");
+ options.getOption("r").setType(boolean.class);
+
+ options.addOption("f", "failed", false, "delete the process with FAILED status");
+ options.getOption("f").setType(boolean.class);
+
+ options.addOption("c", "completed", false,
+ "delete the process with COMPLETED status (default if no statuses are specified)");
+ options.getOption("c").setType(boolean.class);
+
+ super.options = options;
+ }
+ return options;
+ }
+
+ @Override
+ public Class<T> getDspaceRunnableClass() {
+ return dspaceRunnableClass;
+ }
+
+ @Override
+ public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
+ this.dspaceRunnableClass = dspaceRunnableClass;
+ }
+
+}
diff --git a/dspace-api/src/main/java/org/dspace/administer/StructBuilder.java b/dspace-api/src/main/java/org/dspace/administer/StructBuilder.java
index 41a657dd61..13a1b3b5bb 100644
--- a/dspace-api/src/main/java/org/dspace/administer/StructBuilder.java
+++ b/dspace-api/src/main/java/org/dspace/administer/StructBuilder.java
@@ -42,6 +42,7 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
+import org.apache.commons.lang3.StringUtils;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
@@ -55,6 +56,8 @@ import org.dspace.content.service.CommunityService;
import org.dspace.core.Context;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
+import org.dspace.handle.factory.HandleServiceFactory;
+import org.dspace.handle.service.HandleService;
import org.jdom2.Element;
import org.jdom2.output.Format;
import org.jdom2.output.XMLOutputter;
@@ -79,6 +82,7 @@ import org.xml.sax.SAXException;
*
*
* }
+ *
*
* It can be arbitrarily deep, and supports all the metadata elements
* that make up the community and collection metadata. See the system
@@ -107,12 +111,14 @@ public class StructBuilder {
*/
    private static final Map<String, MetadataFieldName> communityMap = new HashMap<>();
- protected static CommunityService communityService
+ protected static final CommunityService communityService
= ContentServiceFactory.getInstance().getCommunityService();
- protected static CollectionService collectionService
+ protected static final CollectionService collectionService
= ContentServiceFactory.getInstance().getCollectionService();
- protected static EPersonService ePersonService
+ protected static final EPersonService ePersonService
= EPersonServiceFactory.getInstance().getEPersonService();
+ protected static final HandleService handleService
+ = HandleServiceFactory.getInstance().getHandleService();
/**
* Default constructor
@@ -138,6 +144,7 @@ public class StructBuilder {
* @throws SQLException passed through.
* @throws FileNotFoundException if input or output could not be opened.
* @throws TransformerException if the input document is invalid.
+ * @throws XPathExpressionException passed through.
*/
public static void main(String[] argv)
throws ParserConfigurationException, SQLException,
@@ -148,6 +155,7 @@ public class StructBuilder {
options.addOption("h", "help", false, "Print this help message.");
options.addOption("?", "help");
options.addOption("x", "export", false, "Export the current structure as XML.");
+ options.addOption("k", "keep-handles", false, "Apply Handles from input document.");
options.addOption(Option.builder("e").longOpt("eperson")
.desc("User who is manipulating the repository's structure.")
@@ -209,6 +217,7 @@ public class StructBuilder {
// Export? Import?
if (line.hasOption('x')) { // export
exportStructure(context, outputStream);
+ outputStream.close();
} else { // Must be import
String input = line.getOptionValue('f');
if (null == input) {
@@ -223,7 +232,12 @@ public class StructBuilder {
inputStream = new FileInputStream(input);
}
- importStructure(context, inputStream, outputStream);
+ boolean keepHandles = line.hasOption("k");
+ importStructure(context, inputStream, outputStream, keepHandles);
+
+ inputStream.close();
+ outputStream.close();
+
// save changes from import
context.complete();
}
@@ -236,14 +250,17 @@ public class StructBuilder {
* @param context
* @param input XML which describes the new communities and collections.
* @param output input, annotated with the new objects' identifiers.
+ * @param keepHandles true if Handles should be set from input.
* @throws IOException
* @throws ParserConfigurationException
* @throws SAXException
* @throws TransformerException
* @throws SQLException
*/
- static void importStructure(Context context, InputStream input, OutputStream output)
- throws IOException, ParserConfigurationException, SQLException, TransformerException, XPathExpressionException {
+ static void importStructure(Context context, InputStream input,
+ OutputStream output, boolean keepHandles)
+ throws IOException, ParserConfigurationException, SQLException,
+ TransformerException, XPathExpressionException {
// load the XML
Document document = null;
@@ -271,7 +288,19 @@ public class StructBuilder {
NodeList identifierNodes = (NodeList) xPath.compile("//*[@identifier]")
.evaluate(document, XPathConstants.NODESET);
if (identifierNodes.getLength() > 0) {
- System.err.println("The input document has 'identifier' attributes, which will be ignored.");
+ if (!keepHandles) {
+ System.err.println("The input document has 'identifier' attributes, which will be ignored.");
+ } else {
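+                // A Handle that is already bound to an object cannot be
+                // reused, so verify every requested Handle before importing.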
+ for (int i = 0; i < identifierNodes.getLength() ; i++) {
+ String identifier = identifierNodes.item(i).getAttributes().item(0).getTextContent();
+ if (handleService.resolveToURL(context, identifier) != null) {
+ System.err.printf("The input document contains handle %s,"
+ + " which is in use already. Aborting...%n",
+ identifier);
+ System.exit(1);
+ }
+ }
+ }
}
// load the mappings into the member variable hashmaps
@@ -296,7 +325,7 @@ public class StructBuilder {
.evaluate(document, XPathConstants.NODESET);
// run the import starting with the top level communities
- elements = handleCommunities(context, first, null);
+ elements = handleCommunities(context, first, null, keepHandles);
} catch (TransformerException ex) {
System.err.format("Input content not understood: %s%n", ex.getMessage());
System.exit(1);
@@ -619,23 +648,29 @@ public class StructBuilder {
* @param context the context of the request
* @param communities a nodelist of communities to create along with their sub-structures
* @param parent the parent community of the nodelist of communities to create
+ * @param keepHandles use Handles from input.
* @return an element array containing additional information regarding the
* created communities (e.g. the handles they have been assigned)
*/
- private static Element[] handleCommunities(Context context, NodeList communities, Community parent)
- throws TransformerException, SQLException, AuthorizeException, XPathExpressionException {
+ private static Element[] handleCommunities(Context context, NodeList communities,
+ Community parent, boolean keepHandles)
+ throws TransformerException, SQLException, AuthorizeException,
+ XPathExpressionException {
Element[] elements = new Element[communities.getLength()];
XPath xPath = XPathFactory.newInstance().newXPath();
for (int i = 0; i < communities.getLength(); i++) {
- Community community;
- Element element = new Element("community");
+ Node tn = communities.item(i);
+ Node identifier = tn.getAttributes().getNamedItem("identifier");
// create the community or sub community
- if (parent != null) {
+ Community community;
+ if (null == identifier
+ || StringUtils.isBlank(identifier.getNodeValue())
+ || !keepHandles) {
community = communityService.create(parent, context);
} else {
- community = communityService.create(null, context);
+ community = communityService.create(parent, context, identifier.getNodeValue());
}
// default the short description to be an empty string
@@ -643,7 +678,6 @@ public class StructBuilder {
MD_SHORT_DESCRIPTION, null, " ");
// now update the metadata
- Node tn = communities.item(i);
            for (Map.Entry<String, MetadataFieldName> entry : communityMap.entrySet()) {
NodeList nl = (NodeList) xPath.compile(entry.getKey()).evaluate(tn, XPathConstants.NODESET);
if (nl.getLength() == 1) {
@@ -669,6 +703,7 @@ public class StructBuilder {
// but it's here to keep it separate from the create process in
// case
// we want to move it or make it switchable later
+ Element element = new Element("community");
element.setAttribute("identifier", community.getHandle());
Element nameElement = new Element("name");
@@ -711,12 +746,16 @@ public class StructBuilder {
}
// handle sub communities
- NodeList subCommunities = (NodeList) xPath.compile("community").evaluate(tn, XPathConstants.NODESET);
- Element[] subCommunityElements = handleCommunities(context, subCommunities, community);
+ NodeList subCommunities = (NodeList) xPath.compile("community")
+ .evaluate(tn, XPathConstants.NODESET);
+ Element[] subCommunityElements = handleCommunities(context,
+ subCommunities, community, keepHandles);
// handle collections
- NodeList collections = (NodeList) xPath.compile("collection").evaluate(tn, XPathConstants.NODESET);
- Element[] collectionElements = handleCollections(context, collections, community);
+ NodeList collections = (NodeList) xPath.compile("collection")
+ .evaluate(tn, XPathConstants.NODESET);
+ Element[] collectionElements = handleCollections(context,
+ collections, community, keepHandles);
int j;
for (j = 0; j < subCommunityElements.length; j++) {
@@ -741,21 +780,31 @@ public class StructBuilder {
* @return an Element array containing additional information about the
* created collections (e.g. the handle)
*/
- private static Element[] handleCollections(Context context, NodeList collections, Community parent)
+ private static Element[] handleCollections(Context context,
+ NodeList collections, Community parent, boolean keepHandles)
throws SQLException, AuthorizeException, XPathExpressionException {
Element[] elements = new Element[collections.getLength()];
XPath xPath = XPathFactory.newInstance().newXPath();
for (int i = 0; i < collections.getLength(); i++) {
- Element element = new Element("collection");
- Collection collection = collectionService.create(context, parent);
+ Node tn = collections.item(i);
+ Node identifier = tn.getAttributes().getNamedItem("identifier");
+
+ // Create the Collection.
+ Collection collection;
+ if (null == identifier
+ || StringUtils.isBlank(identifier.getNodeValue())
+ || !keepHandles) {
+ collection = collectionService.create(context, parent);
+ } else {
+ collection = collectionService.create(context, parent, identifier.getNodeValue());
+ }
// default the short description to the empty string
collectionService.setMetadataSingleValue(context, collection,
MD_SHORT_DESCRIPTION, Item.ANY, " ");
// import the rest of the metadata
- Node tn = collections.item(i);
            for (Map.Entry<String, MetadataFieldName> entry : collectionMap.entrySet()) {
NodeList nl = (NodeList) xPath.compile(entry.getKey()).evaluate(tn, XPathConstants.NODESET);
if (nl.getLength() == 1) {
@@ -766,6 +815,7 @@ public class StructBuilder {
collectionService.update(context, collection);
+ Element element = new Element("collection");
element.setAttribute("identifier", collection.getHandle());
Element nameElement = new Element("name");
diff --git a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportCLITool.java b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportCLITool.java
index d6a69b5823..c1d72f1c5a 100644
--- a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportCLITool.java
+++ b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportCLITool.java
@@ -204,7 +204,7 @@ public class ItemExportCLITool {
|| (mycollection.getType() != Constants.COLLECTION)) {
mycollection = null;
}
- } else if (myIDString != null) {
+ } else {
mycollection = collectionService.find(c, UUID.fromString(myIDString));
}
diff --git a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportServiceImpl.java
index 6578e57de2..b342db9f14 100644
--- a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportServiceImpl.java
@@ -64,17 +64,21 @@ import org.springframework.beans.factory.annotation.Autowired;
* Item exporter to create simple AIPs for DSpace content. Currently exports
* individual items, or entire collections. For instructions on use, see
* printUsage() method.
- *
+ *
* ItemExport creates the simple AIP package that the importer also uses. It
* consists of:
- *
- * /exportdir/42/ (one directory per item) / dublin_core.xml - qualified dublin
- * core in RDF schema / contents - text file, listing one file per line / file1
- * - files contained in the item / file2 / ...
- *
+ *
+ * <pre>{@code
+ * /exportdir/42/ (one directory per item)
+ * / dublin_core.xml - qualified dublin core in RDF schema
+ * / contents - text file, listing one file per line
+ * / file1 - files contained in the item
+ * / file2
+ * / ...
+ * }</pre>
+ *
* issues -doesn't handle special characters in metadata (needs to turn {@code &'s} into
* {@code &}, etc.)
- *
+ *
* Modified by David Little, UCSD Libraries 12/21/04 to allow the registration
* of files (bitstreams) into DSpace.
*
@@ -101,7 +105,7 @@ public class ItemExportServiceImpl implements ItemExportService {
/**
* log4j logger
*/
- private final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemExportServiceImpl.class);
+ private final Logger log = org.apache.logging.log4j.LogManager.getLogger();
protected ItemExportServiceImpl() {
@@ -168,6 +172,7 @@ public class ItemExportServiceImpl implements ItemExportService {
// make it this far, now start exporting
writeMetadata(c, myItem, itemDir, migrate);
writeBitstreams(c, myItem, itemDir, excludeBitstreams);
+ writeCollections(myItem, itemDir);
if (!migrate) {
writeHandle(c, myItem, itemDir);
}
@@ -343,6 +348,33 @@ public class ItemExportServiceImpl implements ItemExportService {
}
}
+ /**
+ * Create the 'collections' file. List handles of all Collections which
+ * contain this Item. The "owning" Collection is listed first.
+ *
+ * @param item list collections holding this Item.
+ * @param destDir write the file here.
+ * @throws IOException if the file cannot be created or written.
+ */
+ protected void writeCollections(Item item, File destDir)
+ throws IOException {
+ File outFile = new File(destDir, "collections");
+ if (outFile.createNewFile()) {
+ try (PrintWriter out = new PrintWriter(new FileWriter(outFile))) {
+ String ownerHandle = item.getOwningCollection().getHandle();
+ out.println(ownerHandle);
+ for (Collection collection : item.getCollections()) {
+ String collectionHandle = collection.getHandle();
+ if (!collectionHandle.equals(ownerHandle)) {
+ out.println(collectionHandle);
+ }
+ }
+ }
+ } else {
+ throw new IOException("Cannot create 'collections' in " + destDir);
+ }
+ }
+
/**
* Create both the bitstreams and the contents file. Any bitstreams that
* were originally registered will be marked in the contents file as such.
@@ -630,11 +662,9 @@ public class ItemExportServiceImpl implements ItemExportService {
Thread go = new Thread() {
@Override
public void run() {
- Context context = null;
+ Context context = new Context();
                Iterator<Item> iitems = null;
try {
- // create a new dspace context
- context = new Context();
// ignore auths
context.turnOffAuthorisationSystem();
diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java
index a034fe963c..dfada83d95 100644
--- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java
@@ -264,16 +264,12 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
// sneaky isResume == true means open file in append mode
outFile = new File(mapFile);
mapOut = new PrintWriter(new FileWriter(outFile, isResume));
-
- if (mapOut == null) {
- throw new Exception("can't open mapfile: " + mapFile);
- }
}
// open and process the source directory
File d = new java.io.File(sourceDir);
- if (d == null || !d.isDirectory()) {
+ if (!d.isDirectory()) {
throw new Exception("Error, cannot open source directory " + sourceDir);
}
@@ -433,12 +429,16 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
/**
* Read the relationship manifest file.
*
- * Each line in the file contains a relationship type id and an item identifier in the following format:
- *
- * relation.
- *
- * The input_item_folder should refer the folder name of another item in this import batch.
- *
+ * Each line in the file contains a relationship type id and an item
+ * identifier in the following format:
+ *
+ *
+ * {@code relation.<relation key> <item identifier>}
+ *
+ *
+ * The {@code input_item_folder} should refer to the folder name of another
+ * item in this import batch.
+ *
* @param path The main import folder path.
* @param filename The name of the manifest file to check ('relationships')
* @return Map of found relationships
@@ -558,9 +558,10 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
/**
* Lookup an item by a (unique) meta value.
*
- * @param metaKey
- * @param metaValue
- * @return Item
+ * @param c current DSpace session.
+ * @param metaKey name of the metadata field to match.
+ * @param metaValue value to be matched.
+ * @return the matching Item.
* @throws Exception if single item not found.
*/
protected Item findItemByMetaValue(Context c, String metaKey, String metaValue) throws Exception {
@@ -604,7 +605,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
// verify the source directory
File d = new java.io.File(sourceDir);
- if (d == null || !d.isDirectory()) {
+ if (!d.isDirectory()) {
throw new Exception("Error, cannot open source directory "
+ sourceDir);
}
@@ -643,10 +644,6 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
File handleFile = new File(sourceDir + File.separatorChar + newItemName + File.separatorChar + "handle");
PrintWriter handleOut = new PrintWriter(new FileWriter(handleFile, true));
- if (handleOut == null) {
- throw new Exception("can't open handle file: " + handleFile.getCanonicalPath());
- }
-
handleOut.println(oldHandle);
handleOut.close();
@@ -1668,26 +1665,27 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
.trim();
}
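+            // In test mode nothing is changed, so skip the bitstream lookup
+            // and permission updates for this line.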
+ if (isTest) {
+ continue;
+ }
+
Bitstream bs = null;
- boolean notfound = true;
boolean updateRequired = false;
- if (!isTest) {
- // find bitstream
- List<Bitstream> bitstreams = itemService.getNonInternalBitstreams(c, myItem);
- for (int j = 0; j < bitstreams.size() && notfound; j++) {
- if (bitstreams.get(j).getName().equals(bitstreamName)) {
- bs = bitstreams.get(j);
- notfound = false;
- }
+ // find bitstream
+ List<Bitstream> bitstreams = itemService.getNonInternalBitstreams(c, myItem);
+ for (Bitstream bitstream : bitstreams) {
+ if (bitstream.getName().equals(bitstreamName)) {
+ bs = bitstream;
+ break;
}
}
- if (notfound && !isTest) {
+ if (null == bs) {
// this should never happen
- System.out.println("\tdefault permissions set for "
- + bitstreamName);
- } else if (!isTest) {
+ System.out.printf("\tdefault permissions set for %s%n",
+ bitstreamName);
+ } else {
if (permissionsExist) {
if (myGroup == null) {
System.out.println("\t" + groupName
@@ -2028,15 +2026,11 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
Thread go = new Thread() {
@Override
public void run() {
- Context context = null;
-
+ Context context = new Context();
String importDir = null;
EPerson eperson = null;
try {
-
- // create a new dspace context
- context = new Context();
eperson = ePersonService.find(context, oldEPerson.getID());
context.setCurrentUser(eperson);
context.turnOffAuthorisationSystem();
@@ -2047,7 +2041,8 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
if (theOtherCollections != null) {
for (String colID : theOtherCollections) {
UUID colId = UUID.fromString(colID);
- if (!theOwningCollection.getID().equals(colId)) {
+ if (theOwningCollection != null
+ && !theOwningCollection.getID().equals(colId)) {
Collection col = collectionService.find(context, colId);
if (col != null) {
collectionList.add(col);
diff --git a/dspace-api/src/main/java/org/dspace/app/itemupdate/AddBitstreamsAction.java b/dspace-api/src/main/java/org/dspace/app/itemupdate/AddBitstreamsAction.java
index e9693fb3d1..644745304a 100644
--- a/dspace-api/src/main/java/org/dspace/app/itemupdate/AddBitstreamsAction.java
+++ b/dspace-api/src/main/java/org/dspace/app/itemupdate/AddBitstreamsAction.java
@@ -77,7 +77,7 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction {
ItemUpdate.pr("Contents bitstream count: " + contents.size());
String[] files = dir.list(ItemUpdate.fileFilter);
- List fileList = new ArrayList();
+ List<String> fileList = new ArrayList<>();
for (String filename : files) {
fileList.add(filename);
ItemUpdate.pr("file: " + filename);
@@ -134,9 +134,6 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction {
ItemUpdate.pr("contents entry for bitstream: " + ce.toString());
File f = new File(dir, ce.filename);
- // get an input stream
- BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f));
-
Bitstream bs = null;
String newBundleName = ce.bundlename;
@@ -173,7 +170,9 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction {
targetBundle = bundles.iterator().next();
}
- bs = bitstreamService.create(context, targetBundle, bis);
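+            // Open the bitstream's file only for the duration of the create;
+            // try-with-resources closes it automatically.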
+ try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f));) {
+ bs = bitstreamService.create(context, targetBundle, bis);
+ }
bs.setName(context, ce.filename);
// Identify the format
diff --git a/dspace-api/src/main/java/org/dspace/app/itemupdate/ItemUpdate.java b/dspace-api/src/main/java/org/dspace/app/itemupdate/ItemUpdate.java
index b6aa875f29..a3fe0b2321 100644
--- a/dspace-api/src/main/java/org/dspace/app/itemupdate/ItemUpdate.java
+++ b/dspace-api/src/main/java/org/dspace/app/itemupdate/ItemUpdate.java
@@ -39,29 +39,34 @@ import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
/**
- * Provides some batch editing capabilities for items in DSpace:
- * Metadata fields - Add, Delete
- * Bitstreams - Add, Delete
+ * Provides some batch editing capabilities for items in DSpace.
+ * <ul>
+ *   <li>Metadata fields - Add, Delete</li>
+ *   <li>Bitstreams - Add, Delete</li>
+ * </ul>
*
- * The design has been for compatibility with ItemImporter
+ *
+ * The design has been for compatibility with
+ * {@link org.dspace.app.itemimport.service.ItemImportService}
* in the use of the DSpace archive format which is used to
* specify changes on a per item basis. The directory names
* to correspond to each item are arbitrary and will only be
* used for logging purposes. The reference to the item is
- * from a required dc.identifier with the item handle to be
- * included in the dublin_core.xml (or similar metadata) file.
+ * from a required {@code dc.identifier} with the item handle to be
+ * included in the {@code dublin_core.xml} (or similar metadata) file.
*
- * Any combination of these actions is permitted in a single run of this class
+ *
+ * Any combination of these actions is permitted in a single run of this class.
* The order of actions is important when used in combination.
- * It is the responsibility of the calling class (here, ItemUpdate)
- * to register UpdateAction classes in the order to which they are
+ * It is the responsibility of the calling class (here, {@code ItemUpdate})
+ * to register {@link UpdateAction} classes in the order in which they are
* to be performed.
*
- *
- * It is unfortunate that so much code needs to be borrowed
- * from ItemImport as it is not reusable in private methods, etc.
- * Some of this has been placed into the MetadataUtilities class
- * for possible reuse elsewhere.
+ *
+ * It is unfortunate that so much code needs to be borrowed from
+ * {@link org.dspace.app.itemimport.service.ItemImportService} as it is not
+ * reusable in private methods, etc. Some of this has been placed into the
+ * {@link MetadataUtilities} class for possible reuse elsewhere.
*
* @author W. Hays based on a conceptual design by R. Rodgers
*/
@@ -73,7 +78,7 @@ public class ItemUpdate {
public static final String DELETE_CONTENTS_FILE = "delete_contents";
public static String HANDLE_PREFIX = null;
- public static final Map filterAliases = new HashMap();
+ public static final Map<String, String> filterAliases = new HashMap<>();
public static boolean verbose = false;
@@ -375,7 +380,7 @@ public class ItemUpdate {
// open and process the source directory
File sourceDir = new File(sourceDirPath);
- if ((sourceDir == null) || !sourceDir.exists() || !sourceDir.isDirectory()) {
+ if (!sourceDir.exists() || !sourceDir.isDirectory()) {
pr("Error, cannot open archive source directory " + sourceDirPath);
throw new Exception("error with archive source directory " + sourceDirPath);
}
diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/Brand.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/Brand.java
index 2d963dd3da..9e28edad45 100644
--- a/dspace-api/src/main/java/org/dspace/app/mediafilter/Brand.java
+++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/Brand.java
@@ -21,10 +21,10 @@ import java.awt.image.BufferedImage;
*/
public class Brand {
- private int brandWidth;
- private int brandHeight;
- private Font font;
- private int xOffset;
+ private final int brandWidth;
+ private final int brandHeight;
+ private final Font font;
+ private final int xOffset;
/**
* Constructor to set up footer image attributes.
@@ -92,7 +92,7 @@ public class Brand {
* do the text placements and preparatory work for the brand image generation
*
* @param brandImage a BufferedImage object where the image is created
- * @param identifier and Identifier object describing what text is to be placed in what
+ * @param brandText an Identifier object describing what text is to be placed in what
* position within the brand
*/
private void drawImage(BufferedImage brandImage,
diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/BrandText.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/BrandText.java
index ae77f6048b..9110740643 100644
--- a/dspace-api/src/main/java/org/dspace/app/mediafilter/BrandText.java
+++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/BrandText.java
@@ -39,7 +39,7 @@ class BrandText {
* its location within a rectangular area.
*
* @param location one of the class location constants e.g. Identifier.BL
- * @param the text associated with the location
+ * @param text text associated with the location
*/
public BrandText(String location, String text) {
this.location = location;
diff --git a/dspace-api/src/main/java/org/dspace/app/packager/Packager.java b/dspace-api/src/main/java/org/dspace/app/packager/Packager.java
index 0e985bd244..21d1562686 100644
--- a/dspace-api/src/main/java/org/dspace/app/packager/Packager.java
+++ b/dspace-api/src/main/java/org/dspace/app/packager/Packager.java
@@ -631,7 +631,7 @@ public class Packager {
//otherwise, just disseminate a single object to a single package file
dip.disseminate(context, dso, pkgParams, pkgFile);
- if (pkgFile != null && pkgFile.exists()) {
+ if (pkgFile.exists()) {
System.out.println("\nCREATED package file: " + pkgFile.getCanonicalPath());
}
}
diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/CollectionAdministratorsRequestItemStrategy.java b/dspace-api/src/main/java/org/dspace/app/requestitem/CollectionAdministratorsRequestItemStrategy.java
new file mode 100644
index 0000000000..135406069a
--- /dev/null
+++ b/dspace-api/src/main/java/org/dspace/app/requestitem/CollectionAdministratorsRequestItemStrategy.java
@@ -0,0 +1,46 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+
+package org.dspace.app.requestitem;
+
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.dspace.content.Collection;
+import org.dspace.content.Item;
+import org.dspace.core.Context;
+import org.dspace.eperson.EPerson;
+import org.springframework.lang.NonNull;
+
+/**
+ * Derive request recipients from groups of the Collection which owns an Item.
+ * The list will include all members of the administrators group. If the
+ * resulting list is empty, delegates to {@link RequestItemHelpdeskStrategy}.
+ *
+ * @author Mark H. Wood
+ */
+public class CollectionAdministratorsRequestItemStrategy
+ extends RequestItemHelpdeskStrategy {
+ @Override
+ @NonNull
+ public List<RequestItemAuthor> getRequestItemAuthor(Context context,
+ Item item)
+ throws SQLException {
+ List<RequestItemAuthor> recipients = new ArrayList<>();
+ Collection collection = item.getOwningCollection();
+ for (EPerson admin : collection.getAdministrators().getMembers()) {
+ recipients.add(new RequestItemAuthor(admin));
+ }
+ if (recipients.isEmpty()) {
+ return super.getRequestItemAuthor(context, item);
+ } else {
+ return recipients;
+ }
+ }
+}
diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/CombiningRequestItemStrategy.java b/dspace-api/src/main/java/org/dspace/app/requestitem/CombiningRequestItemStrategy.java
new file mode 100644
index 0000000000..8292c1a728
--- /dev/null
+++ b/dspace-api/src/main/java/org/dspace/app/requestitem/CombiningRequestItemStrategy.java
@@ -0,0 +1,61 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.app.requestitem;
+
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.dspace.content.Item;
+import org.dspace.core.Context;
+import org.springframework.lang.NonNull;
+import org.springframework.util.Assert;
+
+/**
+ * Assemble a list of recipients from the results of other strategies.
+ * The list of strategy classes is injected as the constructor argument
+ * {@code strategies}.
+ * If the strategy list is not configured, returns an empty List.
+ *
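+ * <p>
+ * A minimal sketch of a possible Spring wiring (the bean definitions are
+ * illustrative only, not a prescribed configuration):
+ * <pre>{@code
+ * <bean class="org.dspace.app.requestitem.CombiningRequestItemStrategy">
+ *   <constructor-arg>
+ *     <list>
+ *       <bean class="org.dspace.app.requestitem.RequestItemMetadataStrategy"/>
+ *       <bean class="org.dspace.app.requestitem.RequestItemHelpdeskStrategy"/>
+ *     </list>
+ *   </constructor-arg>
+ * </bean>
+ * }</pre>
+ *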
+ * @author Mark H. Wood
+ */
+public class CombiningRequestItemStrategy
+ implements RequestItemAuthorExtractor {
+ /** The strategies to combine. */
+ private final List<RequestItemAuthorExtractor> strategies;
+
+ /**
+ * Initialize a combination of strategies.
+ * @param strategies the author extraction strategies to combine.
+ */
+ public CombiningRequestItemStrategy(@NonNull List<RequestItemAuthorExtractor> strategies) {
+ Assert.notNull(strategies, "Strategy list may not be null");
+ this.strategies = strategies;
+ }
+
+ /**
+ * Do not call.
+ * @throws IllegalArgumentException always
+ */
+ private CombiningRequestItemStrategy() {
+ throw new IllegalArgumentException();
+ }
+
+ @Override
+ @NonNull
+ public List<RequestItemAuthor> getRequestItemAuthor(Context context, Item item)
+ throws SQLException {
+ List<RequestItemAuthor> recipients = new ArrayList<>();
+
+ for (RequestItemAuthorExtractor strategy : strategies) {
+ recipients.addAll(strategy.getRequestItemAuthor(context, item));
+ }
+
+ return recipients;
+ }
+}
diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItem.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItem.java
index 9e675e97a7..cdefd1298c 100644
--- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItem.java
+++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItem.java
@@ -27,7 +27,7 @@ import org.dspace.core.Context;
import org.dspace.core.ReloadableEntity;
/**
- * Object representing an Item Request
+ * Object representing an Item Request.
*/
@Entity
@Table(name = "requestitem")
@@ -94,6 +94,9 @@ public class RequestItem implements ReloadableEntity {
this.allfiles = allfiles;
}
+ /**
+ * @return {@code true} if all of the Item's files are requested.
+ */
public boolean isAllfiles() {
return allfiles;
}
@@ -102,6 +105,9 @@ public class RequestItem implements ReloadableEntity {
this.reqMessage = reqMessage;
}
+ /**
+ * @return a message from the requester.
+ */
public String getReqMessage() {
return reqMessage;
}
@@ -110,6 +116,9 @@ public class RequestItem implements ReloadableEntity {
this.reqName = reqName;
}
+ /**
+ * @return Human-readable name of the user requesting access.
+ */
public String getReqName() {
return reqName;
}
@@ -118,6 +127,9 @@ public class RequestItem implements ReloadableEntity {
this.reqEmail = reqEmail;
}
+ /**
+ * @return address of the user requesting access.
+ */
public String getReqEmail() {
return reqEmail;
}
@@ -126,6 +138,9 @@ public class RequestItem implements ReloadableEntity {
this.token = token;
}
+ /**
+ * @return a unique request identifier which can be emailed.
+ */
public String getToken() {
return token;
}
diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthor.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthor.java
index 49e26fe00b..a189e4a5ef 100644
--- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthor.java
+++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthor.java
@@ -11,20 +11,31 @@ import org.dspace.eperson.EPerson;
/**
* Simple DTO to transfer data about the corresponding author for the Request
- * Copy feature
+ * Copy feature.
*
* @author Andrea Bollini
*/
public class RequestItemAuthor {
- private String fullName;
- private String email;
+ private final String fullName;
+ private final String email;
+ /**
+ * Construct an author record from given data.
+ *
+ * @param fullName the author's full name.
+ * @param email the author's email address.
+ */
public RequestItemAuthor(String fullName, String email) {
super();
this.fullName = fullName;
this.email = email;
}
+ /**
+ * Construct an author from an EPerson's metadata.
+ *
+ * @param ePerson the EPerson.
+ */
public RequestItemAuthor(EPerson ePerson) {
super();
this.fullName = ePerson.getFullName();
diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthorExtractor.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthorExtractor.java
index 9b66030e90..bc97bc64bf 100644
--- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthorExtractor.java
+++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthorExtractor.java
@@ -8,26 +8,28 @@
package org.dspace.app.requestitem;
import java.sql.SQLException;
+import java.util.List;
import org.dspace.content.Item;
import org.dspace.core.Context;
+import org.springframework.lang.NonNull;
/**
- * Interface to abstract the strategy for select the author to contact for
- * request copy
+ * Interface to abstract the strategy for selecting the author to contact for
+ * request copy.
*
* @author Andrea Bollini
*/
public interface RequestItemAuthorExtractor {
-
/**
- * Retrieve the auhtor to contact for a request copy of the give item.
+ * Retrieve the author to contact for requesting a copy of the given item.
*
* @param context DSpace context object
* @param item item to request
- * @return An object containing name an email address to send the request to
- * or null if no valid email address was found.
+ * @return Names and email addresses to send the request to.
* @throws SQLException if database error
*/
- public RequestItemAuthor getRequestItemAuthor(Context context, Item item) throws SQLException;
+ @NonNull
+ public List<RequestItemAuthor> getRequestItemAuthor(Context context, Item item)
+ throws SQLException;
}
diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java
index d72e42eac1..02054ee1a0 100644
--- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java
+++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java
@@ -72,28 +72,48 @@ public class RequestItemEmailNotifier {
static public void sendRequest(Context context, RequestItem ri, String responseLink)
throws IOException, SQLException {
// Who is making this request?
- RequestItemAuthor author = requestItemAuthorExtractor
+ List<RequestItemAuthor> authors = requestItemAuthorExtractor
.getRequestItemAuthor(context, ri.getItem());
- String authorEmail = author.getEmail();
- String authorName = author.getFullName();
// Build an email to the approver.
Email email = Email.getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(),
"request_item.author"));
- email.addRecipient(authorEmail);
+ for (RequestItemAuthor author : authors) {
+ email.addRecipient(author.getEmail());
+ }
email.setReplyTo(ri.getReqEmail()); // Requester's address
+
email.addArgument(ri.getReqName()); // {0} Requester's name
+
email.addArgument(ri.getReqEmail()); // {1} Requester's address
+
email.addArgument(ri.isAllfiles() // {2} All bitstreams or just one?
? I18nUtil.getMessage("itemRequest.all") : ri.getBitstream().getName());
- email.addArgument(handleService.getCanonicalForm(ri.getItem().getHandle()));
+
+ email.addArgument(handleService.getCanonicalForm(ri.getItem().getHandle())); // {3}
+
email.addArgument(ri.getItem().getName()); // {4} requested item's title
+
email.addArgument(ri.getReqMessage()); // {5} message from requester
+
email.addArgument(responseLink); // {6} Link back to DSpace for action
- email.addArgument(authorName); // {7} corresponding author name
- email.addArgument(authorEmail); // {8} corresponding author email
- email.addArgument(configurationService.getProperty("dspace.name"));
- email.addArgument(configurationService.getProperty("mail.helpdesk"));
+
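+        // Build semicolon-separated lists of all recipients' names and
+        // addresses for the {7} and {8} arguments.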
+ StringBuilder names = new StringBuilder();
+ StringBuilder addresses = new StringBuilder();
+ for (RequestItemAuthor author : authors) {
+ if (names.length() > 0) {
+ names.append("; ");
+ addresses.append("; ");
+ }
+ names.append(author.getFullName());
+ addresses.append(author.getEmail());
+ }
+ email.addArgument(names.toString()); // {7} corresponding author name
+ email.addArgument(addresses.toString()); // {8} corresponding author email
+
+ email.addArgument(configurationService.getProperty("dspace.name")); // {9}
+
+ email.addArgument(configurationService.getProperty("mail.helpdesk")); // {10}
// Send the email.
try {
diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemHelpdeskStrategy.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemHelpdeskStrategy.java
index 7b63d3ea8d..f440ba380a 100644
--- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemHelpdeskStrategy.java
+++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemHelpdeskStrategy.java
@@ -8,6 +8,8 @@
package org.dspace.app.requestitem;
import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.Item;
@@ -16,11 +18,11 @@ import org.dspace.core.I18nUtil;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.service.EPersonService;
import org.dspace.services.ConfigurationService;
-import org.dspace.services.factory.DSpaceServicesFactory;
import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.lang.NonNull;
/**
- * RequestItem strategy to allow DSpace support team's helpdesk to receive requestItem request
+ * RequestItem strategy to allow DSpace support team's helpdesk to receive requestItem requests.
* With this enabled, then the Item author/submitter doesn't receive the request, but the helpdesk instead does.
*
* Failover to the RequestItemSubmitterStrategy, which means the submitter would get the request if there is no
@@ -33,19 +35,24 @@ public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
@Autowired(required = true)
protected EPersonService ePersonService;
+ @Autowired(required = true)
+ private ConfigurationService configuration;
+
public RequestItemHelpdeskStrategy() {
}
@Override
- public RequestItemAuthor getRequestItemAuthor(Context context, Item item) throws SQLException {
- ConfigurationService configurationService
- = DSpaceServicesFactory.getInstance().getConfigurationService();
- boolean helpdeskOverridesSubmitter = configurationService
+ @NonNull
+ public List<RequestItemAuthor> getRequestItemAuthor(Context context, Item item)
+ throws SQLException {
+ boolean helpdeskOverridesSubmitter = configuration
.getBooleanProperty("request.item.helpdesk.override", false);
- String helpDeskEmail = configurationService.getProperty("mail.helpdesk");
+ String helpDeskEmail = configuration.getProperty("mail.helpdesk");
if (helpdeskOverridesSubmitter && StringUtils.isNotBlank(helpDeskEmail)) {
- return getHelpDeskPerson(context, helpDeskEmail);
+ List<RequestItemAuthor> authors = new ArrayList<>(1);
+ authors.add(getHelpDeskPerson(context, helpDeskEmail));
+ return authors;
} else {
//Fallback to default logic (author of Item) if helpdesk isn't fully enabled or setup
return super.getRequestItemAuthor(context, item);
diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemMetadataStrategy.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemMetadataStrategy.java
index 9838e58697..4372ab9b09 100644
--- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemMetadataStrategy.java
+++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemMetadataStrategy.java
@@ -8,6 +8,8 @@
package org.dspace.app.requestitem;
import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Collections;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
@@ -16,12 +18,13 @@ import org.dspace.content.MetadataValue;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
-import org.dspace.services.factory.DSpaceServicesFactory;
+import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.lang.NonNull;
/**
 * Try to look up the corresponding author name and email in the item metadata.
- * Failover to the RequestItemSubmitterStrategy
+ * Failover to the RequestItemSubmitterStrategy.
*
* @author Andrea Bollini
*/
@@ -30,6 +33,9 @@ public class RequestItemMetadataStrategy extends RequestItemSubmitterStrategy {
protected String emailMetadata;
protected String fullNameMetadata;
+ @Autowired(required = true)
+ protected ConfigurationService configurationService;
+
@Autowired(required = true)
protected ItemService itemService;
@@ -37,59 +43,72 @@ public class RequestItemMetadataStrategy extends RequestItemSubmitterStrategy {
}
@Override
- public RequestItemAuthor getRequestItemAuthor(Context context, Item item)
+ @NonNull
+ public List<RequestItemAuthor> getRequestItemAuthor(Context context, Item item)
throws SQLException {
- RequestItemAuthor author = null;
+ List<RequestItemAuthor> authors;
if (emailMetadata != null) {
List<MetadataValue> vals = itemService.getMetadataByMetadataString(item, emailMetadata);
- if (vals.size() > 0) {
- String email = vals.iterator().next().getValue();
- String fullname = null;
- if (fullNameMetadata != null) {
- List<MetadataValue> nameVals = itemService.getMetadataByMetadataString(item, fullNameMetadata);
- if (nameVals.size() > 0) {
- fullname = nameVals.iterator().next().getValue();
+ List<MetadataValue> nameVals;
+ if (null != fullNameMetadata) {
+ nameVals = itemService.getMetadataByMetadataString(item, fullNameMetadata);
+ } else {
+ nameVals = Collections.EMPTY_LIST;
+ }
+ boolean useNames = vals.size() == nameVals.size();
+ if (!vals.isEmpty()) {
+ authors = new ArrayList<>(vals.size());
+ for (int authorIndex = 0; authorIndex < vals.size(); authorIndex++) {
+ String email = vals.get(authorIndex).getValue();
+ String fullname = null;
+ if (useNames) {
+ fullname = nameVals.get(authorIndex).getValue();
}
+
+ if (StringUtils.isBlank(fullname)) {
+ fullname = I18nUtil.getMessage(
+ "org.dspace.app.requestitem.RequestItemMetadataStrategy.unnamed",
+ context);
+ }
+ RequestItemAuthor author = new RequestItemAuthor(
+ fullname, email);
+ authors.add(author);
}
- if (StringUtils.isBlank(fullname)) {
- fullname = I18nUtil
- .getMessage(
- "org.dspace.app.requestitem.RequestItemMetadataStrategy.unnamed",
- context);
- }
- author = new RequestItemAuthor(fullname, email);
- return author;
+ return authors;
+ } else {
+ return Collections.EMPTY_LIST;
}
} else {
// Uses the basic strategy to look for the original submitter
- author = super.getRequestItemAuthor(context, item);
- // Is the author or his email null, so get the help desk or admin name and email
- if (null == author || null == author.getEmail()) {
- String email = null;
- String name = null;
+ authors = super.getRequestItemAuthor(context, item);
+
+ // Remove authors that do not have email addresses (removeIf avoids the
+ // ConcurrentModificationException that removing inside a for-each loop would cause).
+ authors.removeIf(author -> null == author.getEmail());
+
+ if (authors.isEmpty()) { // No author email addresses! Fall back
//First get help desk name and email
- email = DSpaceServicesFactory.getInstance()
- .getConfigurationService().getProperty("mail.helpdesk");
- name = DSpaceServicesFactory.getInstance()
- .getConfigurationService().getProperty("mail.helpdesk.name");
+ String email = configurationService.getProperty("mail.helpdesk");
+ String name = configurationService.getProperty("mail.helpdesk.name");
// If help desk mail is null get the mail and name of admin
if (email == null) {
- email = DSpaceServicesFactory.getInstance()
- .getConfigurationService().getProperty("mail.admin");
- name = DSpaceServicesFactory.getInstance()
- .getConfigurationService().getProperty("mail.admin.name");
+ email = configurationService.getProperty("mail.admin");
+ name = configurationService.getProperty("mail.admin.name");
}
- author = new RequestItemAuthor(name, email);
+ authors.add(new RequestItemAuthor(name, email));
}
+ return authors;
}
- return author;
}
- public void setEmailMetadata(String emailMetadata) {
+ public void setEmailMetadata(@NonNull String emailMetadata) {
 this.emailMetadata = emailMetadata;
 }
- public void setFullNameMetadata(String fullNameMetadata) {
+ public void setFullNameMetadata(@NonNull String fullNameMetadata) {
 this.fullNameMetadata = fullNameMetadata;
}
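For context, this strategy is normally configured as a Spring bean; expressed directly in Java, the setup and the new multi-recipient behaviour look roughly like the sketch below (the metadata field names are purely illustrative, not taken from this patch):

    RequestItemMetadataStrategy strategy = new RequestItemMetadataStrategy();
    strategy.setEmailMetadata("local.contact.email");       // hypothetical field holding addresses
    strategy.setFullNameMetadata("dc.contributor.author");  // hypothetical field holding names
    // One RequestItemAuthor per email value; names are paired with emails only when both
    // fields have the same number of values, otherwise the "unnamed" fallback message is used.
    List<RequestItemAuthor> authors = strategy.getRequestItemAuthor(context, item);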
diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemSubmitterStrategy.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemSubmitterStrategy.java
index 2708c24ba9..dcc1a3e80e 100644
--- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemSubmitterStrategy.java
+++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemSubmitterStrategy.java
@@ -8,10 +8,13 @@
package org.dspace.app.requestitem;
import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
+import org.springframework.lang.NonNull;
/**
* Basic strategy that looks to the original submitter.
@@ -24,21 +27,23 @@ public class RequestItemSubmitterStrategy implements RequestItemAuthorExtractor
}
/**
- * Returns the submitter of an Item as RequestItemAuthor or null if the
- * Submitter is deleted.
+ * Returns the submitter of an Item as RequestItemAuthor or an empty List if
+ * the Submitter is deleted.
*
- * @return The submitter of the item or null if the submitter is deleted
+ * @return The submitter of the item, or an empty List if the submitter is deleted
* @throws SQLException if database error
*/
@Override
- public RequestItemAuthor getRequestItemAuthor(Context context, Item item)
+ @NonNull
+ public List<RequestItemAuthor> getRequestItemAuthor(Context context, Item item)
throws SQLException {
EPerson submitter = item.getSubmitter();
- RequestItemAuthor author = null;
+ List<RequestItemAuthor> authors = new ArrayList<>(1);
if (null != submitter) {
- author = new RequestItemAuthor(
- submitter.getFullName(), submitter.getEmail());
+ RequestItemAuthor author = new RequestItemAuthor(
+ submitter.getFullName(), submitter.getEmail());
+ authors.add(author);
}
- return author;
+ return authors;
}
}
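In other words, a deleted submitter is now signalled by an empty list rather than null, so callers check isEmpty() instead of null-guarding; a one-line illustration (strategy, context and item assumed in scope):

    List<RequestItemAuthor> authors = strategy.getRequestItemAuthor(context, item);
    if (authors.isEmpty()) {
        // submitter was deleted; fall back to another strategy or report the condition
    }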
diff --git a/dspace-api/src/main/java/org/dspace/app/statistics/LogAnalyser.java b/dspace-api/src/main/java/org/dspace/app/statistics/LogAnalyser.java
index 264fb1b317..2e4ed69b26 100644
--- a/dspace-api/src/main/java/org/dspace/app/statistics/LogAnalyser.java
+++ b/dspace-api/src/main/java/org/dspace/app/statistics/LogAnalyser.java
@@ -29,6 +29,10 @@ import java.util.TimeZone;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.commons.lang3.StringUtils;
import org.dspace.core.Context;
import org.dspace.core.LogHelper;
@@ -44,6 +48,7 @@ import org.dspace.services.factory.DSpaceServicesFactory;
* files. Most input can be configured; use the -help flag for a full list
* of usage information.
*
+ *
* The output of this file is plain text and forms an "aggregation" file which
* can then be used for display purposes using the related ReportGenerator
* class.
@@ -167,7 +172,7 @@ public class LogAnalyser {
/**
* the average number of views per item
*/
- private static int views = 0;
+ private static long views = 0;
///////////////////////
// regular expressions
@@ -236,12 +241,12 @@ public class LogAnalyser {
/**
* pattern to match commented out lines from the config file
*/
- private static final Pattern comment = Pattern.compile("^#");
+ private static final Pattern COMMENT = Pattern.compile("^#");
/**
* pattern to match genuine lines from the config file
*/
- private static final Pattern real = Pattern.compile("^(.+)=(.+)");
+ private static final Pattern REAL = Pattern.compile("^(.+)=(.+)");
/**
* pattern to match all search types
@@ -337,44 +342,73 @@ public class LogAnalyser {
Date myEndDate = null;
boolean myLookUp = false;
- // read in our command line options
- for (int i = 0; i < argv.length; i++) {
- if (argv[i].equals("-log")) {
- myLogDir = argv[i + 1];
- }
+ // Define command line options.
+ Options options = new Options();
+ Option option;
- if (argv[i].equals("-file")) {
- myFileTemplate = argv[i + 1];
- }
+ option = Option.builder().longOpt("log").hasArg().build();
+ options.addOption(option);
- if (argv[i].equals("-cfg")) {
- myConfigFile = argv[i + 1];
- }
+ option = Option.builder().longOpt("file").hasArg().build();
+ options.addOption(option);
- if (argv[i].equals("-out")) {
- myOutFile = argv[i + 1];
- }
+ option = Option.builder().longOpt("cfg").hasArg().build();
+ options.addOption(option);
- if (argv[i].equals("-help")) {
- LogAnalyser.usage();
- System.exit(0);
- }
+ option = Option.builder().longOpt("out").hasArg().build();
+ options.addOption(option);
- if (argv[i].equals("-start")) {
- myStartDate = parseDate(argv[i + 1]);
- }
+ option = Option.builder().longOpt("help").build();
+ options.addOption(option);
- if (argv[i].equals("-end")) {
- myEndDate = parseDate(argv[i + 1]);
- }
+ option = Option.builder().longOpt("start").hasArg().build();
+ options.addOption(option);
- if (argv[i].equals("-lookup")) {
- myLookUp = true;
- }
+ option = Option.builder().longOpt("end").hasArg().build();
+ options.addOption(option);
+
+ option = Option.builder().longOpt("lookup").build();
+ options.addOption(option);
+
+ // Parse the command.
+ DefaultParser cmdParser = new DefaultParser();
+ CommandLine cmd = cmdParser.parse(options, argv);
+
+ // Analyze the command.
+ if (cmd.hasOption("help")) {
+ LogAnalyser.usage();
+ System.exit(0);
}
+ if (cmd.hasOption("log")) {
+ myLogDir = cmd.getOptionValue("log");
+ }
+
+ if (cmd.hasOption("file")) {
+ myFileTemplate = cmd.getOptionValue("file");
+ }
+
+ if (cmd.hasOption("cfg")) {
+ myConfigFile = cmd.getOptionValue("cfg");
+ }
+
+ if (cmd.hasOption("out")) {
+ myOutFile = cmd.getOptionValue("out");
+ }
+
+ if (cmd.hasOption("start")) {
+ myStartDate = parseDate(cmd.getOptionValue("start"));
+ }
+
+ if (cmd.hasOption("end")) {
+ myEndDate = parseDate(cmd.getOptionValue("end"));
+ }
+
+ myLookUp = cmd.hasOption("lookup");
+
// now call the method which actually processes the logs
- processLogs(context, myLogDir, myFileTemplate, myConfigFile, myOutFile, myStartDate, myEndDate, myLookUp);
+ processLogs(context, myLogDir, myFileTemplate, myConfigFile, myOutFile,
+ myStartDate, myEndDate, myLookUp);
}
/**
@@ -406,18 +440,18 @@ public class LogAnalyser {
startTime = new GregorianCalendar();
//instantiate aggregators
- actionAggregator = new HashMap();
- searchAggregator = new HashMap();
- userAggregator = new HashMap();
- itemAggregator = new HashMap();
- archiveStats = new HashMap();
+ actionAggregator = new HashMap<>();
+ searchAggregator = new HashMap<>();
+ userAggregator = new HashMap<>();
+ itemAggregator = new HashMap<>();
+ archiveStats = new HashMap<>();
//instantiate lists
- generalSummary = new ArrayList();
- excludeWords = new ArrayList();
- excludeTypes = new ArrayList();
- excludeChars = new ArrayList();
- itemTypes = new ArrayList();
+ generalSummary = new ArrayList<>();
+ excludeWords = new ArrayList<>();
+ excludeTypes = new ArrayList<>();
+ excludeChars = new ArrayList<>();
+ itemTypes = new ArrayList<>();
// set the parameters for this analysis
setParameters(myLogDir, myFileTemplate, myConfigFile, myOutFile, myStartDate, myEndDate, myLookUp);
@@ -529,10 +563,11 @@ public class LogAnalyser {
// for each search word add to the aggregator or
// increment the aggregator's counter
- for (int j = 0; j < words.length; j++) {
+ for (String word : words) {
// FIXME: perhaps aggregators ought to be objects
// themselves
- searchAggregator.put(words[j], increment(searchAggregator, words[j]));
+ searchAggregator.put(word,
+ increment(searchAggregator, word));
}
}
@@ -591,13 +626,13 @@ public class LogAnalyser {
}
// do the average views analysis
- if ((archiveStats.get("All Items")).intValue() != 0) {
+ if ((archiveStats.get("All Items")) != 0) {
// FIXME: this is dependent on their being a query on the db, which
// there might not always be if it becomes configurable
- Double avg = Math.ceil(
+ double avg = Math.ceil(
(actionAggregator.get("view_item")).doubleValue() /
(archiveStats.get("All Items")).doubleValue());
- views = avg.intValue();
+ views = Math.round(avg);
}
// finally, write the output
@@ -672,55 +707,55 @@ public class LogAnalyser {
Iterator<String> keys = null;
// output the number of lines parsed
- summary.append("log_lines=" + Integer.toString(lineCount) + "\n");
+ summary.append("log_lines=").append(Integer.toString(lineCount)).append("\n");
// output the number of warnings encountered
- summary.append("warnings=" + Integer.toString(warnCount) + "\n");
- summary.append("exceptions=" + Integer.toString(excCount) + "\n");
+ summary.append("warnings=").append(Integer.toString(warnCount)).append("\n");
+ summary.append("exceptions=").append(Integer.toString(excCount)).append("\n");
// set the general summary config up in the aggregator file
for (int i = 0; i < generalSummary.size(); i++) {
- summary.append("general_summary=" + generalSummary.get(i) + "\n");
+ summary.append("general_summary=").append(generalSummary.get(i)).append("\n");
}
// output the host name
- summary.append("server_name=" + hostName + "\n");
+ summary.append("server_name=").append(hostName).append("\n");
// output the service name
- summary.append("service_name=" + name + "\n");
+ summary.append("service_name=").append(name).append("\n");
// output the date information if necessary
SimpleDateFormat sdf = new SimpleDateFormat("dd'/'MM'/'yyyy");
if (startDate != null) {
- summary.append("start_date=" + sdf.format(startDate) + "\n");
+ summary.append("start_date=").append(sdf.format(startDate)).append("\n");
} else if (logStartDate != null) {
- summary.append("start_date=" + sdf.format(logStartDate) + "\n");
+ summary.append("start_date=").append(sdf.format(logStartDate)).append("\n");
}
if (endDate != null) {
- summary.append("end_date=" + sdf.format(endDate) + "\n");
+ summary.append("end_date=").append(sdf.format(endDate)).append("\n");
} else if (logEndDate != null) {
- summary.append("end_date=" + sdf.format(logEndDate) + "\n");
+ summary.append("end_date=").append(sdf.format(logEndDate)).append("\n");
}
// write out the archive stats
keys = archiveStats.keySet().iterator();
while (keys.hasNext()) {
String key = keys.next();
- summary.append("archive." + key + "=" + archiveStats.get(key) + "\n");
+ summary.append("archive.").append(key).append("=").append(archiveStats.get(key)).append("\n");
}
// write out the action aggregation results
keys = actionAggregator.keySet().iterator();
while (keys.hasNext()) {
String key = keys.next();
- summary.append("action." + key + "=" + actionAggregator.get(key) + "\n");
+ summary.append("action.").append(key).append("=").append(actionAggregator.get(key)).append("\n");
}
// depending on the config settings for reporting on emails output the
// login information
- summary.append("user_email=" + userEmail + "\n");
+ summary.append("user_email=").append(userEmail).append("\n");
int address = 1;
keys = userAggregator.keySet().iterator();
@@ -731,9 +766,10 @@ public class LogAnalyser {
String key = keys.next();
summary.append("user.");
if (userEmail.equals("on")) {
- summary.append(key + "=" + userAggregator.get(key) + "\n");
+ summary.append(key).append("=").append(userAggregator.get(key)).append("\n");
} else if (userEmail.equals("alias")) {
- summary.append("Address " + Integer.toString(address++) + "=" + userAggregator.get(key) + "\n");
+ summary.append("Address ").append(Integer.toString(address++))
+ .append("=").append(userAggregator.get(key)).append("\n");
}
}
@@ -742,12 +778,13 @@ public class LogAnalyser {
// the listing there are
// output the search word information
- summary.append("search_floor=" + searchFloor + "\n");
+ summary.append("search_floor=").append(searchFloor).append("\n");
keys = searchAggregator.keySet().iterator();
while (keys.hasNext()) {
String key = keys.next();
- if ((searchAggregator.get(key)).intValue() >= searchFloor) {
- summary.append("search." + key + "=" + searchAggregator.get(key) + "\n");
+ if ((searchAggregator.get(key)) >= searchFloor) {
+ summary.append("search.").append(key).append("=")
+ .append(searchAggregator.get(key)).append("\n");
}
}
@@ -759,35 +796,35 @@ public class LogAnalyser {
// be the same thing.
// item viewing information
- summary.append("item_floor=" + itemFloor + "\n");
- summary.append("host_url=" + url + "\n");
- summary.append("item_lookup=" + itemLookup + "\n");
+ summary.append("item_floor=").append(itemFloor).append("\n");
+ summary.append("host_url=").append(url).append("\n");
+ summary.append("item_lookup=").append(itemLookup).append("\n");
// write out the item access information
keys = itemAggregator.keySet().iterator();
while (keys.hasNext()) {
String key = keys.next();
- if ((itemAggregator.get(key)).intValue() >= itemFloor) {
- summary.append("item." + key + "=" + itemAggregator.get(key) + "\n");
+ if ((itemAggregator.get(key)) >= itemFloor) {
+ summary.append("item.").append(key).append("=")
+ .append(itemAggregator.get(key)).append("\n");
}
}
// output the average views per item
if (views > 0) {
- summary.append("avg_item_views=" + views + "\n");
+ summary.append("avg_item_views=").append(views).append("\n");
}
// insert the analysis processing time information
Calendar endTime = new GregorianCalendar();
long timeInMillis = (endTime.getTimeInMillis() - startTime.getTimeInMillis());
- summary.append("analysis_process_time=" + Long.toString(timeInMillis / 1000) + "\n");
+ summary.append("analysis_process_time=")
+ .append(Long.toString(timeInMillis / 1000)).append("\n");
// finally write the string into the output file
- try {
- BufferedWriter out = new BufferedWriter(new FileWriter(outFile));
+ try (BufferedWriter out = new BufferedWriter(new FileWriter(outFile));) {
out.write(summary.toString());
out.flush();
- out.close();
} catch (IOException e) {
System.out.println("Unable to write to output file " + outFile);
System.exit(0);
@@ -891,11 +928,11 @@ public class LogAnalyser {
if (i > 0) {
wordRXString.append("|");
}
- wordRXString.append(" " + excludeWords.get(i) + " ");
+ wordRXString.append(" ").append(excludeWords.get(i)).append(" ");
wordRXString.append("|");
- wordRXString.append("^" + excludeWords.get(i) + " ");
+ wordRXString.append("^").append(excludeWords.get(i)).append(" ");
wordRXString.append("|");
- wordRXString.append(" " + excludeWords.get(i) + "$");
+ wordRXString.append(" ").append(excludeWords.get(i)).append("$");
}
wordRXString.append(")");
wordRX = Pattern.compile(wordRXString.toString());
@@ -956,8 +993,8 @@ public class LogAnalyser {
// read in the config file and set up our instance variables
while ((record = br.readLine()) != null) {
// check to see what kind of line we have
- Matcher matchComment = comment.matcher(record);
- Matcher matchReal = real.matcher(record);
+ Matcher matchComment = COMMENT.matcher(record);
+ Matcher matchReal = REAL.matcher(record);
// if the line is not a comment and is real, read it in
if (!matchComment.matches() && matchReal.matches()) {
@@ -968,7 +1005,7 @@ public class LogAnalyser {
// read the config values into our instance variables (see
// documentation for more info on config params)
if (key.equals("general.summary")) {
- actionAggregator.put(value, Integer.valueOf(0));
+ actionAggregator.put(value, 0);
generalSummary.add(value);
}
@@ -1022,9 +1059,9 @@ public class LogAnalyser {
Integer newValue = null;
if (map.containsKey(key)) {
// FIXME: this seems like a ridiculous way to add Integers
- newValue = Integer.valueOf((map.get(key)).intValue() + 1);
+ newValue = (map.get(key)) + 1;
} else {
- newValue = Integer.valueOf(1);
+ newValue = 1;
}
return newValue;
}
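Both LogAnalyser above and ReportGenerator below now delegate argument handling to Commons CLI instead of scanning argv by hand. Reduced to its essentials, the pattern is (option names as defined above; error handling elided, so the surrounding method must declare or catch ParseException as main() does here):

    Options options = new Options();
    options.addOption(Option.builder().longOpt("start").hasArg().build());
    options.addOption(Option.builder().longOpt("help").build());

    CommandLine cmd = new DefaultParser().parse(options, argv); // throws ParseException on bad input
    if (cmd.hasOption("help")) {
        LogAnalyser.usage();
        System.exit(0);
    }
    Date myStartDate = cmd.hasOption("start") ? parseDate(cmd.getOptionValue("start")) : null;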
diff --git a/dspace-api/src/main/java/org/dspace/app/statistics/ReportGenerator.java b/dspace-api/src/main/java/org/dspace/app/statistics/ReportGenerator.java
index 25c6d8cb9c..c5fe0072f5 100644
--- a/dspace-api/src/main/java/org/dspace/app/statistics/ReportGenerator.java
+++ b/dspace-api/src/main/java/org/dspace/app/statistics/ReportGenerator.java
@@ -27,6 +27,10 @@ import java.util.StringTokenizer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.dspace.content.Item;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataValue;
@@ -162,7 +166,7 @@ public class ReportGenerator {
/**
* pattern that matches an unqualified aggregator property
*/
- private static final Pattern real = Pattern.compile("^(.+)=(.+)");
+ private static final Pattern REAL = Pattern.compile("^(.+)=(.+)");
//////////////////////////
// Miscellaneous variables
@@ -221,28 +225,46 @@ public class ReportGenerator {
String myOutput = null;
String myMap = null;
- // read in our command line options
- for (int i = 0; i < argv.length; i++) {
- if (argv[i].equals("-format")) {
- myFormat = argv[i + 1].toLowerCase();
- }
+ Options options = new Options();
+ Option option;
- if (argv[i].equals("-in")) {
- myInput = argv[i + 1];
- }
+ option = Option.builder().longOpt("format").hasArg().build();
+ options.addOption(option);
- if (argv[i].equals("-out")) {
- myOutput = argv[i + 1];
- }
+ option = Option.builder().longOpt("in").hasArg().build();
+ options.addOption(option);
- if (argv[i].equals("-map")) {
- myMap = argv[i + 1];
- }
+ option = Option.builder().longOpt("out").hasArg().build();
+ options.addOption(option);
- if (argv[i].equals("-help")) {
- usage();
- System.exit(0);
- }
+ option = Option.builder().longOpt("map").hasArg().build();
+ options.addOption(option);
+
+ option = Option.builder().longOpt("help").build();
+ options.addOption(option);
+
+ DefaultParser parser = new DefaultParser();
+ CommandLine cmd = parser.parse(options, argv);
+
+ if (cmd.hasOption("help")) {
+ usage();
+ System.exit(0);
+ }
+
+ if (cmd.hasOption("format")) {
+ myFormat = cmd.getOptionValue("format");
+ }
+
+ if (cmd.hasOption("in")) {
+ myInput = cmd.getOptionValue("in");
+ }
+
+ if (cmd.hasOption("out")) {
+ myOutput = cmd.getOptionValue("out");
+ }
+
+ if (cmd.hasOption("map")) {
+ myMap = cmd.getOptionValue("map");
}
processReport(context, myFormat, myInput, myOutput, myMap);
@@ -576,7 +598,7 @@ public class ReportGenerator {
// loop through the map file and read in the values
while ((record = br.readLine()) != null) {
- Matcher matchReal = real.matcher(record);
+ Matcher matchReal = REAL.matcher(record);
// if the line is real then read it in
if (matchReal.matches()) {
@@ -650,7 +672,7 @@ public class ReportGenerator {
// loop through the aggregator file and read in the values
while ((record = br.readLine()) != null) {
// match real lines
- Matcher matchReal = real.matcher(record);
+ Matcher matchReal = REAL.matcher(record);
// pre-prepare our input strings
String section = null;
diff --git a/dspace-api/src/main/java/org/dspace/app/statistics/StatisticsLoader.java b/dspace-api/src/main/java/org/dspace/app/statistics/StatisticsLoader.java
index fd72b3b805..cc8a7024f1 100644
--- a/dspace-api/src/main/java/org/dspace/app/statistics/StatisticsLoader.java
+++ b/dspace-api/src/main/java/org/dspace/app/statistics/StatisticsLoader.java
@@ -324,11 +324,7 @@ public class StatisticsLoader {
ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
File reportDir = new File(configurationService.getProperty("log.report.dir"));
- if (reportDir != null) {
- return reportDir.listFiles(new AnalysisAndReportFilter());
- }
-
- return null;
+ return reportDir.listFiles(new AnalysisAndReportFilter());
}
/**
diff --git a/dspace-api/src/main/java/org/dspace/app/util/DCInput.java b/dspace-api/src/main/java/org/dspace/app/util/DCInput.java
index f9fc97ec09..d2830bf89a 100644
--- a/dspace-api/src/main/java/org/dspace/app/util/DCInput.java
+++ b/dspace-api/src/main/java/org/dspace/app/util/DCInput.java
@@ -144,8 +144,8 @@ public class DCInput {
private boolean isMetadataField = false;
private String relationshipType = null;
private String searchConfiguration = null;
- private String filter;
- private List<String> externalSources;
+ private final String filter;
+ private final List<String> externalSources;
/**
* The scope of the input sets, this restricts hidden metadata fields from
@@ -213,7 +213,7 @@ public class DCInput {
|| "yes".equalsIgnoreCase(closedVocabularyStr);
// parsing of the element (using the colon as split separator)
- typeBind = new ArrayList();
+ typeBind = new ArrayList<>();
String typeBindDef = fieldMap.get("type-bind");
if (typeBindDef != null && typeBindDef.trim().length() > 0) {
String[] types = typeBindDef.split(",");
@@ -553,7 +553,7 @@ public class DCInput {
}
}
} catch (PatternSyntaxException ex) {
- log.error("Regex validation failed!", ex.getMessage());
+ log.error("Regex validation failed! {}", ex.getMessage());
}
}
@@ -571,18 +571,22 @@ public class DCInput {
}
/**
- * Verify whether the current field contains an entity relationship
- * This also implies a relationship type is defined for this field
- * The field can contain both an entity relationship and a metadata field simultaneously
+ * Verify whether the current field contains an entity relationship.
+ * This also implies a relationship type is defined for this field.
+ * The field can contain both an entity relationship and a metadata field
+ * simultaneously.
+ * @return true if the field contains a relationship.
*/
public boolean isRelationshipField() {
return isRelationshipField;
}
/**
- * Verify whether the current field contains a metadata field
- * This also implies a field type is defined for this field
- * The field can contain both an entity relationship and a metadata field simultaneously
+ * Verify whether the current field contains a metadata field.
+ * This also implies a field type is defined for this field.
+ * The field can contain both an entity relationship and a metadata field
+ * simultaneously.
+ * @return true if the field contains a metadata field.
*/
public boolean isMetadataField() {
return isMetadataField;
diff --git a/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java b/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java
index 2359bf1bff..e903c11b13 100644
--- a/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java
+++ b/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java
@@ -17,7 +17,6 @@ import org.dspace.core.Utils;
* Class representing all DC inputs required for a submission, organized into pages
*
* @author Brian S. Hughes, based on work by Jenny Toves, OCLC
- * @version $Revision$
*/
public class DCInputSet {
@@ -34,7 +33,6 @@ public class DCInputSet {
* constructor
*
* @param formName form name
- * @param mandatoryFlags
* @param rows the rows
* @param listMap map
*/
diff --git a/dspace-api/src/main/java/org/dspace/app/util/GoogleMetadata.java b/dspace-api/src/main/java/org/dspace/app/util/GoogleMetadata.java
index 72e2af409f..c4f3f2235e 100644
--- a/dspace-api/src/main/java/org/dspace/app/util/GoogleMetadata.java
+++ b/dspace-api/src/main/java/org/dspace/app/util/GoogleMetadata.java
@@ -470,11 +470,7 @@ public class GoogleMetadata {
parsedOptions.add(parsedFields);
}
- if (null != parsedOptions) {
- return parsedOptions;
- } else {
- return null;
- }
+ return parsedOptions;
}
/**
diff --git a/dspace-api/src/main/java/org/dspace/app/util/OptimizeSelectCollection.java b/dspace-api/src/main/java/org/dspace/app/util/OptimizeSelectCollection.java
index 1e018ff889..5dd286726d 100644
--- a/dspace-api/src/main/java/org/dspace/app/util/OptimizeSelectCollection.java
+++ b/dspace-api/src/main/java/org/dspace/app/util/OptimizeSelectCollection.java
@@ -11,7 +11,6 @@ import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
-import org.apache.logging.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
@@ -23,12 +22,12 @@ import org.dspace.eperson.service.EPersonService;
import org.springframework.util.StopWatch;
/**
+ * A command line tool to verify/test the accuracy and speed gains of
+ * {@link CollectionService#findAuthorizedOptimized}.
+ * Invocation: {@code dsrun org.dspace.app.util.OptimizeSelectCollection}
* @author peterdietz
- * A command line tool to verify/test the accuracy and speed gains of Collection.findAuthorizedOptimized()
- * Invocation: dsrun org.dspace.app.util.OptimizeSelectCollection
*/
public class OptimizeSelectCollection {
- private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(OptimizeSelectCollection.class);
private static Context context;
private static ArrayList<EPerson> brokenPeople;
@@ -49,7 +48,7 @@ public class OptimizeSelectCollection {
"values as the legacy select-collection logic.");
context = new Context();
- brokenPeople = new ArrayList();
+ brokenPeople = new ArrayList<>();
int peopleChecked = 0;
timeSavedMS = 0L;
@@ -68,7 +67,7 @@ public class OptimizeSelectCollection {
}
}
- if (brokenPeople.size() > 0) {
+ if (!brokenPeople.isEmpty()) {
System.out.println("NOT DONE YET!!! Some people don't have all their collections.");
for (EPerson person : brokenPeople) {
System.out.println("-- " + person.getEmail());
@@ -90,7 +89,7 @@ public class OptimizeSelectCollection {
stopWatch.start("findAuthorized");
List<Collection> collections = collectionService.findAuthorized(context, null, Constants.ADD);
stopWatch.stop();
- Long defaultMS = stopWatch.getLastTaskTimeMillis();
+ long defaultMS = stopWatch.getLastTaskTimeMillis();
stopWatch.start("ListingCollections");
System.out.println("Legacy Find Authorized");
@@ -100,7 +99,7 @@ public class OptimizeSelectCollection {
stopWatch.start("findAuthorizedOptimized");
List<Collection> collectionsOptimized = collectionService.findAuthorizedOptimized(context, Constants.ADD);
stopWatch.stop();
- Long optimizedMS = stopWatch.getLastTaskTimeMillis();
+ long optimizedMS = stopWatch.getLastTaskTimeMillis();
timeSavedMS += defaultMS - optimizedMS;
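As the corrected class comment notes, the check can be launched with DSpace's script runner, for example [dspace]/bin/dspace dsrun org.dspace.app.util.OptimizeSelectCollection; it prints the legacy and optimized timings for each EPerson checked and lists anyone whose two collection sets do not match.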
diff --git a/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java b/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java
index 7871518b93..8f155b6330 100644
--- a/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java
+++ b/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java
@@ -420,7 +420,7 @@ public class SyndicationFeed {
// with length of song in seconds
if (extent != null && extent.length() > 0) {
extent = extent.split(" ")[0];
- Integer duration = Integer.parseInt(extent);
+ long duration = Long.parseLong(extent);
itunes.setDuration(new Duration(duration)); //
}
diff --git a/dspace-api/src/main/java/org/dspace/browse/BrowseDAO.java b/dspace-api/src/main/java/org/dspace/browse/BrowseDAO.java
index 22cf02fe13..29debf64e2 100644
--- a/dspace-api/src/main/java/org/dspace/browse/BrowseDAO.java
+++ b/dspace-api/src/main/java/org/dspace/browse/BrowseDAO.java
@@ -346,7 +346,7 @@ public interface BrowseDAO {
public String getFilterValueField();
/**
- * Set he name of the field in which the value to constrain results is
+ * Set the name of the field in which the value to constrain results is
* contained
*
* @param valueField the name of the field
diff --git a/dspace-api/src/main/java/org/dspace/browse/BrowseEngine.java b/dspace-api/src/main/java/org/dspace/browse/BrowseEngine.java
index 099c45d3ad..6f0235e6c1 100644
--- a/dspace-api/src/main/java/org/dspace/browse/BrowseEngine.java
+++ b/dspace-api/src/main/java/org/dspace/browse/BrowseEngine.java
@@ -203,7 +203,12 @@ public class BrowseEngine {
// get the table name that we are going to be getting our data from
dao.setTable(browseIndex.getTableName());
- dao.setStartsWith(StringUtils.lowerCase(scope.getStartsWith()));
+ if (scope.getBrowseIndex() != null && OrderFormat.TITLE.equals(scope.getBrowseIndex().getDataType())) {
+ // For browsing by title, apply the same normalization applied to indexed titles
+ dao.setStartsWith(normalizeJumpToValue(scope.getStartsWith()));
+ } else {
+ dao.setStartsWith(StringUtils.lowerCase(scope.getStartsWith()));
+ }
// tell the browse query whether we are ascending or descending on the value
dao.setAscending(scope.isAscending());
@@ -290,7 +295,7 @@ public class BrowseEngine {
// now, if we don't have any results, we are at the end of the browse. This will
// be because a starts_with value has been supplied for which we don't have
// any items.
- if (results.size() == 0) {
+ if (results.isEmpty()) {
// In this case, we will calculate a new offset for the last page of results
offset = total - scope.getResultsPerPage();
if (offset < 0) {
@@ -450,7 +455,7 @@ public class BrowseEngine {
// now, if we don't have any results, we are at the end of the browse. This will
// be because a starts_with value has been supplied for which we don't have
// any items.
- if (results.size() == 0) {
+ if (results.isEmpty()) {
// In this case, we will calculate a new offset for the last page of results
offset = total - scope.getResultsPerPage();
if (offset < 0) {
@@ -463,7 +468,7 @@ public class BrowseEngine {
}
} else {
// No records, so make an empty list
- results = new ArrayList();
+ results = new ArrayList<>();
}
// construct the BrowseInfo object to pass back
@@ -554,7 +559,7 @@ public class BrowseEngine {
}
String col = "sort_1";
- if (so.getNumber() > 0) {
+ if (so != null && so.getNumber() > 0) {
col = "sort_" + Integer.toString(so.getNumber());
}
@@ -591,7 +596,7 @@ public class BrowseEngine {
}
String col = "sort_1";
- if (so.getNumber() > 0) {
+ if (so != null && so.getNumber() > 0) {
col = "sort_" + Integer.toString(so.getNumber());
}
diff --git a/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java b/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java
index 859063272a..8d065c21ce 100644
--- a/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java
+++ b/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java
@@ -313,14 +313,6 @@ public final class BrowseIndex {
return name;
}
- /**
- * @param name The name to set.
- */
-// public void setName(String name)
-// {
-// this.name = name;
-// }
-
/**
* Get the SortOption associated with this index.
*
diff --git a/dspace-api/src/main/java/org/dspace/browse/ItemListConfig.java b/dspace-api/src/main/java/org/dspace/browse/ItemListConfig.java
index 9cbbe8f194..6a63659c82 100644
--- a/dspace-api/src/main/java/org/dspace/browse/ItemListConfig.java
+++ b/dspace-api/src/main/java/org/dspace/browse/ItemListConfig.java
@@ -25,22 +25,7 @@ public class ItemListConfig {
/**
* a map of column number to metadata value
*/
- private Map metadata = new HashMap();
-
- /**
- * a map of column number to data type
- */
- private Map types = new HashMap();
-
- /**
- * constant for a DATE column
- */
- private static final int DATE = 1;
-
- /**
- * constant for a TEXT column
- */
- private static final int TEXT = 2;
+ private Map<Integer, String[]> metadata = new HashMap<>();
private final transient ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
@@ -63,14 +48,11 @@ public class ItemListConfig {
// parse the config
int i = 1;
for (String token : browseFields) {
- Integer key = Integer.valueOf(i);
+ Integer key = i;
// find out if the field is a date
if (token.indexOf("(date)") > 0) {
token = token.replaceAll("\\(date\\)", "");
- types.put(key, Integer.valueOf(ItemListConfig.DATE));
- } else {
- types.put(key, Integer.valueOf(ItemListConfig.TEXT));
}
String[] mdBits = interpretField(token.trim(), null);
@@ -100,7 +82,7 @@ public class ItemListConfig {
* @return array of metadata
*/
public String[] getMetadata(int col) {
- return metadata.get(Integer.valueOf(col));
+ return metadata.get(col);
}
/**
diff --git a/dspace-api/src/main/java/org/dspace/content/LicenseUtils.java b/dspace-api/src/main/java/org/dspace/content/LicenseUtils.java
index be804a9bbb..673a30d2dd 100644
--- a/dspace-api/src/main/java/org/dspace/content/LicenseUtils.java
+++ b/dspace-api/src/main/java/org/dspace/content/LicenseUtils.java
@@ -59,7 +59,7 @@ public class LicenseUtils {
* {6} the eperson object that will be formatted using the appropriate
* LicenseArgumentFormatter plugin (if defined)
 * {x} any additional argument supplied wrapped in the
- * LicenseArgumentFormatter based on his type (map key)
+ * LicenseArgumentFormatter based on its type (map key)
*
* @param locale Formatter locale
* @param collection collection to get license from
diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/MODSDisseminationCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/MODSDisseminationCrosswalk.java
index 57202f656e..1e63be5ba1 100644
--- a/dspace-api/src/main/java/org/dspace/content/crosswalk/MODSDisseminationCrosswalk.java
+++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/MODSDisseminationCrosswalk.java
@@ -417,9 +417,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
String title = site.getName();
String url = site.getURL();
- if (identifier_uri != null) {
- metadata.add(createDCValue("identifier.uri", null, identifier_uri));
- }
+ metadata.add(createDCValue("identifier.uri", null, identifier_uri));
//FIXME: adding two URIs for now (site handle and URL), in case site isn't using handles
if (url != null) {
@@ -466,9 +464,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
metadata.add(createDCValue("description", "tableofcontents", description_table));
}
- if (identifier_uri != null) {
- metadata.add(createDCValue("identifier.uri", null, identifier_uri));
- }
+ metadata.add(createDCValue("identifier.uri", null, identifier_uri));
if (rights != null) {
metadata.add(createDCValue("rights", null, rights));
@@ -520,9 +516,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
metadata.add(createDCValue("description", "tableofcontents", description_table));
}
- if (identifier_uri != null) {
- metadata.add(createDCValue("identifier", "uri", identifier_uri));
- }
+ metadata.add(createDCValue("identifier", "uri", identifier_uri));
if (provenance != null) {
metadata.add(createDCValue("provenance", null, provenance));
diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/XHTMLHeadDisseminationCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/XHTMLHeadDisseminationCrosswalk.java
index 2fbbdd9756..7b25f69ce3 100644
--- a/dspace-api/src/main/java/org/dspace/content/crosswalk/XHTMLHeadDisseminationCrosswalk.java
+++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/XHTMLHeadDisseminationCrosswalk.java
@@ -90,17 +90,17 @@ public class XHTMLHeadDisseminationCrosswalk
* Maps DSpace metadata field to name to use in XHTML head element, e.g.
* dc.creator or dc.description.abstract
*/
- private Map names;
+ private final Map names;
/**
* Maps DSpace metadata field to scheme for that field, if any
*/
- private Map schemes;
+ private final Map schemes;
/**
* Schemas to add -- maps schema.NAME to schema URL
*/
- private Map schemaURLs;
+ private final Map schemaURLs;
public XHTMLHeadDisseminationCrosswalk() throws IOException {
names = new HashMap<>();
@@ -109,17 +109,9 @@ public class XHTMLHeadDisseminationCrosswalk
// Read in configuration
Properties crosswalkProps = new Properties();
- FileInputStream fis = new FileInputStream(config);
- try {
+
+ try (FileInputStream fis = new FileInputStream(config);) {
crosswalkProps.load(fis);
- } finally {
- if (fis != null) {
- try {
- fis.close();
- } catch (IOException ioe) {
- // ignore
- }
- }
}
Enumeration e = crosswalkProps.keys();
diff --git a/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java b/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java
index 4ef26cffcb..69bac319c6 100644
--- a/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java
+++ b/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java
@@ -8,8 +8,10 @@
package org.dspace.content.dao;
import java.sql.SQLException;
+import java.util.Date;
import java.util.List;
+import org.dspace.content.ProcessStatus;
import org.dspace.core.Context;
import org.dspace.core.GenericDAO;
import org.dspace.scripts.Process;
@@ -81,4 +83,18 @@ public interface ProcessDAO extends GenericDAO<Process> {
int countTotalWithParameters(Context context, ProcessQueryParameterContainer processQueryParameterContainer)
throws SQLException;
+
+ /**
+ * Find all the processes with one of the given statuses and with a creation time
+ * older than the specified date.
+ *
+ * @param context The relevant DSpace context
+ * @param statuses the statuses of the processes to search for
+ * @param date the creation date to search for
+ * @return The list of all Processes which match requirements
+ * @throws SQLException If something goes wrong
+ */
+ List<Process> findByStatusAndCreationTimeOlderThan(Context context, List<ProcessStatus> statuses, Date date)
+ throws SQLException;
+
}
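A hedged sketch of how a caller (for example a cleanup task) might use the new DAO method; the status choice, the 14-day cutoff and the java.time/Arrays imports are illustrative assumptions, not part of this change:

    // Find COMPLETED or FAILED processes created more than 14 days ago.
    Date cutoff = Date.from(Instant.now().minus(14, ChronoUnit.DAYS));
    List<ProcessStatus> finished = Arrays.asList(ProcessStatus.COMPLETED, ProcessStatus.FAILED);
    List<Process> stale = processDAO.findByStatusAndCreationTimeOlderThan(context, finished, cutoff);
    for (Process process : stale) {
        // delete or archive each stale process record
    }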
diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java
index 5c8083a86b..23ce6ce381 100644
--- a/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java
+++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java
@@ -7,7 +7,10 @@
*/
package org.dspace.content.dao.impl;
+import static org.dspace.scripts.Process_.CREATION_TIME;
+
import java.sql.SQLException;
+import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
@@ -17,6 +20,7 @@ import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import org.apache.commons.lang3.StringUtils;
+import org.dspace.content.ProcessStatus;
import org.dspace.content.dao.ProcessDAO;
import org.dspace.core.AbstractHibernateDAO;
import org.dspace.core.Context;
@@ -147,6 +151,23 @@ public class ProcessDAOImpl extends AbstractHibernateDAO<Process> implements ProcessDAO {
}
+ @Override
+ public List<Process> findByStatusAndCreationTimeOlderThan(Context context, List<ProcessStatus> statuses,
+ Date date) throws SQLException {
+
+ CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
+ CriteriaQuery<Process> criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class);
+
+ Root<Process> processRoot = criteriaQuery.from(Process.class);
+ criteriaQuery.select(processRoot);
+
+ Predicate creationTimeLessThanGivenDate = criteriaBuilder.lessThan(processRoot.get(CREATION_TIME), date);
+ Predicate statusIn = processRoot.get(Process_.PROCESS_STATUS).in(statuses);
+ criteriaQuery.where(criteriaBuilder.and(creationTimeLessThanGivenDate, statusIn));
+
+ return list(context, criteriaQuery, false, Process.class, -1, -1);
+ }
+
}
diff --git a/dspace-api/src/main/java/org/dspace/content/logic/condition/AbstractCondition.java b/dspace-api/src/main/java/org/dspace/content/logic/condition/AbstractCondition.java
index 7a87e13066..0202243265 100644
--- a/dspace-api/src/main/java/org/dspace/content/logic/condition/AbstractCondition.java
+++ b/dspace-api/src/main/java/org/dspace/content/logic/condition/AbstractCondition.java
@@ -12,7 +12,6 @@ import java.util.Map;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
-import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.logic.LogicalStatementException;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService;
@@ -32,10 +31,10 @@ public abstract class AbstractCondition implements Condition {
private Map<String, Object> parameters;
// Declare and instantiate spring services
- //@Autowired(required = true)
- protected ItemService itemService = ContentServiceFactory.getInstance().getItemService();
- //@Autowired(required = true)
- protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
+ @Autowired(required = true)
+ protected ItemService itemService;
+ @Autowired(required = true)
+ protected CollectionService collectionService;
@Autowired(required = true)
protected HandleService handleService;
diff --git a/dspace-api/src/main/java/org/dspace/content/packager/AbstractMETSDisseminator.java b/dspace-api/src/main/java/org/dspace/content/packager/AbstractMETSDisseminator.java
index 03afb5e852..685fd9000d 100644
--- a/dspace-api/src/main/java/org/dspace/content/packager/AbstractMETSDisseminator.java
+++ b/dspace-api/src/main/java/org/dspace/content/packager/AbstractMETSDisseminator.java
@@ -14,9 +14,7 @@ import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
-import java.io.UnsupportedEncodingException;
import java.lang.reflect.InvocationTargetException;
-import java.net.URLEncoder;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Iterator;
@@ -328,45 +326,43 @@ public abstract class AbstractMETSDisseminator
Mets manifest = makeManifest(context, dso, params, extraStreams);
// copy extra (metadata, license, etc) bitstreams into zip, update manifest
- if (extraStreams != null) {
- for (Map.Entry<MdRef, InputStream> ment : extraStreams.getMap().entrySet()) {
- MdRef ref = ment.getKey();
+ for (Map.Entry<MdRef, InputStream> ment : extraStreams.getMap().entrySet()) {
+ MdRef ref = ment.getKey();
- // Both Deposit Licenses & CC Licenses which are referenced as "extra streams" may already be
- // included in our Package (if their bundles are already included in the <fileSec> section of manifest).
- // So, do a special check to see if we need to link up extra License <mdRef> entries to the bitstream
- // in the <fileSec>.
- // (this ensures that we don't accidentally add the same License file to our package twice)
- linkLicenseRefsToBitstreams(context, params, dso, ref);
+ // Both Deposit Licenses & CC Licenses which are referenced as "extra streams" may already be
+ // included in our Package (if their bundles are already included in the <fileSec> section of manifest).
+ // So, do a special check to see if we need to link up extra License <mdRef> entries to the bitstream
+ // in the <fileSec>.
+ // (this ensures that we don't accidentally add the same License file to our package twice)
+ linkLicenseRefsToBitstreams(context, params, dso, ref);
- //If this 'mdRef' is NOT already linked up to a file in the package,
- // then its file must be missing. So, we are going to add a new
- // file to the Zip package.
- if (ref.getXlinkHref() == null || ref.getXlinkHref().isEmpty()) {
- InputStream is = ment.getValue();
+ //If this 'mdRef' is NOT already linked up to a file in the package,
+ // then its file must be missing. So, we are going to add a new
+ // file to the Zip package.
+ if (ref.getXlinkHref() == null || ref.getXlinkHref().isEmpty()) {
+ InputStream is = ment.getValue();
- // create a hopefully unique filename within the Zip
- String fname = gensym("metadata");
- // link up this 'mdRef' to point to that file
- ref.setXlinkHref(fname);
- if (log.isDebugEnabled()) {
- log.debug("Writing EXTRA stream to Zip: " + fname);
- }
- //actually add the file to the Zip package
- ZipEntry ze = new ZipEntry(fname);
- if (lmTime != 0) {
- ze.setTime(lmTime);
- } else {
- // Set a default modified date so that checksum of Zip doesn't change if Zip contents are
- // unchanged
- ze.setTime(DEFAULT_MODIFIED_DATE);
- }
- zip.putNextEntry(ze);
- Utils.copy(is, zip);
- zip.closeEntry();
-
- is.close();
+ // create a hopefully unique filename within the Zip
+ String fname = gensym("metadata");
+ // link up this 'mdRef' to point to that file
+ ref.setXlinkHref(fname);
+ if (log.isDebugEnabled()) {
+ log.debug("Writing EXTRA stream to Zip: " + fname);
}
+ //actually add the file to the Zip package
+ ZipEntry ze = new ZipEntry(fname);
+ if (lmTime != 0) {
+ ze.setTime(lmTime);
+ } else {
+ // Set a default modified date so that checksum of Zip doesn't change if Zip contents are
+ // unchanged
+ ze.setTime(DEFAULT_MODIFIED_DATE);
+ }
+ zip.putNextEntry(ze);
+ Utils.copy(is, zip);
+ zip.closeEntry();
+
+ is.close();
}
}
@@ -467,17 +463,17 @@ public abstract class AbstractMETSDisseminator
Utils.copy(input, zip);
input.close();
} else {
- log.warn("Adding zero-length file for Bitstream, SID="
- + String.valueOf(bitstream.getSequenceID())
+ log.warn("Adding zero-length file for Bitstream, uuid="
+ + String.valueOf(bitstream.getID())
+ ", not authorized for READ.");
}
zip.closeEntry();
} else if (unauth != null && unauth.equalsIgnoreCase("skip")) {
- log.warn("Skipping Bitstream, SID=" + String
- .valueOf(bitstream.getSequenceID()) + ", not authorized for READ.");
+ log.warn("Skipping Bitstream, uuid=" + String
+ .valueOf(bitstream.getID()) + ", not authorized for READ.");
} else {
throw new AuthorizeException(
- "Not authorized to read Bitstream, SID=" + String.valueOf(bitstream.getSequenceID()));
+ "Not authorized to read Bitstream, uuid=" + String.valueOf(bitstream.getID()));
}
}
}
@@ -898,12 +894,12 @@ public abstract class AbstractMETSDisseminator
continue;
} else if (!(unauth != null && unauth.equalsIgnoreCase("zero"))) {
throw new AuthorizeException(
- "Not authorized to read Bitstream, SID=" + String.valueOf(bitstream.getSequenceID()));
+ "Not authorized to read Bitstream, uuid=" + String.valueOf(bitstream.getID()));
}
}
- String sid = String.valueOf(bitstream.getSequenceID());
- String fileID = bitstreamIDstart + sid;
+ String uuid = String.valueOf(bitstream.getID());
+ String fileID = bitstreamIDstart + uuid;
edu.harvard.hul.ois.mets.File file = new edu.harvard.hul.ois.mets.File();
file.setID(fileID);
file.setSEQ(bitstream.getSequenceID());
@@ -926,7 +922,7 @@ public abstract class AbstractMETSDisseminator
* extracted text or a thumbnail, so we use the name to work
* out which bitstream to be in the same group as
*/
- String groupID = "GROUP_" + bitstreamIDstart + sid;
+ String groupID = "GROUP_" + bitstreamIDstart + uuid;
if ((bundle.getName() != null)
&& (bundle.getName().equals("THUMBNAIL") ||
bundle.getName().startsWith("TEXT"))) {
@@ -936,7 +932,7 @@ public abstract class AbstractMETSDisseminator
bitstream);
if (original != null) {
groupID = "GROUP_" + bitstreamIDstart
- + original.getSequenceID();
+ + String.valueOf(original.getID());
}
}
file.setGROUPID(groupID);
@@ -1405,7 +1401,7 @@ public abstract class AbstractMETSDisseminator
// if bare manifest, use external "persistent" URI for bitstreams
if (params != null && (params.getBooleanProperty("manifestOnly", false))) {
// Try to build a persistent(-ish) URI for bitstream
- // Format: {site-base-url}/bitstream/{item-handle}/{sequence-id}/{bitstream-name}
+ // Format: {site-ui-url}/bitstreams/{bitstream-uuid}
try {
// get handle of parent Item of this bitstream, if there is one:
String handle = null;
@@ -1416,26 +1412,13 @@ public abstract class AbstractMETSDisseminator
handle = bi.get(0).getHandle();
}
}
- if (handle != null) {
- return configurationService
- .getProperty("dspace.ui.url")
- + "/bitstream/"
- + handle
- + "/"
- + String.valueOf(bitstream.getSequenceID())
- + "/"
- + URLEncoder.encode(bitstream.getName(), "UTF-8");
- } else { //no Handle assigned, so persistent(-ish) URI for bitstream is
- // Format: {site-base-url}/retrieve/{bitstream-internal-id}
- return configurationService
- .getProperty("dspace.ui.url")
- + "/retrieve/"
- + String.valueOf(bitstream.getID());
- }
+ return configurationService
+ .getProperty("dspace.ui.url")
+ + "/bitstreams/"
+ + String.valueOf(bitstream.getID())
+ + "/download";
} catch (SQLException e) {
log.error("Database problem", e);
- } catch (UnsupportedEncodingException e) {
- log.error("Unknown character set", e);
}
// We should only get here if we failed to build a nice URL above
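For reference, the persistent(-ish) link emitted into a bare manifest now follows the UI's bitstream download route:

    {site-ui-url}/bitstreams/{bitstream-uuid}/download

which resolves whether or not the parent Item has a Handle, unlike the old handle/sequence-ID form that this hunk removes.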
diff --git a/dspace-api/src/main/java/org/dspace/core/Context.java b/dspace-api/src/main/java/org/dspace/core/Context.java
index f0776d5d98..c705be3284 100644
--- a/dspace-api/src/main/java/org/dspace/core/Context.java
+++ b/dspace-api/src/main/java/org/dspace/core/Context.java
@@ -718,7 +718,7 @@ public class Context implements AutoCloseable {
}
/**
- * Restore the user bound to the context and his special groups
+ * Restore the user bound to the context and their special groups
*
* @throws IllegalStateException if no switch was performed before
*/
diff --git a/dspace-api/src/main/java/org/dspace/core/I18nUtil.java b/dspace-api/src/main/java/org/dspace/core/I18nUtil.java
index a853c3597e..0fc48b908b 100644
--- a/dspace-api/src/main/java/org/dspace/core/I18nUtil.java
+++ b/dspace-api/src/main/java/org/dspace/core/I18nUtil.java
@@ -346,7 +346,7 @@ public class I18nUtil {
}
}
- if (fileNameL != null && !fileFound) {
+ if (!fileFound) {
File fileTmp = new File(fileNameL + fileType);
if (fileTmp.exists()) {
fileFound = true;
diff --git a/dspace-api/src/main/java/org/dspace/core/LegacyPluginServiceImpl.java b/dspace-api/src/main/java/org/dspace/core/LegacyPluginServiceImpl.java
index 7bbbd91d0a..e92ea137f3 100644
--- a/dspace-api/src/main/java/org/dspace/core/LegacyPluginServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/core/LegacyPluginServiceImpl.java
@@ -10,7 +10,6 @@ package org.dspace.core;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
-import java.io.Serializable;
import java.lang.reflect.Array;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
@@ -173,7 +172,7 @@ public class LegacyPluginServiceImpl implements PluginService {
throws PluginInstantiationException {
// cache of config data for Sequence Plugins; format its
// <interface> -> [ <classname> .. ] (value is Array)
- Map sequenceConfig = new HashMap();
+ Map<String, String[]> sequenceConfig = new HashMap<>();
// cache the configuration for this interface after grovelling it once:
// format is prefix.<interface> = <classname>
@@ -220,10 +219,7 @@ public class LegacyPluginServiceImpl implements PluginService {
// Map of named plugin classes, [intfc,name] -> class
// Also contains intfc -> "marker" to mark when interface has been loaded.
- private Map namedPluginClasses = new HashMap();
-
- // Map of cached (reusable) named plugin instances, [class,name] -> instance
- private Map namedInstanceCache = new HashMap();
+ private final Map<String, String> namedPluginClasses = new HashMap<>();
// load and cache configuration data for the given interface.
private void configureNamedPlugin(String iname)
@@ -413,14 +409,14 @@ public class LegacyPluginServiceImpl implements PluginService {
String iname = interfaceClass.getName();
configureNamedPlugin(iname);
String prefix = iname + SEP;
- ArrayList result = new ArrayList();
+ ArrayList<String> result = new ArrayList<>();
for (String key : namedPluginClasses.keySet()) {
if (key.startsWith(prefix)) {
result.add(key.substring(prefix.length()));
}
}
- if (result.size() == 0) {
+ if (result.isEmpty()) {
log.error("Cannot find any names for named plugin, interface=" + iname);
}
@@ -508,10 +504,10 @@ public class LegacyPluginServiceImpl implements PluginService {
*/
// tables of config keys for each type of config line:
- Map singleKey = new HashMap();
- Map sequenceKey = new HashMap();
- Map namedKey = new HashMap();
- Map selfnamedKey = new HashMap();
+ Map<String, String> singleKey = new HashMap<>();
+ Map<String, String> sequenceKey = new HashMap<>();
+ Map<String, String> namedKey = new HashMap<>();
+ Map<String, String> selfnamedKey = new HashMap<>();
// Find all property keys starting with "plugin."
List<String> keys = configurationService.getPropertyKeys("plugin.");
@@ -533,7 +529,7 @@ public class LegacyPluginServiceImpl implements PluginService {
// 2. Build up list of all interfaces and test that they are loadable.
// don't bother testing that they are "interface" rather than "class"
// since either one will work for the Plugin Manager.
- ArrayList allInterfaces = new ArrayList();
+ ArrayList<String> allInterfaces = new ArrayList<>();
allInterfaces.addAll(singleKey.keySet());
allInterfaces.addAll(sequenceKey.keySet());
allInterfaces.addAll(namedKey.keySet());
@@ -547,7 +543,6 @@ public class LegacyPluginServiceImpl implements PluginService {
// - each class is loadable.
// - plugin.selfnamed values are each subclass of SelfNamedPlugin
// - save classname in allImpls
- Map allImpls = new HashMap();
// single plugins - just check that it has a valid impl. class
ii = singleKey.keySet().iterator();
@@ -558,9 +553,6 @@ public class LegacyPluginServiceImpl implements PluginService {
log.error("Single plugin config not found for: " + SINGLE_PREFIX + key);
} else {
val = val.trim();
- if (checkClassname(val, "implementation class")) {
- allImpls.put(val, val);
- }
}
}
@@ -571,12 +563,6 @@ public class LegacyPluginServiceImpl implements PluginService {
String[] vals = configurationService.getArrayProperty(SEQUENCE_PREFIX + key);
if (vals == null || vals.length == 0) {
log.error("Sequence plugin config not found for: " + SEQUENCE_PREFIX + key);
- } else {
- for (String val : vals) {
- if (checkClassname(val, "implementation class")) {
- allImpls.put(val, val);
- }
- }
}
}
@@ -591,7 +577,6 @@ public class LegacyPluginServiceImpl implements PluginService {
} else {
for (String val : vals) {
if (checkClassname(val, "selfnamed implementation class")) {
- allImpls.put(val, val);
checkSelfNamed(val);
}
}
@@ -609,15 +594,6 @@ public class LegacyPluginServiceImpl implements PluginService {
log.error("Named plugin config not found for: " + NAMED_PREFIX + key);
} else {
checkNames(key);
- for (String val : vals) {
- // each named plugin has two parts to the value, format:
- // [classname] = [plugin-name]
- String val_split[] = val.split("\\s*=\\s*");
- String classname = val_split[0];
- if (checkClassname(classname, "implementation class")) {
- allImpls.put(classname, classname);
- }
- }
}
}
}
diff --git a/dspace-api/src/main/java/org/dspace/curate/CitationPage.java b/dspace-api/src/main/java/org/dspace/ctask/general/CitationPage.java
similarity index 82%
rename from dspace-api/src/main/java/org/dspace/curate/CitationPage.java
rename to dspace-api/src/main/java/org/dspace/ctask/general/CitationPage.java
index 1b267286a4..fa630029b8 100644
--- a/dspace-api/src/main/java/org/dspace/curate/CitationPage.java
+++ b/dspace-api/src/main/java/org/dspace/ctask/general/CitationPage.java
@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
-package org.dspace.curate;
+package org.dspace.ctask.general;
import java.io.ByteArrayInputStream;
import java.io.IOException;
@@ -18,6 +18,9 @@ import java.util.Map;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
+import org.dspace.authorize.ResourcePolicy;
+import org.dspace.authorize.factory.AuthorizeServiceFactory;
+import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.DSpaceObject;
@@ -26,6 +29,10 @@ import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.BundleService;
import org.dspace.core.Context;
+import org.dspace.curate.AbstractCurationTask;
+import org.dspace.curate.Curator;
+import org.dspace.curate.Distributive;
+import org.dspace.curate.Mutative;
import org.dspace.disseminate.factory.DisseminateServiceFactory;
import org.dspace.disseminate.service.CitationDocumentService;
@@ -67,6 +74,10 @@ public class CitationPage extends AbstractCurationTask {
protected BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();
protected BundleService bundleService = ContentServiceFactory.getInstance().getBundleService();
+ protected ResourcePolicyService resourcePolicyService = AuthorizeServiceFactory.getInstance()
+ .getResourcePolicyService();
+
+ private Map<String, Bitstream> displayMap = new HashMap<>();
/**
* {@inheritDoc}
@@ -95,13 +106,17 @@ public class CitationPage extends AbstractCurationTask {
protected void performItem(Item item) throws SQLException {
//Determine if the DISPLAY bundle exits. If not, create it.
List dBundles = itemService.getBundles(item, CitationPage.DISPLAY_BUNDLE_NAME);
+ Bundle original = itemService.getBundles(item, "ORIGINAL").get(0);
Bundle dBundle = null;
if (dBundles == null || dBundles.isEmpty()) {
try {
dBundle = bundleService.create(Curator.curationContext(), item, CitationPage.DISPLAY_BUNDLE_NAME);
+ // don't inherit now otherwise they will be copied over the moved bitstreams
+ resourcePolicyService.removeAllPolicies(Curator.curationContext(), dBundle);
} catch (AuthorizeException e) {
- log.error("User not authroized to create bundle on item \""
- + item.getName() + "\": " + e.getMessage());
+ log.error("User not authroized to create bundle on item \"{}\": {}",
+ item::getName, e::getMessage);
+ return;
}
} else {
dBundle = dBundles.get(0);
@@ -109,7 +124,6 @@ public class CitationPage extends AbstractCurationTask {
//Create a map of the bitstreams in the displayBundle. This is used to
//check if the bundle being cited is already in the display bundle.
- Map displayMap = new HashMap<>();
for (Bitstream bs : dBundle.getBitstreams()) {
displayMap.put(bs.getName(), bs);
}
@@ -120,13 +134,15 @@ public class CitationPage extends AbstractCurationTask {
List pBundles = itemService.getBundles(item, CitationPage.PRESERVATION_BUNDLE_NAME);
Bundle pBundle = null;
List bundles = new ArrayList<>();
- if (pBundles != null && pBundles.size() > 0) {
+ if (pBundles != null && !pBundles.isEmpty()) {
pBundle = pBundles.get(0);
bundles.addAll(itemService.getBundles(item, "ORIGINAL"));
bundles.addAll(pBundles);
} else {
try {
pBundle = bundleService.create(Curator.curationContext(), item, CitationPage.PRESERVATION_BUNDLE_NAME);
+ // don't inherit now otherwise they will be copied over the moved bitstreams
+ resourcePolicyService.removeAllPolicies(Curator.curationContext(), pBundle);
} catch (AuthorizeException e) {
log.error("User not authroized to create bundle on item \""
+ item.getName() + "\": " + e.getMessage());
@@ -159,7 +175,10 @@ public class CitationPage extends AbstractCurationTask {
citationDocument.makeCitedDocument(Curator.curationContext(), bitstream).getLeft());
//Add the cited document to the approiate bundle
this.addCitedPageToItem(citedInputStream, bundle, pBundle,
- dBundle, displayMap, item, bitstream);
+ dBundle, item, bitstream);
+ // now set the policies of the preservation and display bundle
+ clonePolicies(Curator.curationContext(), original, pBundle);
+ clonePolicies(Curator.curationContext(), original, dBundle);
} catch (Exception e) {
//Could be many things, but nothing that should be
//expected.
@@ -202,8 +221,6 @@ public class CitationPage extends AbstractCurationTask {
* @param pBundle The preservation bundle. The original document should be
* put in here if it is not already.
* @param dBundle The display bundle. The cited document gets put in here.
- * @param displayMap The map of bitstream names to bitstreams in the display
- * bundle.
* @param item The item containing the bundles being used.
* @param bitstream The original source bitstream.
* @throws SQLException if database error
@@ -211,7 +228,7 @@ public class CitationPage extends AbstractCurationTask {
* @throws IOException if IO error
*/
protected void addCitedPageToItem(InputStream citedDoc, Bundle bundle, Bundle pBundle,
- Bundle dBundle, Map displayMap, Item item,
+ Bundle dBundle, Item item,
Bitstream bitstream) throws SQLException, AuthorizeException, IOException {
//If we are modifying a file that is not in the
//preservation bundle then we have to move it there.
@@ -239,7 +256,8 @@ public class CitationPage extends AbstractCurationTask {
citedBitstream.setName(context, bitstream.getName());
bitstreamService.setFormat(context, citedBitstream, bitstream.getFormat(Curator.curationContext()));
citedBitstream.setDescription(context, bitstream.getDescription());
-
+ displayMap.put(bitstream.getName(), citedBitstream);
+ clonePolicies(context, bitstream, citedBitstream);
this.resBuilder.append(" Added ")
.append(citedBitstream.getName())
.append(" to the ")
@@ -251,4 +269,16 @@ public class CitationPage extends AbstractCurationTask {
itemService.update(context, item);
this.status = Curator.CURATE_SUCCESS;
}
+
+ private void clonePolicies(Context context, DSpaceObject source, DSpaceObject target)
+ throws SQLException, AuthorizeException {
+ resourcePolicyService.removeAllPolicies(context, target);
+ for (ResourcePolicy rp : source.getResourcePolicies()) {
+ ResourcePolicy newPolicy = resourcePolicyService.clone(context, rp);
+ newPolicy.setdSpaceObject(target);
+ newPolicy.setAction(rp.getAction());
+ resourcePolicyService.update(context, newPolicy);
+ }
+
+ }
}
diff --git a/dspace-api/src/main/java/org/dspace/ctask/general/CreateMissingIdentifiers.java b/dspace-api/src/main/java/org/dspace/ctask/general/CreateMissingIdentifiers.java
new file mode 100644
index 0000000000..9639461426
--- /dev/null
+++ b/dspace-api/src/main/java/org/dspace/ctask/general/CreateMissingIdentifiers.java
@@ -0,0 +1,94 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+
+package org.dspace.ctask.general;
+
+import java.io.IOException;
+import java.sql.SQLException;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.dspace.authorize.AuthorizeException;
+import org.dspace.content.DSpaceObject;
+import org.dspace.content.Item;
+import org.dspace.core.Constants;
+import org.dspace.core.Context;
+import org.dspace.curate.AbstractCurationTask;
+import org.dspace.curate.Curator;
+import org.dspace.identifier.IdentifierException;
+import org.dspace.identifier.IdentifierProvider;
+import org.dspace.identifier.VersionedHandleIdentifierProviderWithCanonicalHandles;
+import org.dspace.identifier.factory.IdentifierServiceFactory;
+import org.dspace.identifier.service.IdentifierService;
+import org.dspace.services.factory.DSpaceServicesFactory;
+
+/**
+ * Ensure that an object has all of the identifiers that it should, minting them
+ * as necessary.
+ *
+ * @author Mark H. Wood {@literal }
+ */
+public class CreateMissingIdentifiers
+ extends AbstractCurationTask {
+ private static final Logger LOG = LogManager.getLogger();
+
+ @Override
+ public int perform(DSpaceObject dso)
+ throws IOException {
+ // Only some kinds of model objects get identifiers
+ if (!(dso instanceof Item)) {
+ return Curator.CURATE_SKIP;
+ }
+
+ // XXX Temporary escape when an incompatible provider is configured.
+ // XXX Remove this when the provider is fixed.
+ boolean compatible = DSpaceServicesFactory
+ .getInstance()
+ .getServiceManager()
+ .getServiceByName(
+ VersionedHandleIdentifierProviderWithCanonicalHandles.class.getCanonicalName(),
+ IdentifierProvider.class) == null;
+ if (!compatible) {
+ setResult("This task is not compatible with VersionedHandleIdentifierProviderWithCanonicalHandles");
+ return Curator.CURATE_ERROR;
+ }
+ // XXX End of escape
+
+ String typeText = Constants.typeText[dso.getType()];
+
+ // Get a Context
+ Context context;
+ try {
+ context = Curator.curationContext();
+ } catch (SQLException ex) {
+ report("Could not get the curation Context: " + ex.getMessage());
+ return Curator.CURATE_ERROR;
+ }
+
+ // Find the IdentifierService implementation
+ IdentifierService identifierService = IdentifierServiceFactory
+ .getInstance()
+ .getIdentifierService();
+
+ // Register any missing identifiers.
+ try {
+ identifierService.register(context, dso);
+ } catch (AuthorizeException | IdentifierException | SQLException ex) {
+ String message = ex.getMessage();
+ report(String.format("Identifier(s) not minted for %s %s: %s%n",
+ typeText, dso.getID().toString(), message));
+ LOG.error("Identifier(s) not minted: {}", message);
+ return Curator.CURATE_ERROR;
+ }
+
+ // Success!
+ report(String.format("%s %s registered.%n",
+ typeText, dso.getID().toString()));
+ return Curator.CURATE_SUCCESS;
+ }
+}
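
The new task plugs into the standard curation framework, so once it is mapped to a task name in config/modules/curate.cfg it can be run like any other curation task. A minimal programmatic sketch, assuming the task has been registered under the hypothetical name "registeridentifiers":

import org.dspace.core.Context;
import org.dspace.curate.Curator;

public class CreateMissingIdentifiersExample {
    // Runs the (hypothetically named) registeridentifiers task against a single item handle.
    public static void runOn(Context context, String handle) throws Exception {
        Curator curator = new Curator();
        curator.addTask("registeridentifiers");
        curator.curate(context, handle);
        System.out.println("Task status: " + curator.getStatus("registeridentifiers"));
    }
}
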
diff --git a/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java b/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java
index edeb2a6d02..f7ab18c01e 100644
--- a/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java
+++ b/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java
@@ -10,11 +10,13 @@ package org.dspace.ctask.general;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
+import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
+import java.util.Queue;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.xml.XMLConstants;
@@ -33,6 +35,7 @@ import org.apache.http.HttpStatus;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
@@ -60,18 +63,18 @@ import org.xml.sax.SAXException;
* Intended use: cataloging tool in workflow and general curation.
* The task uses a URL 'template' to compose the service call, e.g.
*
- * {@code http://www.sherpa.ac.uk/romeo/api29.php?issn=\{dc.identifier.issn\}}
+ * <p>{@code http://www.sherpa.ac.uk/romeo/api29.php?issn=\{dc.identifier.issn\}}
*
- * Task will substitute the value of the passed item's metadata field
+ * <p>Task will substitute the value of the passed item's metadata field
* in the {parameter} position. If multiple values are present in the
* item field, the first value is used.
*
- * The task uses another property (the datamap) to determine what data
+ * <p>The task uses another property (the datamap) to determine what data
* to extract from the service response and how to use it, e.g.
*
- * {@code //publisher/name=>dc.publisher,//romeocolour}
+ * <p>{@code //publisher/name=>dc.publisher,//romeocolour}
*
- * Task will evaluate the left-hand side (or entire token) of each
+ * <p>Task will evaluate the left-hand side (or entire token) of each
* comma-separated token in the property as an XPath 1.0 expression into
* the response document, and if there is a mapping symbol (e.g. {@code '=>'}) and
* value, it will assign the response document value(s) to the named
@@ -79,48 +82,52 @@ import org.xml.sax.SAXException;
* multiple values, they will all be assigned to the item field. The
* mapping symbol governs the nature of metadata field assignment:
*
- * {@code '->'} mapping will add to any existing values in the item field
- * {@code '=>'} mapping will replace any existing values in the item field
- * {@code '~>'} mapping will add *only* if item field has no existing values
+ * <ul>
+ * <li>{@code '->'} mapping will add to any existing values in the item field
+ * <li>{@code '=>'} mapping will replace any existing values in the item field
+ * <li>{@code '~>'} mapping will add *only* if item field has no existing values
+ * </ul>
*
- * Unmapped data (without a mapping symbol) will simply be added to the task
+ * <p>Unmapped data (without a mapping symbol) will simply be added to the task
* result string, prepended by the XPath expression (a little prettified).
* Each label/value pair in the result string is separated by a space,
* unless the optional 'separator' property is defined.
*
- * A very rudimentary facility for transformation of data is supported, e.g.
+ * <p>A very rudimentary facility for transformation of data is supported, e.g.
*
- * {@code http://www.crossref.org/openurl/?id=\{doi:dc.relation.isversionof\}&format=unixref}
+ * <p>{@code http://www.crossref.org/openurl/?id=\{doi:dc.relation.isversionof\}&format=unixref}
*
- * The 'doi:' prefix will cause the task to look for a 'transform' with that
+ * <p>The 'doi:' prefix will cause the task to look for a 'transform' with that
* name, which is applied to the metadata value before parameter substitution
* occurs. Transforms are defined in a task property such as the following:
*
- * {@code transform.doi = match 10. trunc 60}
+ * <p>{@code transform.doi = match 10. trunc 60}
*
- * This means exclude the value string up to the occurrence of '10.', then
+ * <p>This means exclude the value string up to the occurrence of '10.', then
* truncate after 60 characters. The only transform functions currently defined:
*
- * {@code 'cut' } = remove number leading characters
- * {@code 'trunc' } = remove trailing characters after number length
- * {@code 'match' } = start match at pattern
- * {@code 'text' } = append literal characters (enclose in ' ' when whitespace needed)
+ * <ul>
+ * <li>{@code 'cut' } = remove number leading characters
+ * <li>{@code 'trunc' } = remove trailing characters after number length
+ * <li>{@code 'match' } = start match at pattern
+ * <li>{@code 'text' } = append literal characters (enclose in ' ' when whitespace needed)
+ * </ul>
*
- * If the transform results in an invalid state (e.g. cutting more characters
+ * <p>If the transform results in an invalid state (e.g. cutting more characters
* than are in the value), the condition will be logged and the
* un-transformed value used.
*
- * Transforms may also be used in datamaps, e.g.
+ * <p>Transforms may also be used in datamaps, e.g.
*
- * {@code //publisher/name=>shorten:dc.publisher,//romeocolour}
+ * <p>{@code //publisher/name=>shorten:dc.publisher,//romeocolour}
*
- * which would apply the 'shorten' transform to the service response value(s)
+ * <p>which would apply the 'shorten' transform to the service response value(s)
* prior to metadata field assignment.
*
- * An optional property 'headers' may be defined to stipulate any HTTP headers
+ * <p>An optional property 'headers' may be defined to stipulate any HTTP headers
* required in the service call. The property syntax is double-pipe separated headers:
*
- * {@code Accept: text/xml||Cache-Control: no-cache}
+ * <p>{@code Accept: text/xml||Cache-Control: no-cache}
*
* @author richardrodgers
*/
@@ -128,9 +135,9 @@ import org.xml.sax.SAXException;
@Suspendable
public class MetadataWebService extends AbstractCurationTask implements NamespaceContext {
/**
- * log4j category
+ * logging category
*/
- private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(MetadataWebService.class);
+ private static final Logger log = LogManager.getLogger();
// transform token parsing pattern
protected Pattern ttPattern = Pattern.compile("\'([^\']*)\'|(\\S+)");
// URL of web service with template parameters
@@ -360,42 +367,45 @@ public class MetadataWebService extends AbstractCurationTask implements Namespac
if (transDef == null) {
return value;
}
- String[] tokens = tokenize(transDef);
+ Queue<String> tokens = tokenize(transDef);
String retValue = value;
- for (int i = 0; i < tokens.length; i += 2) {
- if ("cut".equals(tokens[i]) || "trunc".equals(tokens[i])) {
- int index = Integer.parseInt(tokens[i + 1]);
+ while (!tokens.isEmpty()) {
+ String function = tokens.poll();
+ if ("cut".equals(function) || "trunc".equals(function)) {
+ String argument = tokens.poll();
+ int index = Integer.parseInt(argument);
if (retValue.length() > index) {
- if ("cut".equals(tokens[i])) {
+ if ("cut".equals(function)) {
retValue = retValue.substring(index);
} else {
retValue = retValue.substring(0, index);
}
- } else if ("cut".equals(tokens[i])) {
- log.error("requested cut: " + index + " exceeds value length");
+ } else if ("cut".equals(function)) {
+ log.error("requested cut: {} exceeds value length", index);
return value;
}
- } else if ("match".equals(tokens[i])) {
- int index2 = retValue.indexOf(tokens[i + 1]);
+ } else if ("match".equals(function)) {
+ String argument = tokens.poll();
+ int index2 = retValue.indexOf(argument);
if (index2 > 0) {
retValue = retValue.substring(index2);
} else {
- log.error("requested match: " + tokens[i + 1] + " failed");
+ log.error("requested match: {} failed", argument);
return value;
}
- } else if ("text".equals(tokens[i])) {
- retValue = retValue + tokens[i + 1];
+ } else if ("text".equals(function)) {
+ retValue = retValue + tokens.poll();
} else {
- log.error(" unknown transform operation: " + tokens[i]);
+ log.error(" unknown transform operation: " + function);
return value;
}
}
return retValue;
}
- protected String[] tokenize(String text) {
- List list = new ArrayList<>();
+ protected Queue<String> tokenize(String text) {
Matcher m = ttPattern.matcher(text);
+ Queue<String> list = new ArrayDeque<>(m.groupCount());
while (m.find()) {
if (m.group(1) != null) {
list.add(m.group(1));
@@ -403,7 +413,7 @@ public class MetadataWebService extends AbstractCurationTask implements Namespac
list.add(m.group(2));
}
}
- return list.toArray(new String[0]);
+ return list;
}
protected int getMapIndex(String mapping) {
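
The tokenize/transform rework above replaces array indexing with a queue that is drained two tokens at a time (function, then argument). A standalone sketch of that walk for the transform.doi = match 10. trunc 60 example from the class javadoc; it does not call MetadataWebService, and the DOI value is invented:

import java.util.ArrayDeque;
import java.util.Queue;

public class TransformSketch {
    public static void main(String[] args) {
        // tokens for: transform.doi = match 10. trunc 60
        Queue<String> tokens = new ArrayDeque<>();
        tokens.add("match");
        tokens.add("10.");
        tokens.add("trunc");
        tokens.add("60");

        String value = "https://doi.org/10.5555/12345678";
        while (!tokens.isEmpty()) {
            String function = tokens.poll();
            if ("match".equals(function)) {
                // keep everything from the first occurrence of the pattern
                value = value.substring(value.indexOf(tokens.poll()));
            } else if ("trunc".equals(function)) {
                // cap the value at the given length
                int len = Integer.parseInt(tokens.poll());
                value = value.length() > len ? value.substring(0, len) : value;
            }
        }
        System.out.println(value); // prints 10.5555/12345678
    }
}
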
diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrSearchCore.java b/dspace-api/src/main/java/org/dspace/discovery/SolrSearchCore.java
index b430a0c973..f31feab612 100644
--- a/dspace-api/src/main/java/org/dspace/discovery/SolrSearchCore.java
+++ b/dspace-api/src/main/java/org/dspace/discovery/SolrSearchCore.java
@@ -21,7 +21,6 @@ import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.dspace.discovery.indexobject.IndexableItem;
import org.dspace.service.impl.HttpConnectionPoolService;
import org.dspace.services.ConfigurationService;
-import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.storage.rdbms.DatabaseUtils;
import org.springframework.beans.factory.annotation.Autowired;
@@ -75,14 +74,13 @@ public class SolrSearchCore {
*/
protected void initSolr() {
if (solr == null) {
- String solrService = DSpaceServicesFactory.getInstance().getConfigurationService()
- .getProperty("discovery.search.server");
+ String solrService = configurationService.getProperty("discovery.search.server");
UrlValidator urlValidator = new UrlValidator(UrlValidator.ALLOW_LOCAL_URLS);
if (urlValidator.isValid(solrService) || configurationService
.getBooleanProperty("discovery.solr.url.validation.enabled", true)) {
try {
- log.debug("Solr URL: " + solrService);
+ log.debug("Solr URL: {}", solrService);
HttpSolrClient solrServer = new HttpSolrClient.Builder(solrService)
.withHttpClient(httpConnectionPoolService.getClient())
.build();
@@ -103,10 +101,13 @@ public class SolrSearchCore {
solr = solrServer;
} catch (SolrServerException | IOException e) {
- log.error("Error while initializing solr server", e);
+ log.error("Error while initializing solr server {}",
+ solrService, e);
+ throw new RuntimeException("Failed to contact Solr at " + solrService
+ + " : " + e.getMessage());
}
} else {
- log.error("Error while initializing solr, invalid url: " + solrService);
+ log.error("Error while initializing solr, invalid url: {}", solrService);
}
}
}
diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java
index 1ce0ea8e1a..383121072d 100644
--- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java
@@ -979,7 +979,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
// if we found stale objects we can decide to skip execution of the remaining code to improve performance
boolean skipLoadingResponse = false;
// use zombieDocs to collect stale found objects
- List zombieDocs = new ArrayList();
+ List<String> zombieDocs = new ArrayList<>();
QueryResponse solrQueryResponse = solrSearchCore.getSolr().query(solrQuery,
solrSearchCore.REQUEST_METHOD);
if (solrQueryResponse != null) {
@@ -1033,12 +1033,6 @@ public class SolrServiceImpl implements SearchService, IndexingService {
//We need to remove all the "_hl" appendix strings from our keys
Map<String, List<String>> resultMap = new HashMap<>();
for (String key : highlightedFields.keySet()) {
- List highlightOriginalValue = highlightedFields.get(key);
- List resultHighlightOriginalValue = new ArrayList<>();
- for (String highlightValue : highlightOriginalValue) {
- String[] splitted = highlightValue.split("###");
- resultHighlightOriginalValue.add(splitted);
- }
resultMap.put(key.substring(0, key.lastIndexOf("_hl")), highlightedFields.get(key));
}
@@ -1054,7 +1048,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
// If any stale entries are found in the current page of results,
// we remove those stale entries and rerun the same query again.
// Otherwise, the query is valid and the results are returned.
- if (zombieDocs.size() != 0) {
+ if (!zombieDocs.isEmpty()) {
log.info("Cleaning " + zombieDocs.size() + " stale objects from Discovery Index");
log.info("ZombieDocs ");
zombieDocs.forEach(log::info);
diff --git a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java
index c83dabcc4d..2b23ecfeef 100644
--- a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java
@@ -22,6 +22,8 @@ import java.util.UUID;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeConfiguration;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.ResourcePolicy;
@@ -52,8 +54,6 @@ import org.dspace.xmlworkflow.storedcomponents.PoolTask;
import org.dspace.xmlworkflow.storedcomponents.service.ClaimedTaskService;
import org.dspace.xmlworkflow.storedcomponents.service.CollectionRoleService;
import org.dspace.xmlworkflow.storedcomponents.service.PoolTaskService;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
@@ -64,7 +64,7 @@ import org.springframework.beans.factory.annotation.Autowired;
* @author kevinvandevelde at atmire.com
*/
public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements GroupService {
- private static final Logger log = LoggerFactory.getLogger(GroupServiceImpl.class);
+ private static final Logger log = LogManager.getLogger();
@Autowired(required = true)
protected GroupDAO groupDAO;
@@ -473,7 +473,7 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl implements
@Override
public void delete(Context context, Group group) throws SQLException {
if (group.isPermanent()) {
- log.error("Attempt to delete permanent Group $", group.getName());
+ log.error("Attempt to delete permanent Group {}", group::getName);
throw new SQLException("Attempt to delete a permanent Group");
}
@@ -715,7 +715,7 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl implements
// if the group is used for one or more roles on a single collection,
// admins can eventually manage it
List collectionRoles = collectionRoleService.findByGroup(context, group);
- if (collectionRoles != null && collectionRoles.size() > 0) {
+ if (collectionRoles != null && !collectionRoles.isEmpty()) {
Set colls = new HashSet<>();
for (CollectionRole cr : collectionRoles) {
colls.add(cr.getCollection());
diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java b/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java
index ab37aa4047..2cc77129f0 100644
--- a/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java
+++ b/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java
@@ -45,7 +45,7 @@ public interface GroupDAO extends DSpaceObjectDAO, DSpaceObjectLegacySupp
* Find all groups ordered by the specified metadata fields ascending
*
* @param context The DSpace context
- * @param sortMetadataFields The metadata fields to sort on
+ * @param metadataSortFields The metadata fields to sort on
* @param pageSize how many results return
* @param offset the position of the first result to return
* @return A list of all groups, ordered by metadata fields
diff --git a/dspace-api/src/main/java/org/dspace/external/provider/impl/OpenAIREFundingDataProvider.java b/dspace-api/src/main/java/org/dspace/external/provider/impl/OpenAIREFundingDataProvider.java
index 3dcd7d16a6..8ca5b7c0ea 100644
--- a/dspace-api/src/main/java/org/dspace/external/provider/impl/OpenAIREFundingDataProvider.java
+++ b/dspace-api/src/main/java/org/dspace/external/provider/impl/OpenAIREFundingDataProvider.java
@@ -15,6 +15,7 @@ import java.util.ArrayList;
import java.util.Base64;
import java.util.Collections;
import java.util.List;
+import java.util.Map;
import java.util.Optional;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
@@ -33,6 +34,7 @@ import org.dspace.content.dto.MetadataValueDTO;
import org.dspace.external.OpenAIRERestConnector;
import org.dspace.external.model.ExternalDataObject;
import org.dspace.external.provider.AbstractExternalDataProvider;
+import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.springframework.beans.factory.annotation.Autowired;
/**
@@ -40,13 +42,9 @@ import org.springframework.beans.factory.annotation.Autowired;
* will deal with the OpenAIRE External Data lookup
*
* @author paulo-graca
- *
*/
public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
- /**
- * log4j logger
- */
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(OpenAIREFundingDataProvider.class);
/**
@@ -54,6 +52,16 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
*/
protected static final String PREFIX = "info:eu-repo/grantAgreement";
+ private static final String TITLE = "dcTitle";
+ private static final String SUBJECT = "dcSubject";
+ private static final String AWARD_URI = "awardURI";
+ private static final String FUNDER_NAME = "funderName";
+ private static final String SPATIAL = "coverageSpatial";
+ private static final String AWARD_NUMBER = "awardNumber";
+ private static final String FUNDER_ID = "funderIdentifier";
+ private static final String FUNDING_STREAM = "fundingStream";
+ private static final String TITLE_ALTERNATIVE = "titleAlternative";
+
/**
* rows default limit
*/
@@ -69,11 +77,9 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
*/
protected OpenAIRERestConnector connector;
- /**
- * required method
- */
- public void init() throws IOException {
- }
+ protected Map<String, MetadataFieldConfig> metadataFields;
+
+ public void init() throws IOException {}
@Override
public String getSourceIdentifier() {
@@ -266,14 +272,22 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
}
}
+ public Map<String, MetadataFieldConfig> getMetadataFields() {
+ return metadataFields;
+ }
+
+ public void setMetadataFields(Map<String, MetadataFieldConfig> metadataFields) {
+ this.metadataFields = metadataFields;
+ }
+
/**
* OpenAIRE Funding External Data Builder Class
*
* @author pgraca
- *
*/
- public static class ExternalDataObjectBuilder {
- ExternalDataObject externalDataObject;
+ public class ExternalDataObjectBuilder {
+
+ private ExternalDataObject externalDataObject;
public ExternalDataObjectBuilder(Project project) {
String funderIdPrefix = "urn:openaire:";
@@ -283,46 +297,42 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
for (FundingTreeType fundingTree : projectHelper.getFundingTreeTypes()) {
FunderType funder = fundingTree.getFunder();
// Funder name
- this.addFunderName(funder.getName());
+ this.addMetadata(metadataFields.get(FUNDER_NAME), funder.getName());
// Funder Id - convert it to an urn
- this.addFunderID(funderIdPrefix + funder.getId());
+ this.addMetadata(metadataFields.get(FUNDER_ID), funderIdPrefix + funder.getId());
// Jurisdiction
- this.addFunderJuristiction(funder.getJurisdiction());
+ this.addMetadata(metadataFields.get(SPATIAL), funder.getJurisdiction());
FundingHelper fundingHelper = new FundingHelper(
- fundingTree.getFundingLevel2OrFundingLevel1OrFundingLevel0());
+ fundingTree.getFundingLevel2OrFundingLevel1OrFundingLevel0());
// Funding description
for (FundingType funding : fundingHelper.getFirstAvailableFunding()) {
- this.addFundingStream(funding.getDescription());
+ this.addMetadata(metadataFields.get(FUNDING_STREAM), funding.getDescription());
}
}
// Title
for (String title : projectHelper.getTitles()) {
- this.addAwardTitle(title);
+ this.addMetadata(metadataFields.get(TITLE), title);
this.setDisplayValue(title);
this.setValue(title);
}
-
// Code
for (String code : projectHelper.getCodes()) {
- this.addAwardNumber(code);
+ this.addMetadata(metadataFields.get(AWARD_NUMBER), code);
}
-
// Website url
for (String url : projectHelper.getWebsiteUrls()) {
- this.addAwardURI(url);
+ this.addMetadata(metadataFields.get(AWARD_URI), url);
}
-
// Acronyms
for (String acronym : projectHelper.getAcronyms()) {
- this.addFundingItemAcronym(acronym);
+ this.addMetadata(metadataFields.get(TITLE_ALTERNATIVE), acronym);
}
-
// Keywords
for (String keyword : projectHelper.getKeywords()) {
- this.addSubject(keyword);
+ this.addMetadata(metadataFields.get(SUBJECT), keyword);
}
}
@@ -366,7 +376,6 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
* @return ExternalDataObjectBuilder
*/
public ExternalDataObjectBuilder setId(String id) {
-
// we use base64 encoding in order to use slashes / and other
// characters that must be escaped for the <:entry-id>
String base64Id = Base64.getEncoder().encodeToString(id.getBytes());
@@ -374,128 +383,10 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
return this;
}
- /**
- * Add metadata dc.identifier
- *
- * @param metadata identifier
- * @return ExternalDataObjectBuilder
- */
- public ExternalDataObjectBuilder addIdentifier(String identifier) {
- this.externalDataObject.addMetadata(new MetadataValueDTO("dc", "identifier", null, null, identifier));
- return this;
- }
-
- /**
- * Add metadata project.funder.name
- *
- * @param metadata funderName
- * @return ExternalDataObjectBuilder
- */
- public ExternalDataObjectBuilder addFunderName(String funderName) {
- this.externalDataObject.addMetadata(new MetadataValueDTO("project", "funder", "name", null, funderName));
- return this;
- }
-
- /**
- * Add metadata project.funder.identifier
- *
- * @param metadata funderId
- * @return ExternalDataObjectBuilder
- */
- public ExternalDataObjectBuilder addFunderID(String funderID) {
- this.externalDataObject
- .addMetadata(new MetadataValueDTO("project", "funder", "identifier", null, funderID));
- return this;
- }
-
- /**
- * Add metadata dc.title
- *
- * @param metadata awardTitle
- * @return ExternalDataObjectBuilder
- */
- public ExternalDataObjectBuilder addAwardTitle(String awardTitle) {
- this.externalDataObject.addMetadata(new MetadataValueDTO("dc", "title", null, null, awardTitle));
- return this;
- }
-
- /**
- * Add metadata oaire.fundingStream
- *
- * @param metadata fundingStream
- * @return ExternalDataObjectBuilder
- */
- public ExternalDataObjectBuilder addFundingStream(String fundingStream) {
- this.externalDataObject
- .addMetadata(new MetadataValueDTO("oaire", "fundingStream", null, null, fundingStream));
- return this;
- }
-
- /**
- * Add metadata oaire.awardNumber
- *
- * @param metadata awardNumber
- * @return ExternalDataObjectBuilder
- */
- public ExternalDataObjectBuilder addAwardNumber(String awardNumber) {
- this.externalDataObject.addMetadata(new MetadataValueDTO("oaire", "awardNumber", null, null, awardNumber));
- return this;
- }
-
- /**
- * Add metadata oaire.awardURI
- *
- * @param metadata websiteUrl
- * @return ExternalDataObjectBuilder
- */
- public ExternalDataObjectBuilder addAwardURI(String websiteUrl) {
- this.externalDataObject.addMetadata(new MetadataValueDTO("oaire", "awardURI", null, null, websiteUrl));
- return this;
- }
-
- /**
- * Add metadata dc.title.alternative
- *
- * @param metadata fundingItemAcronym
- * @return ExternalDataObjectBuilder
- */
- public ExternalDataObjectBuilder addFundingItemAcronym(String fundingItemAcronym) {
- this.externalDataObject
- .addMetadata(new MetadataValueDTO("dc", "title", "alternative", null, fundingItemAcronym));
- return this;
- }
-
- /**
- * Add metadata dc.coverage.spatial
- *
- * @param metadata funderJuristiction
- * @return ExternalDataObjectBuilder
- */
- public ExternalDataObjectBuilder addFunderJuristiction(String funderJuristiction) {
- this.externalDataObject
- .addMetadata(new MetadataValueDTO("dc", "coverage", "spatial", null, funderJuristiction));
- return this;
- }
-
- /**
- * Add metadata dc.description
- *
- * @param metadata description
- * @return ExternalDataObjectBuilder
- */
- public ExternalDataObjectBuilder addDescription(String description) {
- this.externalDataObject.addMetadata(new MetadataValueDTO("dc", "description", null, null, description));
- return this;
- }
-
- /**
- * Add metadata dc.subject
- *
- * @param metadata subject
- * @return ExternalDataObjectBuilder
- */
- public ExternalDataObjectBuilder addSubject(String subject) {
- this.externalDataObject.addMetadata(new MetadataValueDTO("dc", "subject", null, null, subject));
+ public ExternalDataObjectBuilder addMetadata(MetadataFieldConfig metadataField, String value) {
+ this.externalDataObject.addMetadata(new MetadataValueDTO(metadataField.getSchema(),
+ metadataField.getElement(),
+ metadataField.getQualifier(), null, value));
return this;
}
@@ -508,4 +399,5 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
return this.externalDataObject;
}
}
-}
+
+}
\ No newline at end of file
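
With the hard-coded addFunderName/addAwardNumber/... builders removed, the provider now resolves its target fields through the injected metadataFields map, keyed by the constants defined above. In DSpace this map would normally be wired through the Spring configuration for the external provider; the following Java-only sketch (the field choices are examples, and it assumes MetadataFieldConfig's three-argument constructor) shows the shape of that mapping:

import java.util.HashMap;
import java.util.Map;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;

public class OpenAireFieldMapExample {
    // Example mapping from the provider's internal keys to target metadata fields.
    public static Map<String, MetadataFieldConfig> exampleMapping() {
        Map<String, MetadataFieldConfig> fields = new HashMap<>();
        fields.put("dcTitle", new MetadataFieldConfig("dc", "title", null));
        fields.put("funderName", new MetadataFieldConfig("project", "funder", "name"));
        fields.put("awardNumber", new MetadataFieldConfig("oaire", "awardNumber", null));
        return fields;
    }
}
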
diff --git a/dspace-api/src/main/java/org/dspace/handle/UpdateHandlePrefix.java b/dspace-api/src/main/java/org/dspace/handle/UpdateHandlePrefix.java
index 133d3dbc2c..7fb03376eb 100644
--- a/dspace-api/src/main/java/org/dspace/handle/UpdateHandlePrefix.java
+++ b/dspace-api/src/main/java/org/dspace/handle/UpdateHandlePrefix.java
@@ -126,7 +126,7 @@ public class UpdateHandlePrefix {
);
} catch (SQLException sqle) {
- if ((context != null) && (context.isValid())) {
+ if (context.isValid()) {
context.abort();
context = null;
}
diff --git a/dspace-api/src/main/java/org/dspace/harvest/HarvestedCollectionServiceImpl.java b/dspace-api/src/main/java/org/dspace/harvest/HarvestedCollectionServiceImpl.java
index d4bb37cf94..0ad83a3292 100644
--- a/dspace-api/src/main/java/org/dspace/harvest/HarvestedCollectionServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/harvest/HarvestedCollectionServiceImpl.java
@@ -19,10 +19,8 @@ import java.util.Calendar;
import java.util.Date;
import java.util.List;
import javax.xml.parsers.ParserConfigurationException;
-import javax.xml.transform.TransformerException;
+import javax.xml.xpath.XPathExpressionException;
-import ORG.oclc.oai.harvester2.verb.Identify;
-import ORG.oclc.oai.harvester2.verb.ListIdentifiers;
import org.dspace.content.Collection;
import org.dspace.core.Context;
import org.dspace.harvest.dao.HarvestedCollectionDAO;
@@ -33,6 +31,8 @@ import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.input.DOMBuilder;
+import org.oclc.oai.harvester2.verb.Identify;
+import org.oclc.oai.harvester2.verb.ListIdentifiers;
import org.springframework.beans.factory.annotation.Autowired;
import org.w3c.dom.DOMException;
import org.xml.sax.SAXException;
@@ -198,7 +198,7 @@ public class HarvestedCollectionServiceImpl implements HarvestedCollectionServic
// First, see if we can contact the target server at all.
try {
new Identify(oaiSource);
- } catch (IOException | ParserConfigurationException | TransformerException | SAXException ex) {
+ } catch (IOException | ParserConfigurationException | XPathExpressionException | SAXException ex) {
errorSet.add(OAI_ADDRESS_ERROR + ": OAI server could not be reached.");
return errorSet;
}
@@ -216,7 +216,7 @@ public class HarvestedCollectionServiceImpl implements HarvestedCollectionServic
try {
OREOAIPrefix = OAIHarvester.oaiResolveNamespaceToPrefix(oaiSource, OAIHarvester.getORENamespace().getURI());
DMDOAIPrefix = OAIHarvester.oaiResolveNamespaceToPrefix(oaiSource, DMD_NS.getURI());
- } catch (IOException | ParserConfigurationException | TransformerException | SAXException ex) {
+ } catch (IOException | ParserConfigurationException | XPathExpressionException | SAXException ex) {
errorSet.add(OAI_ADDRESS_ERROR
+ ": OAI did not respond to ListMetadataFormats query ("
+ ORE_NS.getPrefix() + ":" + OREOAIPrefix + " ; "
@@ -260,7 +260,8 @@ public class HarvestedCollectionServiceImpl implements HarvestedCollectionServic
}
}
}
- } catch (IOException | ParserConfigurationException | TransformerException | DOMException | SAXException e) {
+ } catch (IOException | ParserConfigurationException | XPathExpressionException | DOMException |
+ SAXException e) {
errorSet.add(OAI_ADDRESS_ERROR + ": OAI server could not be reached");
return errorSet;
} catch (RuntimeException re) {
diff --git a/dspace-api/src/main/java/org/dspace/harvest/OAIHarvester.java b/dspace-api/src/main/java/org/dspace/harvest/OAIHarvester.java
index a15ed53a93..5a6b26da93 100644
--- a/dspace-api/src/main/java/org/dspace/harvest/OAIHarvester.java
+++ b/dspace-api/src/main/java/org/dspace/harvest/OAIHarvester.java
@@ -28,13 +28,10 @@ import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import javax.xml.parsers.ParserConfigurationException;
-import javax.xml.transform.TransformerException;
+import javax.xml.xpath.XPathExpressionException;
-import ORG.oclc.oai.harvester2.verb.GetRecord;
-import ORG.oclc.oai.harvester2.verb.Identify;
-import ORG.oclc.oai.harvester2.verb.ListMetadataFormats;
-import ORG.oclc.oai.harvester2.verb.ListRecords;
import org.apache.commons.lang3.StringUtils;
+import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
@@ -75,6 +72,10 @@ import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.input.DOMBuilder;
import org.jdom2.output.XMLOutputter;
+import org.oclc.oai.harvester2.verb.GetRecord;
+import org.oclc.oai.harvester2.verb.Identify;
+import org.oclc.oai.harvester2.verb.ListMetadataFormats;
+import org.oclc.oai.harvester2.verb.ListRecords;
import org.xml.sax.SAXException;
@@ -91,7 +92,7 @@ public class OAIHarvester {
/**
* log4j category
*/
- private static Logger log = org.apache.logging.log4j.LogManager.getLogger(OAIHarvester.class);
+ private static final Logger log = LogManager.getLogger();
private static final Namespace ATOM_NS = Namespace.getNamespace("http://www.w3.org/2005/Atom");
private static final Namespace ORE_NS = Namespace.getNamespace("http://www.openarchives.org/ore/terms/");
@@ -133,7 +134,7 @@ public class OAIHarvester {
private String metadataKey;
// DOMbuilder class for the DOM -> JDOM conversions
- private static DOMBuilder db = new DOMBuilder();
+ private static final DOMBuilder db = new DOMBuilder();
// The point at which this thread should terminate itself
/* Initialize the harvester with a collection object */
@@ -331,18 +332,16 @@ public class OAIHarvester {
// main loop to keep requesting more objects until we're done
List records;
- Set errorSet = new HashSet();
+ Set<String> errorSet = new HashSet<>();
ListRecords listRecords = new ListRecords(oaiSource, fromDate, toDate, oaiSetId, descMDPrefix);
log.debug(
"Harvesting request parameters: listRecords " + oaiSource + " " + fromDate + " " + toDate + " " +
oaiSetId + " " + descMDPrefix);
- if (listRecords != null) {
- log.info("HTTP Request: " + listRecords.getRequestURL());
- }
+ log.info("HTTP Request: " + listRecords.getRequestURL());
while (listRecords != null) {
- records = new ArrayList();
+ records = new ArrayList<>();
oaiResponse = db.build(listRecords.getDocument());
if (listRecords.getErrors() != null && listRecords.getErrors().getLength() > 0) {
@@ -376,8 +375,8 @@ public class OAIHarvester {
}
// Process the obtained records
- if (records != null && records.size() > 0) {
- log.info("Found " + records.size() + " records to process");
+ if (!records.isEmpty()) {
+ log.info("Found {} records to process", records::size);
for (Element record : records) {
// check for STOP interrupt from the scheduler
if (HarvestScheduler.getInterrupt() == HarvestScheduler.HARVESTER_INTERRUPT_STOP) {
@@ -439,7 +438,8 @@ public class OAIHarvester {
harvestRow.setHarvestStatus(HarvestedCollection.STATUS_UNKNOWN_ERROR);
harvestedCollectionService.update(ourContext, harvestRow);
alertAdmin(HarvestedCollection.STATUS_UNKNOWN_ERROR, ex);
- log.error("Error occurred while generating an OAI response: " + ex.getMessage() + " " + ex.getCause(), ex);
+ log.error("Error occurred while generating an OAI response: {} {}",
+ ex.getMessage(), ex.getCause(), ex);
ourContext.complete();
return;
} finally {
@@ -493,11 +493,11 @@ public class OAIHarvester {
* @throws HarvestingException if harvesting error
* @throws ParserConfigurationException XML parsing error
* @throws SAXException if XML processing error
- * @throws TransformerException if XML transformer error
+ * @throws XPathExpressionException if XPath error
*/
protected void processRecord(Element record, String OREPrefix, final long currentRecord, long totalListSize)
throws SQLException, AuthorizeException, IOException, CrosswalkException, HarvestingException,
- ParserConfigurationException, SAXException, TransformerException {
+ ParserConfigurationException, SAXException, XPathExpressionException {
WorkspaceItem wi = null;
Date timeStart = new Date();
@@ -623,7 +623,7 @@ public class OAIHarvester {
List OREBundles = itemService.getBundles(item, "ORE");
Bitstream OREBitstream = null;
- if (OREBundles.size() > 0) {
+ if (!OREBundles.isEmpty()) {
OREBundle = OREBundles.get(0);
} else {
OREBundle = bundleService.create(ourContext, item, "ORE");
@@ -698,7 +698,7 @@ public class OAIHarvester {
List values = itemService.getMetadata(item, "dc", "identifier", Item.ANY, Item.ANY);
- if (values.size() > 0 && acceptedHandleServers != null) {
+ if (!values.isEmpty() && acceptedHandleServers != null) {
for (MetadataValue value : values) {
// 0 1 2 3 4
// https://hdl.handle.net/1234/12
@@ -732,7 +732,7 @@ public class OAIHarvester {
* @return a string in the format 'yyyy-mm-ddThh:mm:ssZ' and converted to UTC timezone
*/
private String processDate(Date date) {
- Integer timePad = configurationService.getIntProperty("oai.harvester.timePadding");
+ int timePad = configurationService.getIntProperty("oai.harvester.timePadding");
if (timePad == 0) {
timePad = 120;
@@ -769,10 +769,10 @@ public class OAIHarvester {
* @throws IOException if IO error
* @throws SAXException if XML processing error
* @throws ParserConfigurationException XML parsing error
- * @throws TransformerException if XML transformer error
+ * @throws XPathExpressionException if XPath error
*/
private String oaiGetDateGranularity(String oaiSource)
- throws IOException, ParserConfigurationException, SAXException, TransformerException {
+ throws IOException, ParserConfigurationException, SAXException, XPathExpressionException {
Identify iden = new Identify(oaiSource);
return iden.getDocument().getElementsByTagNameNS(OAI_NS.getURI(), "granularity").item(0).getTextContent();
}
@@ -789,26 +789,24 @@ public class OAIHarvester {
* operations.
* @throws ParserConfigurationException XML parsing error
* @throws SAXException if XML processing error
- * @throws TransformerException if XML transformer error
+ * @throws XPathExpressionException if XPath error
* @throws ConnectException if could not connect to OAI server
*/
public static String oaiResolveNamespaceToPrefix(String oaiSource, String MDNamespace)
- throws IOException, ParserConfigurationException, SAXException, TransformerException, ConnectException {
+ throws IOException, ParserConfigurationException, SAXException, XPathExpressionException, ConnectException {
String metaPrefix = null;
// Query the OAI server for the metadata
ListMetadataFormats lmf = new ListMetadataFormats(oaiSource);
- if (lmf != null) {
- Document lmfResponse = db.build(lmf.getDocument());
- List mdFormats = lmfResponse.getRootElement().getChild("ListMetadataFormats", OAI_NS)
- .getChildren("metadataFormat", OAI_NS);
+ Document lmfResponse = db.build(lmf.getDocument());
+ List<Element> mdFormats = lmfResponse.getRootElement().getChild("ListMetadataFormats", OAI_NS)
+ .getChildren("metadataFormat", OAI_NS);
- for (Element mdFormat : mdFormats) {
- if (MDNamespace.equals(mdFormat.getChildText("metadataNamespace", OAI_NS))) {
- metaPrefix = mdFormat.getChildText("metadataPrefix", OAI_NS);
- break;
- }
+ for (Element mdFormat : mdFormats) {
+ if (MDNamespace.equals(mdFormat.getChildText("metadataNamespace", OAI_NS))) {
+ metaPrefix = mdFormat.getChildText("metadataPrefix", OAI_NS);
+ break;
}
}
@@ -868,15 +866,15 @@ public class OAIHarvester {
* operations.
* @throws ParserConfigurationException XML parsing error
* @throws SAXException if XML processing error
- * @throws TransformerException if XML transformer error
+ * @throws XPathExpressionException if XPath error
* @throws HarvestingException if harvesting error
*/
protected List getMDrecord(String oaiSource, String itemOaiId, String metadataPrefix)
- throws IOException, ParserConfigurationException, SAXException, TransformerException, HarvestingException {
+ throws IOException, ParserConfigurationException, SAXException, XPathExpressionException, HarvestingException {
GetRecord getRecord = new GetRecord(oaiSource, itemOaiId, metadataPrefix);
- Set errorSet = new HashSet();
+ Set<String> errorSet = new HashSet<>();
// If the metadata is not available for this item, can the whole thing
- if (getRecord != null && getRecord.getErrors() != null && getRecord.getErrors().getLength() > 0) {
+ if (getRecord.getErrors() != null && getRecord.getErrors().getLength() > 0) {
for (int i = 0; i < getRecord.getErrors().getLength(); i++) {
String errorCode = getRecord.getErrors().item(i).getAttributes().getNamedItem("code").getTextContent();
errorSet.add(errorCode);
diff --git a/dspace-api/src/main/java/org/dspace/health/EmbargoCheck.java b/dspace-api/src/main/java/org/dspace/health/EmbargoCheck.java
index 5577f41e66..e7b456f7b3 100644
--- a/dspace-api/src/main/java/org/dspace/health/EmbargoCheck.java
+++ b/dspace-api/src/main/java/org/dspace/health/EmbargoCheck.java
@@ -26,9 +26,8 @@ public class EmbargoCheck extends Check {
@Override
public String run(ReportInfo ri) {
String ret = "";
- Context context = null;
+ Context context = new Context();
try {
- context = new Context();
Iterator item_iter = null;
try {
item_iter = embargoService.findItemsByLiftMetadata(context);
@@ -56,9 +55,7 @@ public class EmbargoCheck extends Check {
} catch (SQLException e) {
error(e);
try {
- if (null != context) {
- context.abort();
- }
+ context.abort();
} catch (Exception e1) {
error(e);
}
diff --git a/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java
index 2d7914a75a..66e7b94a4b 100644
--- a/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java
+++ b/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java
@@ -543,7 +543,7 @@ public class DOIIdentifierProvider extends FilteredIdentifierProvider {
if (DELETED.equals(doiRow.getStatus()) || TO_BE_DELETED.equals(doiRow.getStatus())) {
throw new DOIIdentifierException("You tried to update the metadata"
- + "of a DOI that is marked as DELETED.",
+ + " of a DOI that is marked as DELETED.",
DOIIdentifierException.DOI_IS_DELETED);
}
diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java
index cc43bd21b5..a864b4be4b 100644
--- a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java
+++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java
@@ -261,7 +261,6 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider {
doiService.update(context, doi);
return doi.getDoi();
}
- assert (previousVersionDOI != null);
String identifier = getBareDOI(previousVersionDOI);
diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java
index 61abbcb580..7705fd2b57 100644
--- a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java
+++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java
@@ -117,7 +117,7 @@ public class VersionedHandleIdentifierProviderWithCanonicalHandles extends Ident
// check if we have a previous item
if (previous != null) {
try {
- // If we have a reviewer he/she might not have the
+ // If we have a reviewer they might not have the
// rights to edit the metadata of thes previous item.
// Temporarly grant them:
context.turnOffAuthorisationSystem();
diff --git a/dspace-api/src/main/java/org/dspace/identifier/service/IdentifierService.java b/dspace-api/src/main/java/org/dspace/identifier/service/IdentifierService.java
index 74219fc71c..64eee1dfcf 100644
--- a/dspace-api/src/main/java/org/dspace/identifier/service/IdentifierService.java
+++ b/dspace-api/src/main/java/org/dspace/identifier/service/IdentifierService.java
@@ -92,6 +92,9 @@ public interface IdentifierService {
throws AuthorizeException, SQLException, IdentifierException;
/**
+ * Used to register newly-minted identifiers. Each provider is responsible
+ * for creating the appropriate identifier. All providers are interrogated.
+ *
* @param context The relevant DSpace Context.
* @param dso DSpace object to be registered
* @throws AuthorizeException if authorization error
@@ -101,7 +104,7 @@ public interface IdentifierService {
void register(Context context, DSpaceObject dso) throws AuthorizeException, SQLException, IdentifierException;
/**
- * Used to Register a specific Identifier (for example a Handle, hdl:1234.5/6)
+ * Used to Register a specific Identifier (for example a Handle, hdl:1234.5/6).
* The provider is responsible for detecting and processing the appropriate
* identifier. All Providers are interrogated. Multiple providers
* can process the same identifier.
diff --git a/dspace-api/src/main/java/org/dspace/importer/external/bibtex/service/BibtexImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/bibtex/service/BibtexImportMetadataSourceServiceImpl.java
index 7468d601f5..445dfedebd 100644
--- a/dspace-api/src/main/java/org/dspace/importer/external/bibtex/service/BibtexImportMetadataSourceServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/importer/external/bibtex/service/BibtexImportMetadataSourceServiceImpl.java
@@ -70,11 +70,24 @@ public class BibtexImportMetadataSourceServiceImpl extends AbstractPlainMetadata
keyValueItem.setKey(entry.getValue().getType().getValue());
keyValueItem.setValue(entry.getKey().getValue());
keyValues.add(keyValueItem);
+ PlainMetadataKeyValueItem typeItem = new PlainMetadataKeyValueItem();
+ typeItem.setKey("type");
+ typeItem.setValue(entry.getValue().getType().getValue());
+ keyValues.add(typeItem);
if (entry.getValue().getFields() != null) {
for (Entry subentry : entry.getValue().getFields().entrySet()) {
PlainMetadataKeyValueItem innerItem = new PlainMetadataKeyValueItem();
- innerItem.setKey(subentry.getKey().getValue());
- innerItem.setValue(subentry.getValue().toUserString());
+ innerItem.setKey(subentry.getKey().getValue().toLowerCase());
+ String latexString = subentry.getValue().toUserString();
+ try {
+ org.jbibtex.LaTeXParser laTeXParser = new org.jbibtex.LaTeXParser();
+ List<org.jbibtex.LaTeXObject> latexObjects = laTeXParser.parse(latexString);
+ org.jbibtex.LaTeXPrinter laTeXPrinter = new org.jbibtex.LaTeXPrinter();
+ String plainTextString = laTeXPrinter.print(latexObjects);
+ innerItem.setValue(plainTextString.replaceAll("\n", " "));
+ } catch (ParseException e) {
+ innerItem.setValue(latexString);
+ }
keyValues.add(innerItem);
}
}
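
The BibTeX importer change above runs each field value through jbibtex's LaTeX parser and printer so that LaTeX markup is flattened to plain text, falling back to the raw string when parsing fails. A small standalone sketch of that round trip, using an invented author string:

import java.util.List;
import org.jbibtex.LaTeXObject;
import org.jbibtex.LaTeXParser;
import org.jbibtex.LaTeXPrinter;

public class LatexValueExample {
    public static void main(String[] args) throws Exception {
        String latexString = "Schr{\\\"o}dinger, Erwin";   // LaTeX-encoded umlaut
        List<LaTeXObject> objects = new LaTeXParser().parse(latexString);
        String plain = new LaTeXPrinter().print(objects);
        System.out.println(plain.replaceAll("\n", " "));   // expected: Schrödinger, Erwin
    }
}
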
diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SplitMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SplitMetadataContributor.java
new file mode 100644
index 0000000000..c04081957f
--- /dev/null
+++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SplitMetadataContributor.java
@@ -0,0 +1,65 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.importer.external.metadatamapping.contributor;
+
+import java.util.ArrayList;
+import java.util.Collection;
+
+import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
+import org.dspace.importer.external.metadatamapping.MetadatumDTO;
+
+/**
+ * Wrapper class used to split another MetadataContributor's output into distinct values.
+ * The split is performed by matching a regular expression against the wrapped MetadataContributor's output.
+ *
+ * @author Philipp Rumpf (philipp.rumpf@uni-bamberg.de)
+ */
+
+public class SplitMetadataContributor<T> implements MetadataContributor<T> {
+ private final MetadataContributor<T> innerContributor;
+ private final String regex;
+
+ /**
+ * @param innerContributor The MetadataContributor whose output is split
+ * @param regex A regular expression matching the separator between different values
+ */
+ public SplitMetadataContributor(MetadataContributor<T> innerContributor, String regex) {
+ this.innerContributor = innerContributor;
+ this.regex = regex;
+ }
+
+ @Override
+ public void setMetadataFieldMapping(MetadataFieldMapping<T, MetadataContributor<T>> rt) {
+
+ }
+
+ /**
+ * Each metadatum returned by the wrapped MetadataContributor is split into one or more metadata values
+ * based on the provided regular expression.
+ *
+ * @param t The recordType object to retrieve metadata from
+ * @return The collection of processed metadata values
+ */
+ @Override
+ public Collection<MetadatumDTO> contributeMetadata(T t) {
+ Collection<MetadatumDTO> metadata = innerContributor.contributeMetadata(t);
+ ArrayList<MetadatumDTO> splitMetadata = new ArrayList<>();
+ for (MetadatumDTO metadatumDTO : metadata) {
+ String[] split = metadatumDTO.getValue().split(regex);
+ for (String splitItem : split) {
+ MetadatumDTO splitMetadatumDTO = new MetadatumDTO();
+ splitMetadatumDTO.setSchema(metadatumDTO.getSchema());
+ splitMetadatumDTO.setElement(metadatumDTO.getElement());
+ splitMetadatumDTO.setQualifier(metadatumDTO.getQualifier());
+ splitMetadatumDTO.setValue(splitItem);
+ splitMetadata.add(splitMetadatumDTO);
+ }
+ }
+ return splitMetadata;
+ }
+}
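For reference, a usage sketch of the new wrapper. The inner contributor below is a hypothetical stand-in (in practice the wrapper and the real contributor are wired together in the importer's Spring configuration), and the generic signature of setMetadataFieldMapping is assumed from the interface being implemented:

    import java.util.Collection;
    import java.util.List;

    import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
    import org.dspace.importer.external.metadatamapping.MetadatumDTO;
    import org.dspace.importer.external.metadatamapping.contributor.MetadataContributor;
    import org.dspace.importer.external.metadatamapping.contributor.SplitMetadataContributor;

    public class SplitMetadataContributorSketch {
        public static void main(String[] args) {
            // Hypothetical inner contributor emitting one semicolon-separated dc.subject value
            MetadataContributor<String> inner = new MetadataContributor<String>() {
                @Override
                public void setMetadataFieldMapping(MetadataFieldMapping<String, MetadataContributor<String>> rt) {
                    // not needed for this sketch
                }

                @Override
                public Collection<MetadatumDTO> contributeMetadata(String record) {
                    MetadatumDTO dto = new MetadatumDTO();
                    dto.setSchema("dc");
                    dto.setElement("subject");
                    dto.setValue("history; philosophy; ethics");
                    return List.of(dto);
                }
            };

            // Split on ";" (with optional surrounding whitespace) into three separate dc.subject values
            SplitMetadataContributor<String> splitter = new SplitMetadataContributor<>(inner, "\\s*;\\s*");
            Collection<MetadatumDTO> values = splitter.contributeMetadata("raw record");
            values.forEach(v -> System.out.println(v.getValue()));
        }
    }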
diff --git a/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java b/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java
index 3dc859ef47..33fea75add 100644
--- a/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java
@@ -305,6 +305,12 @@ public class ProcessServiceImpl implements ProcessService {
tempFile.delete();
}
+ @Override
+ public List<Process> findByStatusAndCreationTimeOlderThan(Context context, List<ProcessStatus> statuses,
+ Date date) throws SQLException {
+ return this.processDAO.findByStatusAndCreationTimeOlderThan(context, statuses, date);
+ }
+
private String formatLogLine(int processId, String scriptName, String output, ProcessLogLevel processLogLevel) {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
StringBuilder sb = new StringBuilder();
diff --git a/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java b/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java
index 95bcdd3270..ce6a173b0e 100644
--- a/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java
+++ b/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java
@@ -10,11 +10,13 @@ package org.dspace.scripts.service;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
+import java.util.Date;
import java.util.List;
import java.util.Set;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
+import org.dspace.content.ProcessStatus;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
@@ -240,4 +242,17 @@ public interface ProcessService {
*/
void createLogBitstream(Context context, Process process)
throws IOException, SQLException, AuthorizeException;
+
+ /**
+ * Find all the processes with one of the given statuses and with a creation time
+ * older than the specified date.
+ *
+ * @param context The relevant DSpace context
+ * @param statuses the statuses of the processes to search for
+ * @param date the creation date threshold; only processes created before this date are returned
+ * @return The list of all Processes which match the requirements
+ * @throws SQLException if a database error occurs
+ */
+ List<Process> findByStatusAndCreationTimeOlderThan(Context context, List<ProcessStatus> statuses, Date date)
+ throws SQLException;
}
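A rough sketch of how a caller, such as the process-cleaner script exercised in the tests further below, might use the new finder. It assumes a running DSpace kernel; the 30-day threshold, class name, and variable names are illustrative:

    import java.util.Date;
    import java.util.List;

    import org.apache.commons.lang.time.DateUtils;
    import org.dspace.content.ProcessStatus;
    import org.dspace.core.Context;
    import org.dspace.scripts.Process;
    import org.dspace.scripts.factory.ScriptServiceFactory;
    import org.dspace.scripts.service.ProcessService;

    public class StaleProcessReportSketch {
        public static void main(String[] args) throws Exception {
            Context context = new Context();
            ProcessService processService = ScriptServiceFactory.getInstance().getProcessService();

            // Find COMPLETED or FAILED processes created more than 30 days ago
            Date threshold = DateUtils.addDays(new Date(), -30);
            List<ProcessStatus> statuses = List.of(ProcessStatus.COMPLETED, ProcessStatus.FAILED);
            List<Process> stale = processService.findByStatusAndCreationTimeOlderThan(context, statuses, threshold);

            for (Process process : stale) {
                System.out.println("Stale process: " + process.getID());
            }
            context.complete();
        }
    }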
diff --git a/dspace-api/src/main/java/org/dspace/service/impl/ClientInfoServiceImpl.java b/dspace-api/src/main/java/org/dspace/service/impl/ClientInfoServiceImpl.java
index f63a7a4f91..f4c8818e02 100644
--- a/dspace-api/src/main/java/org/dspace/service/impl/ClientInfoServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/service/impl/ClientInfoServiceImpl.java
@@ -139,7 +139,7 @@ public class ClientInfoServiceImpl implements ClientInfoService {
// If our IPTable is not empty, log the trusted proxies and return it
if (!ipTable.isEmpty()) {
- log.info("Trusted proxies (configure via 'proxies.trusted.ipranges'): {}", ipTable.toSet().toString());
+ log.info("Trusted proxies (configure via 'proxies.trusted.ipranges'): {}", ipTable);
return ipTable;
} else {
return null;
diff --git a/dspace-api/src/main/java/org/dspace/sort/OrderFormat.java b/dspace-api/src/main/java/org/dspace/sort/OrderFormat.java
index 4b3e188662..fdaaef98b5 100644
--- a/dspace-api/src/main/java/org/dspace/sort/OrderFormat.java
+++ b/dspace-api/src/main/java/org/dspace/sort/OrderFormat.java
@@ -86,23 +86,23 @@ public class OrderFormat {
}
// No delegates found, so apply defaults
- if (type.equalsIgnoreCase(OrderFormat.AUTHOR) && authorDelegate != null) {
+ if (type.equalsIgnoreCase(OrderFormat.AUTHOR)) {
return authorDelegate.makeSortString(value, language);
}
- if (type.equalsIgnoreCase(OrderFormat.TITLE) && titleDelegate != null) {
+ if (type.equalsIgnoreCase(OrderFormat.TITLE)) {
return titleDelegate.makeSortString(value, language);
}
- if (type.equalsIgnoreCase(OrderFormat.TEXT) && textDelegate != null) {
+ if (type.equalsIgnoreCase(OrderFormat.TEXT)) {
return textDelegate.makeSortString(value, language);
}
- if (type.equalsIgnoreCase(OrderFormat.DATE) && dateDelegate != null) {
+ if (type.equalsIgnoreCase(OrderFormat.DATE)) {
return dateDelegate.makeSortString(value, language);
}
- if (type.equalsIgnoreCase(OrderFormat.AUTHORITY) && authorityDelegate != null) {
+ if (type.equalsIgnoreCase(OrderFormat.AUTHORITY)) {
return authorityDelegate.makeSortString(value, language);
}
}
diff --git a/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitle.java b/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitle.java
index eb3586dc61..b745f0719c 100644
--- a/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitle.java
+++ b/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitle.java
@@ -10,6 +10,7 @@ package org.dspace.sort;
import org.dspace.text.filter.DecomposeDiactritics;
import org.dspace.text.filter.LowerCaseAndTrim;
import org.dspace.text.filter.StandardInitialArticleWord;
+import org.dspace.text.filter.StripDiacritics;
import org.dspace.text.filter.TextFilter;
/**
@@ -21,6 +22,7 @@ public class OrderFormatTitle extends AbstractTextFilterOFD {
{
filters = new TextFilter[] {new StandardInitialArticleWord(),
new DecomposeDiactritics(),
+ new StripDiacritics(),
new LowerCaseAndTrim()};
}
}
diff --git a/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitleMarc21.java b/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitleMarc21.java
index 670e5c87e5..fa9ba29725 100644
--- a/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitleMarc21.java
+++ b/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitleMarc21.java
@@ -10,6 +10,7 @@ package org.dspace.sort;
import org.dspace.text.filter.DecomposeDiactritics;
import org.dspace.text.filter.LowerCaseAndTrim;
import org.dspace.text.filter.MARC21InitialArticleWord;
+import org.dspace.text.filter.StripDiacritics;
import org.dspace.text.filter.StripLeadingNonAlphaNum;
import org.dspace.text.filter.TextFilter;
@@ -22,6 +23,7 @@ public class OrderFormatTitleMarc21 extends AbstractTextFilterOFD {
{
filters = new TextFilter[] {new MARC21InitialArticleWord(),
new DecomposeDiactritics(),
+ new StripDiacritics(),
new StripLeadingNonAlphaNum(),
new LowerCaseAndTrim()};
}
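The intent of inserting StripDiacritics after DecomposeDiactritics is that accented and unaccented titles produce the same sort key. The filter's implementation is not part of this patch; the usual technique, assumed here, is Unicode NFD decomposition followed by removal of combining marks:

    import java.text.Normalizer;

    public class StripDiacriticsSketch {
        public static void main(String[] args) {
            String title = "Èl Niño über alles";
            // Decompose accented characters (é -> e + combining acute), then drop the combining marks
            String decomposed = Normalizer.normalize(title, Normalizer.Form.NFD);
            String stripped = decomposed.replaceAll("\\p{M}", "");
            System.out.println(stripped); // El Nino uber alles
        }
    }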
diff --git a/dspace-api/src/main/java/org/dspace/statistics/content/DatasetTimeGenerator.java b/dspace-api/src/main/java/org/dspace/statistics/content/DatasetTimeGenerator.java
index 1152ee669c..a8ffbb4b40 100644
--- a/dspace-api/src/main/java/org/dspace/statistics/content/DatasetTimeGenerator.java
+++ b/dspace-api/src/main/java/org/dspace/statistics/content/DatasetTimeGenerator.java
@@ -187,7 +187,7 @@ public class DatasetTimeGenerator extends DatasetGenerator {
cal2.clear(Calendar.HOUR);
cal1.clear(Calendar.HOUR_OF_DAY);
cal2.clear(Calendar.HOUR_OF_DAY);
- //yet i know calendar just won't clear his hours
+ //yet i know calendar just won't clear its hours
cal1.set(Calendar.HOUR_OF_DAY, 0);
cal2.set(Calendar.HOUR_OF_DAY, 0);
}
diff --git a/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataVisits.java b/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataVisits.java
index 4ee7a0f3e4..121e66af48 100644
--- a/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataVisits.java
+++ b/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataVisits.java
@@ -621,6 +621,10 @@ public class StatisticsDataVisits extends StatisticsData {
}
if (dsoId != null && query.dsoType != -1) {
+ // Store the UUID of the DSO as an attribute. Needed in particular for Bitstream download usage reports,
+ // as the Bitstream itself won't be available when converting points to their REST representation
+ attrs.put("id", dsoId);
+
switch (query.dsoType) {
case Constants.BITSTREAM:
Bitstream bit = bitstreamService.findByIdOrLegacyId(context, dsoId);
diff --git a/dspace-api/src/main/java/org/dspace/statistics/util/IPTable.java b/dspace-api/src/main/java/org/dspace/statistics/util/IPTable.java
index 139b75e8cf..cb94dcc1a1 100644
--- a/dspace-api/src/main/java/org/dspace/statistics/util/IPTable.java
+++ b/dspace-api/src/main/java/org/dspace/statistics/util/IPTable.java
@@ -7,11 +7,13 @@
*/
package org.dspace.statistics.util;
-import java.util.HashMap;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
import java.util.HashSet;
-import java.util.Map;
+import java.util.Iterator;
import java.util.Set;
+import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -25,8 +27,40 @@ public class IPTable {
private static final Logger log = LogManager.getLogger(IPTable.class);
/* A lookup tree for IP addresses and SubnetRanges */
- private final Map>>> map
- = new HashMap<>();
+ private final Set<IPRange> ipRanges = new HashSet<>();
+
+ /**
+ * Internal class representing an IP range
+ */
+ static class IPRange {
+
+ /* Lowest address in the range */
+ private final long ipLo;
+
+ /* Highest address in the range */
+ private final long ipHi;
+
+ IPRange(long ipLo, long ipHi) {
+ this.ipLo = ipLo;
+ this.ipHi = ipHi;
+ }
+
+ /**
+ * Get the lowest address in the range
+ * @return the lowest address as a long integer
+ */
+ public long getIpLo() {
+ return ipLo;
+ }
+
+ /**
+ * Get the highest address in the range
+ * @return the highest address as a long integer
+ */
+ public long getIpHi() {
+ return ipHi;
+ }
+ }
/**
* Can be full v4 IP, subnet or range string.
@@ -45,155 +79,126 @@ public class IPTable {
*/
public void add(String ip) throws IPFormatException {
- String[] start;
+ String start;
- String[] end;
+ String end;
String[] range = ip.split("-");
- if (range.length >= 2) {
+ if (range.length == 2) {
- start = range[0].trim().split("/")[0].split("\\.");
- end = range[1].trim().split("/")[0].split("\\.");
+ start = range[0].trim();
+ end = range[1].trim();
- if (start.length != 4 || end.length != 4) {
- throw new IPFormatException(ip + " - Ranges need to be full IPv4 Addresses");
- }
-
- if (!(start[0].equals(end[0]) && start[1].equals(end[1]) && start[2].equals(end[2]))) {
- throw new IPFormatException(ip + " - Ranges can only be across the last subnet x.y.z.0 - x.y.z.254");
+ try {
+ long ipLo = ipToLong(InetAddress.getByName(start));
+ long ipHi = ipToLong(InetAddress.getByName(end));
+ ipRanges.add(new IPRange(ipLo, ipHi));
+ return;
+ } catch (UnknownHostException e) {
+ throw new IPFormatException(ip + " - Range format should be similar to 1.2.3.0-1.2.3.255");
}
} else {
- //need to ignore CIDR notation for the moment.
- //ip = ip.split("\\/")[0];
-
- String[] subnets = ip.split("\\.");
-
- if (subnets.length < 3) {
- throw new IPFormatException(ip + " - require at least three subnet places (255.255.255.0");
+ // Convert implicit ranges to netmask format
+ // 192 -> 192.0.0.0/8
+ // 192.168 -> 192.168.0.0/16
+ // 192.168.1 -> 192.168.1.0/24
+ int periods = StringUtils.countMatches(ip, '.');
+ if (periods < 3) {
+ ip = StringUtils.join(ip, StringUtils.repeat(".0", 4 - periods - 1), "/", (periods + 1) * 8);
}
- start = subnets;
- end = subnets;
- }
-
- if (start.length >= 3) {
- Map>> first = map.get(start[0]);
-
- if (first == null) {
- first = new HashMap<>();
- map.put(start[0], first);
- }
-
- Map> second = first.get(start[1]);
-
- if (second == null) {
- second = new HashMap<>();
- first.put(start[1], second);
- }
-
- Set third = second.get(start[2]);
-
- if (third == null) {
- third = new HashSet<>();
- second.put(start[2], third);
- }
-
- //now populate fourth place (* or value 0-254);
-
- if (start.length == 3) {
- third.add("*");
- }
-
- if (third.contains("*")) {
- return;
- }
-
- if (start.length >= 4) {
- int s = Integer.valueOf(start[3]);
- int e = Integer.valueOf(end[3]);
- for (int i = s; i <= e; i++) {
- third.add(String.valueOf(i));
+ if (ip.contains("/")) {
+ String[] parts = ip.split("/");
+ try {
+ long ipLong = ipToLong(InetAddress.getByName(parts[0]));
+ long mask = (long) Math.pow(2, 32 - Integer.parseInt(parts[1]));
+ long ipLo = (ipLong / mask) * mask;
+ long ipHi = (( (ipLong / mask) + 1) * mask) - 1;
+ ipRanges.add(new IPRange(ipLo, ipHi));
+ return;
+ } catch (Exception e) {
+ throw new IPFormatException(ip + " - Range format should be similar to 172.16.0.0/12");
+ }
+ } else {
+ try {
+ long ipLo = ipToLong(InetAddress.getByName(ip));
+ ipRanges.add(new IPRange(ipLo, ipLo));
+ return;
+ } catch (UnknownHostException e) {
+ throw new IPFormatException(ip + " - IP address format should be similar to 1.2.3.14");
}
}
}
}
+ /**
+ * Convert an IP address to a long integer
+ * @param ip the IP address
+ * @return the address as a long integer
+ */
+ public static long ipToLong(InetAddress ip) {
+ byte[] octets = ip.getAddress();
+ long result = 0;
+ for (byte octet : octets) {
+ result <<= 8;
+ result |= octet & 0xff;
+ }
+ return result;
+ }
+
+ /**
+ * Convert a long integer into an IP address string
+ * @param ip the IP address as a long integer
+ * @return the address in dotted-quad notation (e.g. 192.168.1.1)
+ */
+ public static String longToIp(long ip) {
+ long part = ip;
+ String[] parts = new String[4];
+ for (int i = 0; i < 4; i++) {
+ long octet = part & 0xff;
+ parts[3 - i] = String.valueOf(octet);
+ part = part / 256;
+ }
+
+ return parts[0] + "." + parts[1] + "." + parts[2] + "." + parts[3];
+ }
+
/**
* Check whether a given address is contained in this netblock.
*
* @param ip the address to be tested
* @return true if {@code ip} is within this table's limits. Returns false
- * if {@link ip} looks like an IPv6 address.
+ * if {@code ip} looks like an IPv6 address.
* @throws IPFormatException Exception Class to deal with IPFormat errors.
*/
public boolean contains(String ip) throws IPFormatException {
- String[] subnets = ip.split("\\.");
-
- // Does it look like IPv6?
- if (subnets.length > 4 || ip.contains("::")) {
- log.warn("Address {} assumed not to match. IPv6 is not implemented.", ip);
- return false;
+ try {
+ long ipToTest = ipToLong(InetAddress.getByName(ip));
+ return ipRanges.stream()
+ .anyMatch(ipRange -> (ipToTest >= ipRange.getIpLo() && ipToTest <= ipRange.getIpHi()));
+ } catch (UnknownHostException e) {
+ throw new IPFormatException("ip not valid");
}
-
- // Does it look like a subnet?
- if (subnets.length < 4) {
- throw new IPFormatException("needs to be a single IP address");
- }
-
- Map>> first = map.get(subnets[0]);
-
- if (first == null) {
- return false;
- }
-
- Map> second = first.get(subnets[1]);
-
- if (second == null) {
- return false;
- }
-
- Set third = second.get(subnets[2]);
-
- if (third == null) {
- return false;
- }
-
- return third.contains(subnets[3]) || third.contains("*");
-
}
/**
- * Convert to a Set.
+ * Convert to a Set. This set contains every IP address in this table's ranges
*
* @return this table's content as a Set
*/
public Set toSet() {
HashSet set = new HashSet<>();
- for (Map.Entry>>> first : map.entrySet()) {
- String firstString = first.getKey();
- Map>> secondMap = first.getValue();
-
- for (Map.Entry>> second : secondMap.entrySet()) {
- String secondString = second.getKey();
- Map> thirdMap = second.getValue();
-
- for (Map.Entry> third : thirdMap.entrySet()) {
- String thirdString = third.getKey();
- Set fourthSet = third.getValue();
-
- if (fourthSet.contains("*")) {
- set.add(firstString + "." + secondString + "." + thirdString);
- } else {
- for (String fourth : fourthSet) {
- set.add(firstString + "." + secondString + "." + thirdString + "." + fourth);
- }
- }
-
- }
+ Iterator<IPRange> ipRangeIterator = ipRanges.iterator();
+ while (ipRangeIterator.hasNext()) {
+ IPRange ipRange = ipRangeIterator.next();
+ long ipLo = ipRange.getIpLo();
+ long ipHi = ipRange.getIpHi();
+ for (long ip = ipLo; ip <= ipHi; ip++) {
+ set.add(longToIp(ip));
}
}
@@ -205,7 +210,7 @@ public class IPTable {
* @return true if empty, false otherwise
*/
public boolean isEmpty() {
- return map.isEmpty();
+ return ipRanges.isEmpty();
}
/**
@@ -217,5 +222,23 @@ public class IPTable {
}
}
-
+ /**
+ * Represent this IP table as a string
+ * @return a string containing all IP ranges in this IP table
+ */
+ @Override
+ public String toString() {
+ StringBuilder stringBuilder = new StringBuilder();
+ Iterator<IPRange> ipRangeIterator = ipRanges.iterator();
+ while (ipRangeIterator.hasNext()) {
+ IPRange ipRange = ipRangeIterator.next();
+ stringBuilder.append(longToIp(ipRange.getIpLo()))
+ .append("-")
+ .append(longToIp(ipRange.getIpHi()));
+ if (ipRangeIterator.hasNext()) {
+ stringBuilder.append(", ");
+ }
+ }
+ return stringBuilder.toString();
+ }
}
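The rewritten IPTable reduces every configured value to a numeric low-high range: a single address becomes a one-element range, an explicit "a-b" range is taken as given, CIDR notation is expanded with the netmask arithmetic above, and a truncated address such as 192.168 is first rewritten to 192.168.0.0/16. A small sketch of the resulting behaviour (the addresses are illustrative):

    import org.dspace.statistics.util.IPTable;

    public class IPTableSketch {
        public static void main(String[] args) throws Exception {
            IPTable table = new IPTable();
            table.add("10.0.0.5");             // single address -> range 10.0.0.5-10.0.0.5
            table.add("192.168");              // implicit range -> 192.168.0.0/16
            table.add("172.16.0.0/12");        // CIDR netmask   -> 172.16.0.0-172.31.255.255
            table.add("1.2.3.0-1.2.3.255");    // explicit range

            System.out.println(table.contains("192.168.42.7")); // true
            System.out.println(table.contains("11.0.0.1"));     // false
            System.out.println(table); // e.g. 10.0.0.5-10.0.0.5, 192.168.0.0-192.168.255.255, ...

            // The helpers convert between dotted-quad and long form
            System.out.println(IPTable.ipToLong(java.net.InetAddress.getByName("192.168.0.1"))); // 3232235521
            System.out.println(IPTable.longToIp(3232235521L));                                   // 192.168.0.1
        }
    }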
diff --git a/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsImporter.java b/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsImporter.java
index bd8662854f..95736a8bd6 100644
--- a/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsImporter.java
+++ b/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsImporter.java
@@ -348,9 +348,9 @@ public class StatisticsImporter {
// Get the eperson details
EPerson eperson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, user);
- int epersonId = 0;
+ UUID epersonId = null;
if (eperson != null) {
- eperson.getID();
+ epersonId = eperson.getID();
}
// Save it in our server
@@ -365,12 +365,10 @@ public class StatisticsImporter {
sid.addField("city", city);
sid.addField("latitude", latitude);
sid.addField("longitude", longitude);
- if (epersonId > 0) {
+ if (epersonId != null) {
sid.addField("epersonid", epersonId);
}
- if (dns != null) {
- sid.addField("dns", dns.toLowerCase());
- }
+ sid.addField("dns", dns.toLowerCase());
solrLoggerService.storeParents(sid, dso);
solr.add(sid);
diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java
index 8bf5d3cbd3..39826ce82c 100644
--- a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java
@@ -18,6 +18,7 @@ import java.util.UUID;
import javax.annotation.Nullable;
import org.apache.commons.collections4.MapUtils;
+import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.checker.service.ChecksumHistoryService;
@@ -57,13 +58,12 @@ import org.springframework.beans.factory.annotation.Autowired;
* be notified of BitstreamStorageManager actions.
*
* @author Peter Breton, Robert Tansley, David Little, Nathan Sarr
- * @version $Revision$
*/
public class BitstreamStorageServiceImpl implements BitstreamStorageService, InitializingBean {
/**
* log4j log
*/
- private static Logger log = org.apache.logging.log4j.LogManager.getLogger(BitstreamStorageServiceImpl.class);
+ private static final Logger log = LogManager.getLogger();
@Autowired(required = true)
protected BitstreamService bitstreamService;
@@ -73,7 +73,7 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini
/**
* asset stores
*/
- private Map stores = new HashMap();
+ private Map stores = new HashMap<>();
/**
* The index of the asset store to use for new bitstreams
@@ -222,11 +222,10 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini
@Override
public void cleanup(boolean deleteDbRecords, boolean verbose) throws SQLException, IOException, AuthorizeException {
- Context context = null;
+ Context context = new Context(Context.Mode.BATCH_EDIT);
int commitCounter = 0;
try {
- context = new Context(Context.Mode.BATCH_EDIT);
context.turnOffAuthorisationSystem();
List storage = bitstreamService.findDeletedBitstreams(context);
@@ -321,9 +320,7 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini
context.abort();
throw sqle;
} finally {
- if (context != null) {
- context.restoreAuthSystemState();
- }
+ context.restoreAuthSystemState();
}
}
@@ -386,11 +383,12 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini
* @throws AuthorizeException Exception indicating the current user of the context does not have permission
* to perform a particular action.
*/
+ @Override
public void migrate(Context context, Integer assetstoreSource, Integer assetstoreDestination, boolean deleteOld,
Integer batchCommitSize) throws IOException, SQLException, AuthorizeException {
//Find all the bitstreams on the old source, copy it to new destination, update store_number, save, remove old
Iterator allBitstreamsInSource = bitstreamService.findByStoreNumber(context, assetstoreSource);
- Integer processedCounter = 0;
+ int processedCounter = 0;
while (allBitstreamsInSource.hasNext()) {
Bitstream bitstream = allBitstreamsInSource.next();
@@ -424,6 +422,7 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini
"] completed. " + processedCounter + " objects were transferred.");
}
+ @Override
public void printStores(Context context) {
try {
diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
index ce2b3b3f05..004a8bba04 100644
--- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
+++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java
@@ -25,6 +25,12 @@ import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.PutObjectResult;
import com.amazonaws.services.s3.model.S3Object;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpStatus;
@@ -324,27 +330,37 @@ public class S3BitStoreService implements BitStoreService {
* @throws Exception generic exception
*/
public static void main(String[] args) throws Exception {
- //TODO use proper CLI, or refactor to be a unit test. Can't mock this without keys though.
+ //TODO Perhaps refactor to be a unit test. Can't mock this without keys though.
// parse command line
- String assetFile = null;
- String accessKey = null;
- String secretKey = null;
+ Options options = new Options();
+ Option option;
- for (int i = 0; i < args.length; i += 2) {
- if (args[i].startsWith("-a")) {
- accessKey = args[i + 1];
- } else if (args[i].startsWith("-s")) {
- secretKey = args[i + 1];
- } else if (args[i].startsWith("-f")) {
- assetFile = args[i + 1];
- }
- }
+ option = Option.builder("a").desc("access key").hasArg().required().build();
+ options.addOption(option);
- if (accessKey == null || secretKey == null || assetFile == null) {
- System.out.println("Missing arguments - exiting");
+ option = Option.builder("s").desc("secret key").hasArg().required().build();
+ options.addOption(option);
+
+ option = Option.builder("f").desc("asset file name").hasArg().required().build();
+ options.addOption(option);
+
+ DefaultParser parser = new DefaultParser();
+
+ CommandLine command;
+ try {
+ command = parser.parse(options, args);
+ } catch (ParseException e) {
+ System.err.println(e.getMessage());
+ new HelpFormatter().printHelp(
+ S3BitStoreService.class.getSimpleName() + " options", options);
return;
}
+
+ String accessKey = command.getOptionValue("a");
+ String secretKey = command.getOptionValue("s");
+ String assetFile = command.getOptionValue("f");
+
S3BitStoreService store = new S3BitStoreService();
AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey);
diff --git a/dspace-api/src/main/java/org/dspace/submit/model/UploadConfiguration.java b/dspace-api/src/main/java/org/dspace/submit/model/UploadConfiguration.java
index bc2f117b3c..a6421b3f7a 100644
--- a/dspace-api/src/main/java/org/dspace/submit/model/UploadConfiguration.java
+++ b/dspace-api/src/main/java/org/dspace/submit/model/UploadConfiguration.java
@@ -8,15 +8,17 @@
package org.dspace.submit.model;
import java.util.List;
+import javax.inject.Inject;
import org.dspace.services.ConfigurationService;
/**
+ * A collection of conditions to be met when uploading Bitstreams.
* @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.it)
*/
public class UploadConfiguration {
- private ConfigurationService configurationService;
+ private final ConfigurationService configurationService;
private String metadataDefinition;
private List options;
@@ -24,22 +26,52 @@ public class UploadConfiguration {
private Boolean required;
private String name;
+ /**
+ * Construct a bitstream uploading configuration.
+ * @param configurationService DSpace configuration provided by the DI container.
+ */
+ @Inject
+ public UploadConfiguration(ConfigurationService configurationService) {
+ this.configurationService = configurationService;
+ }
+
+ /**
+ * The list of access restriction types from which a submitter may choose.
+ * @return choices for restricting access to Bitstreams.
+ */
public List getOptions() {
return options;
}
+ /**
+ * Set the list of access restriction types from which to choose.
+ * Required. May be empty.
+ * @param options choices for restricting access to Bitstreams.
+ */
public void setOptions(List options) {
this.options = options;
}
+ /**
+ * Name of the submission form to which these conditions are attached.
+ * @return the form's name.
+ */
public String getMetadata() {
return metadataDefinition;
}
+ /**
+ * Name the submission form to which these conditions are attached.
+ * @param metadata the form's name.
+ */
public void setMetadata(String metadata) {
this.metadataDefinition = metadata;
}
+ /**
+ * Limit on the maximum size of an uploaded Bitstream.
+ * @return maximum upload size in bytes.
+ */
public Long getMaxSize() {
if (maxSize == null) {
maxSize = configurationService.getLongProperty("upload.max");
@@ -47,10 +79,18 @@ public class UploadConfiguration {
return maxSize;
}
+ /**
+ * Limit the maximum size of an uploaded Bitstream.
+ * @param maxSize maximum upload size in bytes.
+ */
public void setMaxSize(Long maxSize) {
this.maxSize = maxSize;
}
+ /**
+ * Is at least one Bitstream required when submitting a new Item?
+ * @return true if a Bitstream is required.
+ */
public Boolean isRequired() {
if (required == null) {
//defaults to true
@@ -60,25 +100,27 @@ public class UploadConfiguration {
return required;
}
+ /**
+ * Is at least one Bitstream required when submitting a new Item?
+ * @param required true if a Bitstream is required.
+ */
public void setRequired(Boolean required) {
this.required = required;
}
- public ConfigurationService getConfigurationService() {
- return configurationService;
- }
-
- public void setConfigurationService(ConfigurationService configurationService) {
- this.configurationService = configurationService;
- }
-
+ /**
+ * The unique name of this configuration.
+ * @return configuration's name.
+ */
public String getName() {
return name;
}
+ /**
+ * Give this configuration a unique name. Required.
+ * @param name configuration's name.
+ */
public void setName(String name) {
this.name = name;
}
-
-
}
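Since the ConfigurationService is now injected through the constructor, an UploadConfiguration can no longer be built without one. A sketch of manual construction inside a running DSpace instance (the name and form values are illustrative; in practice the bean is defined in the submission Spring configuration):

    import org.dspace.services.ConfigurationService;
    import org.dspace.services.factory.DSpaceServicesFactory;
    import org.dspace.submit.model.UploadConfiguration;

    public class UploadConfigurationSketch {
        public static void main(String[] args) {
            ConfigurationService configurationService =
                    DSpaceServicesFactory.getInstance().getConfigurationService();

            UploadConfiguration upload = new UploadConfiguration(configurationService);
            upload.setName("upload");                 // illustrative configuration name
            upload.setMetadata("bitstream-metadata"); // illustrative form name
            upload.setRequired(true);

            // With no explicit maxSize set, the value falls back to the "upload.max" property
            System.out.println(upload.getMaxSize());
        }
    }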
diff --git a/dspace-api/src/main/java/org/dspace/util/SolrUpgradePre6xStatistics.java b/dspace-api/src/main/java/org/dspace/util/SolrUpgradePre6xStatistics.java
index 12a9970539..7dcebcc09f 100644
--- a/dspace-api/src/main/java/org/dspace/util/SolrUpgradePre6xStatistics.java
+++ b/dspace-api/src/main/java/org/dspace/util/SolrUpgradePre6xStatistics.java
@@ -240,8 +240,8 @@ public class SolrUpgradePre6xStatistics {
/**
* Print a status message appended with the processing time for the operation
*
- * @param header
- * Message to display
+ * @param numProcessed
+ * count of records processed so far.
* @param fromStart
* if true, report on processing time since the start of the program
*/
diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/WorkflowRequirementsServiceImpl.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/WorkflowRequirementsServiceImpl.java
index c651097fcb..aecdccd55a 100644
--- a/dspace-api/src/main/java/org/dspace/xmlworkflow/WorkflowRequirementsServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/WorkflowRequirementsServiceImpl.java
@@ -100,7 +100,7 @@ public class WorkflowRequirementsServiceImpl implements WorkflowRequirementsServ
//Then remove the current user from the inProgressUsers
inProgressUserService.delete(context, inProgressUserService.findByWorkflowItemAndEPerson(context, wfi, user));
- //Make sure the removed user has his custom rights removed
+ //Make sure the removed user has their custom rights removed
xmlWorkflowService.removeUserItemPolicies(context, wfi.getItem(), user);
Workflow workflow = workflowFactory.getWorkflow(wfi.getCollection());
diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java
index fbe06245ab..90f180ec87 100644
--- a/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java
@@ -447,7 +447,7 @@ public class XmlWorkflowServiceImpl implements XmlWorkflowService {
enteredNewStep);
}
} else if (enteredNewStep) {
- // If the user finished his/her step, we keep processing until there is a UI step action or no
+ // If the user finished their step, we keep processing until there is a UI step action or no
// step at all
nextStep = workflow.getNextStep(c, wfi, currentStep, currentOutcome.getResult());
c.turnOffAuthorisationSystem();
@@ -938,7 +938,7 @@ public class XmlWorkflowServiceImpl implements XmlWorkflowService {
authorizeService.removeEPersonPolicies(context, bitstream, e);
}
}
- // Ensure that the submitter always retains his resource policies
+ // Ensure that the submitter always retains their resource policies
if (e.getID().equals(item.getSubmitter().getID())) {
grantSubmitterReadPolicies(context, item);
}
diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Workflow.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Workflow.java
index 636007344c..fd081b3a1b 100644
--- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Workflow.java
+++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Workflow.java
@@ -8,7 +8,7 @@
package org.dspace.xmlworkflow.state;
import java.sql.SQLException;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -118,7 +118,7 @@ public class Workflow implements BeanNameAware {
* @return a map containing the roles, the role name will the key, the role itself the value
*/
public Map getRoles() {
- Map roles = new HashMap<>();
+ Map<String, Role> roles = new LinkedHashMap<>();
for (Step step : steps) {
if (step.getRole() != null) {
roles.put(step.getRole().getId(), step.getRole());
diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SelectReviewerAction.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SelectReviewerAction.java
index 16a8777275..28f21cc418 100644
--- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SelectReviewerAction.java
+++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SelectReviewerAction.java
@@ -89,7 +89,7 @@ public class SelectReviewerAction extends ProcessingAction {
//Retrieve the identifier of the eperson which will do the reviewing
UUID reviewerId = UUID.fromString(submitButton.substring(submitButton.lastIndexOf("_") + 1));
EPerson reviewer = ePersonService.find(c, reviewerId);
- //We have a reviewer, assign him, the workflowitemrole will be translated into a task in the autoassign
+ //Assign the reviewer. The workflowitemrole will be translated into a task by the autoassign action
WorkflowItemRole workflowItemRole = workflowItemRoleService.create(c);
workflowItemRole.setEPerson(reviewer);
workflowItemRole.setRoleId(getRole().getId());
diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SingleUserReviewAction.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SingleUserReviewAction.java
index 9ef554821d..27cf98f77f 100644
--- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SingleUserReviewAction.java
+++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SingleUserReviewAction.java
@@ -25,7 +25,7 @@ import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
/**
* Processing class of an action where a single user has
- * been assigned and he can either accept/reject the workflow item
+ * been assigned and they can either accept/reject the workflow item
* or reject the task
*
* @author Bram De Schouwer (bram.deschouwer at dot com)
@@ -90,7 +90,7 @@ public class SingleUserReviewAction extends ProcessingAction {
} else {
request.setAttribute("page", REJECT_PAGE);
}
- // We have pressed reject item, so take the user to a page where he can reject
+ // We have pressed reject item, so take the user to a page where they can reject
return new ActionResult(ActionResult.TYPE.TYPE_PAGE);
} else if (request.getParameter(SUBMIT_DECLINE_TASK) != null) {
return new ActionResult(ActionResult.TYPE.TYPE_OUTCOME, OUTCOME_REJECT);
diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/AssignOriginalSubmitterAction.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/AssignOriginalSubmitterAction.java
index 5d934ba189..0cd82fe770 100644
--- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/AssignOriginalSubmitterAction.java
+++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/AssignOriginalSubmitterAction.java
@@ -136,7 +136,7 @@ public class AssignOriginalSubmitterAction extends UserSelectionAction {
protected void createTaskForEPerson(Context c, XmlWorkflowItem wfi, Step step, WorkflowActionConfig actionConfig,
EPerson user) throws SQLException, AuthorizeException, IOException {
if (claimedTaskService.find(c, wfi, step.getId(), actionConfig.getId()) != null) {
- workflowRequirementsService.addClaimedUser(c, wfi, step, c.getCurrentUser());
+ workflowRequirementsService.addClaimedUser(c, wfi, step, user);
XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService()
.createOwnedTask(c, wfi, step, actionConfig, user);
}
diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/PoolTaskServiceImpl.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/PoolTaskServiceImpl.java
index f64f1b3942..fb673725e1 100644
--- a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/PoolTaskServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/PoolTaskServiceImpl.java
@@ -92,7 +92,7 @@ public class PoolTaskServiceImpl implements PoolTaskService {
return poolTask;
} else {
//If the user has a is processing or has finished the step for a workflowitem, there is no need to look
- // for pooltasks for one of his
+ // for pooltasks for one of their
//groups because the user already has the task claimed
if (inProgressUserService.findByWorkflowItemAndEPerson(context, workflowItem, ePerson) != null) {
return null;
diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/access-conditions.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/access-conditions.xml
index e21a85cca4..450ed3ad0b 100644
--- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/access-conditions.xml
+++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/access-conditions.xml
@@ -3,10 +3,9 @@
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-2.5.xsd">
-
+
-
-
+
@@ -18,7 +17,7 @@
-
+
@@ -31,7 +30,7 @@
-
+
@@ -43,13 +42,13 @@
-
+
diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/external-openaire.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/external-openaire.xml
index e10d04a16f..f1e6c30d13 100644
--- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/external-openaire.xml
+++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/external-openaire.xml
@@ -1,6 +1,10 @@
-
@@ -15,11 +19,71 @@
init-method="init">
+ Project
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml
index bae2dd11ae..eb3ee82f60 100644
--- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml
+++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml
@@ -64,7 +64,12 @@
-
+
+
+
+
+
+
diff --git a/dspace-api/src/test/java/org/dspace/administer/ProcessCleanerIT.java b/dspace-api/src/test/java/org/dspace/administer/ProcessCleanerIT.java
new file mode 100644
index 0000000000..4676236cfe
--- /dev/null
+++ b/dspace-api/src/test/java/org/dspace/administer/ProcessCleanerIT.java
@@ -0,0 +1,380 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.administer;
+
+import static org.apache.commons.lang.time.DateUtils.addDays;
+import static org.dspace.content.ProcessStatus.COMPLETED;
+import static org.dspace.content.ProcessStatus.FAILED;
+import static org.dspace.content.ProcessStatus.RUNNING;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.hasItem;
+import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
+
+import java.sql.SQLException;
+import java.util.Date;
+import java.util.List;
+
+import org.dspace.AbstractIntegrationTestWithDatabase;
+import org.dspace.app.launcher.ScriptLauncher;
+import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
+import org.dspace.builder.ProcessBuilder;
+import org.dspace.content.ProcessStatus;
+import org.dspace.scripts.Process;
+import org.dspace.scripts.factory.ScriptServiceFactory;
+import org.dspace.scripts.service.ProcessService;
+import org.dspace.services.ConfigurationService;
+import org.dspace.services.factory.DSpaceServicesFactory;
+import org.junit.Test;
+
+/**
+ * Integration tests for {@link ProcessCleaner}.
+ *
+ * @author Luca Giamminonni (luca.giamminonni at 4science.it)
+ *
+ */
+public class ProcessCleanerIT extends AbstractIntegrationTestWithDatabase {
+
+ private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
+
+ private ProcessService processService = ScriptServiceFactory.getInstance().getProcessService();
+
+ @Test
+ public void testWithoutProcessToDelete() throws Exception {
+
+ Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2));
+ Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1));
+ Process process_3 = buildProcess(FAILED, addDays(new Date(), -3));
+
+ configurationService.setProperty("process-cleaner.days", 5);
+
+ TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
+
+ String[] args = new String[] { "process-cleaner" };
+ ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);
+
+ assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty());
+ assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty());
+
+ List messages = testDSpaceRunnableHandler.getInfoMessages();
+ assertThat(messages, hasSize(3));
+ assertThat(messages, hasItem("Searching for processes with status: [COMPLETED]"));
+ assertThat(messages, hasItem("Found 0 processes to be deleted"));
+ assertThat(messages, hasItem("Process cleanup completed"));
+
+ assertThat(processService.find(context, process_1.getID()), notNullValue());
+ assertThat(processService.find(context, process_2.getID()), notNullValue());
+ assertThat(processService.find(context, process_3.getID()), notNullValue());
+
+ }
+
+ @Test
+ public void testWithoutSpecifiedStatus() throws Exception {
+
+ Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2));
+ Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1));
+ Process process_3 = buildProcess(FAILED, addDays(new Date(), -3));
+ Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6));
+ Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8));
+ Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7));
+ Process process_7 = buildProcess(FAILED, addDays(new Date(), -8));
+
+ configurationService.setProperty("process-cleaner.days", 5);
+
+ TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
+
+ String[] args = new String[] { "process-cleaner" };
+ ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);
+
+ assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty());
+ assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty());
+
+ List messages = testDSpaceRunnableHandler.getInfoMessages();
+ assertThat(messages, hasSize(3));
+ assertThat(messages, hasItem("Searching for processes with status: [COMPLETED]"));
+ assertThat(messages, hasItem("Found 2 processes to be deleted"));
+ assertThat(messages, hasItem("Process cleanup completed"));
+
+ assertThat(processService.find(context, process_1.getID()), notNullValue());
+ assertThat(processService.find(context, process_2.getID()), notNullValue());
+ assertThat(processService.find(context, process_3.getID()), notNullValue());
+ assertThat(processService.find(context, process_4.getID()), nullValue());
+ assertThat(processService.find(context, process_5.getID()), nullValue());
+ assertThat(processService.find(context, process_6.getID()), notNullValue());
+ assertThat(processService.find(context, process_7.getID()), notNullValue());
+
+ }
+
+ @Test
+ public void testWithCompletedStatus() throws Exception {
+
+ Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2));
+ Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1));
+ Process process_3 = buildProcess(FAILED, addDays(new Date(), -3));
+ Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6));
+ Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8));
+ Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7));
+ Process process_7 = buildProcess(FAILED, addDays(new Date(), -8));
+
+ configurationService.setProperty("process-cleaner.days", 5);
+
+ TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
+
+ String[] args = new String[] { "process-cleaner", "-c" };
+ ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);
+
+ assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty());
+ assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty());
+
+ List messages = testDSpaceRunnableHandler.getInfoMessages();
+ assertThat(messages, hasSize(3));
+ assertThat(messages, hasItem("Searching for processes with status: [COMPLETED]"));
+ assertThat(messages, hasItem("Found 2 processes to be deleted"));
+ assertThat(messages, hasItem("Process cleanup completed"));
+
+ assertThat(processService.find(context, process_1.getID()), notNullValue());
+ assertThat(processService.find(context, process_2.getID()), notNullValue());
+ assertThat(processService.find(context, process_3.getID()), notNullValue());
+ assertThat(processService.find(context, process_4.getID()), nullValue());
+ assertThat(processService.find(context, process_5.getID()), nullValue());
+ assertThat(processService.find(context, process_6.getID()), notNullValue());
+ assertThat(processService.find(context, process_7.getID()), notNullValue());
+
+ }
+
+ @Test
+ public void testWithRunningStatus() throws Exception {
+
+ Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2));
+ Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1));
+ Process process_3 = buildProcess(FAILED, addDays(new Date(), -3));
+ Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6));
+ Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8));
+ Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7));
+ Process process_7 = buildProcess(FAILED, addDays(new Date(), -8));
+ Process process_8 = buildProcess(RUNNING, addDays(new Date(), -9));
+
+ configurationService.setProperty("process-cleaner.days", 5);
+
+ TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
+
+ String[] args = new String[] { "process-cleaner", "-r" };
+ ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);
+
+ assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty());
+ assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty());
+
+ List messages = testDSpaceRunnableHandler.getInfoMessages();
+ assertThat(messages, hasSize(3));
+ assertThat(messages, hasItem("Searching for processes with status: [RUNNING]"));
+ assertThat(messages, hasItem("Found 2 processes to be deleted"));
+ assertThat(messages, hasItem("Process cleanup completed"));
+
+ assertThat(processService.find(context, process_1.getID()), notNullValue());
+ assertThat(processService.find(context, process_2.getID()), notNullValue());
+ assertThat(processService.find(context, process_3.getID()), notNullValue());
+ assertThat(processService.find(context, process_4.getID()), notNullValue());
+ assertThat(processService.find(context, process_5.getID()), notNullValue());
+ assertThat(processService.find(context, process_6.getID()), nullValue());
+ assertThat(processService.find(context, process_7.getID()), notNullValue());
+ assertThat(processService.find(context, process_8.getID()), nullValue());
+
+ }
+
+ @Test
+ public void testWithFailedStatus() throws Exception {
+
+ Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2));
+ Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1));
+ Process process_3 = buildProcess(FAILED, addDays(new Date(), -3));
+ Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6));
+ Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8));
+ Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7));
+ Process process_7 = buildProcess(FAILED, addDays(new Date(), -8));
+ Process process_8 = buildProcess(FAILED, addDays(new Date(), -9));
+
+ configurationService.setProperty("process-cleaner.days", 5);
+
+ TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
+
+ String[] args = new String[] { "process-cleaner", "-f" };
+ ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);
+
+ assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty());
+ assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty());
+
+ List messages = testDSpaceRunnableHandler.getInfoMessages();
+ assertThat(messages, hasSize(3));
+ assertThat(messages, hasItem("Searching for processes with status: [FAILED]"));
+ assertThat(messages, hasItem("Found 2 processes to be deleted"));
+ assertThat(messages, hasItem("Process cleanup completed"));
+
+ assertThat(processService.find(context, process_1.getID()), notNullValue());
+ assertThat(processService.find(context, process_2.getID()), notNullValue());
+ assertThat(processService.find(context, process_3.getID()), notNullValue());
+ assertThat(processService.find(context, process_4.getID()), notNullValue());
+ assertThat(processService.find(context, process_5.getID()), notNullValue());
+ assertThat(processService.find(context, process_6.getID()), notNullValue());
+ assertThat(processService.find(context, process_7.getID()), nullValue());
+ assertThat(processService.find(context, process_8.getID()), nullValue());
+
+ }
+
+ @Test
+ public void testWithCompletedAndFailedStatus() throws Exception {
+
+ Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2));
+ Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1));
+ Process process_3 = buildProcess(FAILED, addDays(new Date(), -3));
+ Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6));
+ Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8));
+ Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7));
+ Process process_7 = buildProcess(FAILED, addDays(new Date(), -8));
+ Process process_8 = buildProcess(FAILED, addDays(new Date(), -9));
+
+ configurationService.setProperty("process-cleaner.days", 5);
+
+ TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
+
+ String[] args = new String[] { "process-cleaner", "-c", "-f" };
+ ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);
+
+ List messages = testDSpaceRunnableHandler.getInfoMessages();
+ assertThat(messages, hasSize(3));
+ assertThat(messages, hasItem("Searching for processes with status: [COMPLETED, FAILED]"));
+ assertThat(messages, hasItem("Found 4 processes to be deleted"));
+ assertThat(messages, hasItem("Process cleanup completed"));
+
+ assertThat(processService.find(context, process_1.getID()), notNullValue());
+ assertThat(processService.find(context, process_2.getID()), notNullValue());
+ assertThat(processService.find(context, process_3.getID()), notNullValue());
+ assertThat(processService.find(context, process_4.getID()), nullValue());
+ assertThat(processService.find(context, process_5.getID()), nullValue());
+ assertThat(processService.find(context, process_6.getID()), notNullValue());
+ assertThat(processService.find(context, process_7.getID()), nullValue());
+ assertThat(processService.find(context, process_8.getID()), nullValue());
+
+ }
+
+ @Test
+ public void testWithCompletedAndRunningStatus() throws Exception {
+
+ Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2));
+ Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1));
+ Process process_3 = buildProcess(FAILED, addDays(new Date(), -3));
+ Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6));
+ Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8));
+ Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7));
+ Process process_7 = buildProcess(FAILED, addDays(new Date(), -8));
+ Process process_8 = buildProcess(RUNNING, addDays(new Date(), -9));
+
+ configurationService.setProperty("process-cleaner.days", 5);
+
+ TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
+
+ String[] args = new String[] { "process-cleaner", "-c", "-r" };
+ ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);
+
+ List messages = testDSpaceRunnableHandler.getInfoMessages();
+ assertThat(messages, hasSize(3));
+ assertThat(messages, hasItem("Searching for processes with status: [COMPLETED, RUNNING]"));
+ assertThat(messages, hasItem("Found 4 processes to be deleted"));
+ assertThat(messages, hasItem("Process cleanup completed"));
+
+ assertThat(processService.find(context, process_1.getID()), notNullValue());
+ assertThat(processService.find(context, process_2.getID()), notNullValue());
+ assertThat(processService.find(context, process_3.getID()), notNullValue());
+ assertThat(processService.find(context, process_4.getID()), nullValue());
+ assertThat(processService.find(context, process_5.getID()), nullValue());
+ assertThat(processService.find(context, process_6.getID()), nullValue());
+ assertThat(processService.find(context, process_7.getID()), notNullValue());
+ assertThat(processService.find(context, process_8.getID()), nullValue());
+
+ }
+
+ @Test
+ public void testWithFailedAndRunningStatus() throws Exception {
+
+ Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2));
+ Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1));
+ Process process_3 = buildProcess(FAILED, addDays(new Date(), -3));
+ Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6));
+ Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8));
+ Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7));
+ Process process_7 = buildProcess(FAILED, addDays(new Date(), -8));
+ Process process_8 = buildProcess(RUNNING, addDays(new Date(), -9));
+
+ configurationService.setProperty("process-cleaner.days", 5);
+
+ TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
+
+ String[] args = new String[] { "process-cleaner", "-f", "-r" };
+ ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);
+
+ List messages = testDSpaceRunnableHandler.getInfoMessages();
+ assertThat(messages, hasSize(3));
+ assertThat(messages, hasItem("Searching for processes with status: [FAILED, RUNNING]"));
+ assertThat(messages, hasItem("Found 3 processes to be deleted"));
+ assertThat(messages, hasItem("Process cleanup completed"));
+
+ assertThat(processService.find(context, process_1.getID()), notNullValue());
+ assertThat(processService.find(context, process_2.getID()), notNullValue());
+ assertThat(processService.find(context, process_3.getID()), notNullValue());
+ assertThat(processService.find(context, process_4.getID()), notNullValue());
+ assertThat(processService.find(context, process_5.getID()), notNullValue());
+ assertThat(processService.find(context, process_6.getID()), nullValue());
+ assertThat(processService.find(context, process_7.getID()), nullValue());
+ assertThat(processService.find(context, process_8.getID()), nullValue());
+
+ }
+
+ @Test
+ public void testWithCompletedFailedAndRunningStatus() throws Exception {
+
+ Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2));
+ Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1));
+ Process process_3 = buildProcess(FAILED, addDays(new Date(), -3));
+ Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6));
+ Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8));
+ Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7));
+ Process process_7 = buildProcess(FAILED, addDays(new Date(), -8));
+ Process process_8 = buildProcess(RUNNING, addDays(new Date(), -9));
+
+ configurationService.setProperty("process-cleaner.days", 5);
+
+ TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
+
+ String[] args = new String[] { "process-cleaner", "-f", "-r", "-c" };
+ ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);
+
+ List messages = testDSpaceRunnableHandler.getInfoMessages();
+ assertThat(messages, hasSize(3));
+ assertThat(messages, hasItem("Searching for processes with status: [COMPLETED, FAILED, RUNNING]"));
+ assertThat(messages, hasItem("Found 5 processes to be deleted"));
+ assertThat(messages, hasItem("Process cleanup completed"));
+
+ assertThat(processService.find(context, process_1.getID()), notNullValue());
+ assertThat(processService.find(context, process_2.getID()), notNullValue());
+ assertThat(processService.find(context, process_3.getID()), notNullValue());
+ assertThat(processService.find(context, process_4.getID()), nullValue());
+ assertThat(processService.find(context, process_5.getID()), nullValue());
+ assertThat(processService.find(context, process_6.getID()), nullValue());
+ assertThat(processService.find(context, process_7.getID()), nullValue());
+ assertThat(processService.find(context, process_8.getID()), nullValue());
+
+ }
+
+ private Process buildProcess(ProcessStatus processStatus, Date creationTime) throws SQLException {
+ return ProcessBuilder.createProcess(context, admin, "test", List.of())
+ .withProcessStatus(processStatus)
+ .withCreationTime(creationTime)
+ .build();
+ }
+}
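
The three tests above drive the process-cleaner script with different combinations of the -f (failed), -r (running) and -c (completed) flags, together with the process-cleaner.days threshold taken from configuration. A minimal sketch of how those flags could be resolved into the status list echoed in the log messages is shown below; the resolveStatuses helper and the fallback to COMPLETED are illustrative assumptions, not the actual ProcessCleaner implementation.

    // Hedged sketch: map the CLI flags onto the ProcessStatus values to purge.
    // Assumption: COMPLETED is the default when no status flag is supplied.
    private List<ProcessStatus> resolveStatuses(CommandLine commandLine) {
        List<ProcessStatus> statuses = new ArrayList<>();
        if (commandLine.hasOption('c')) {
            statuses.add(ProcessStatus.COMPLETED);
        }
        if (commandLine.hasOption('f')) {
            statuses.add(ProcessStatus.FAILED);
        }
        if (commandLine.hasOption('r')) {
            statuses.add(ProcessStatus.RUNNING);
        }
        if (statuses.isEmpty()) {
            statuses.add(ProcessStatus.COMPLETED);
        }
        return statuses;
    }
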
diff --git a/dspace-api/src/test/java/org/dspace/administer/StructBuilderIT.java b/dspace-api/src/test/java/org/dspace/administer/StructBuilderIT.java
index 7abe3618ed..63340698ac 100644
--- a/dspace-api/src/test/java/org/dspace/administer/StructBuilderIT.java
+++ b/dspace-api/src/test/java/org/dspace/administer/StructBuilderIT.java
@@ -8,6 +8,7 @@
package org.dspace.administer;
import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
@@ -18,9 +19,10 @@ import java.sql.SQLException;
import java.util.Iterator;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Source;
-import javax.xml.transform.TransformerException;
import javax.xml.transform.stream.StreamSource;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.dspace.AbstractIntegrationTest;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
@@ -29,13 +31,11 @@ import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
-import org.junit.After;
+import org.dspace.handle.Handle;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import org.w3c.dom.Attr;
import org.w3c.dom.Node;
import org.xml.sax.SAXException;
@@ -53,7 +53,7 @@ import org.xmlunit.diff.Difference;
*/
public class StructBuilderIT
extends AbstractIntegrationTest {
- private static final Logger log = LoggerFactory.getLogger(StructBuilderIT.class);
+ private static final Logger log = LogManager.getLogger();
private static final CommunityService communityService
= ContentServiceFactory.getInstance().getCommunityService();
@@ -89,27 +89,28 @@ public class StructBuilderIT
context.restoreAuthSystemState();
}
- @After
- public void tearDown() {
- }
+ private static final String COMMUNITY_0_HANDLE = "https://hdl.handle.net/1/1";
+ private static final String COMMUNITY_0_0_HANDLE = "https://hdl.handle.net/1/1.1";
+ private static final String COLLECTION_0_0_0_HANDLE = "https://hdl.handle.net/1/1.1.1";
+ private static final String COLLECTION_0_1_HANDLE = "https://hdl.handle.net/1/1.2";
/** Test structure document. */
private static final String IMPORT_DOCUMENT =
"\n" +
"\n" +
- " \n" +
+ " \n" +
" Top Community 0\n" +
" A top level community\n" +
" Testing 1 2 3\n" +
" 1969\n" +
" A sidebar\n" +
- " \n" +
+ " \n" +
" Sub Community 0.0\n" +
" A sub community\n" +
" Live from New York....\n" +
" 1957\n" +
" Another sidebar\n" +
- " \n" +
+ " \n" +
" Collection 0.0.0\n" +
" A collection\n" +
" Our next guest needs no introduction\n" +
@@ -119,7 +120,14 @@ public class StructBuilderIT
" Testing\n" +
" \n" +
" \n" +
- " \n" +
+ " \n" +
+ " Sub Community 0.1\n" +
+ " A sub community with no handle\n" +
+ " Stop me if you've heard this one\n" +
+ " 2525\n" +
+ " One more sidebar\n" +
+ " \n" +
+ " \n" +
" Collection 0.1\n" +
" Another collection\n" +
" Fourscore and seven years ago\n" +
@@ -150,7 +158,7 @@ public class StructBuilderIT
* @throws java.lang.Exception passed through.
*/
@Test
- public void testImportStructure()
+ public void testImportStructureWithoutHandles()
throws Exception {
System.out.println("importStructure");
@@ -160,11 +168,7 @@ public class StructBuilderIT
byte[] inputBytes = IMPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8);
context.turnOffAuthorisationSystem();
try (InputStream input = new ByteArrayInputStream(inputBytes);) {
- StructBuilder.importStructure(context, input, outputDocument);
- } catch (IOException | SQLException
- | ParserConfigurationException | TransformerException ex) {
- System.err.println(ex.getMessage());
- System.exit(1);
+ StructBuilder.importStructure(context, input, outputDocument, false);
} finally {
context.restoreAuthSystemState();
}
@@ -180,7 +184,81 @@ public class StructBuilderIT
IMPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8)));
Diff myDiff = DiffBuilder.compare(reference).withTest(output)
.normalizeWhitespace()
-// .withNodeFilter(new MyNodeFilter())
+ .withAttributeFilter((Attr attr) ->
+ !attr.getName().equals("identifier"))
+ .checkForIdentical()
+ .build();
+
+ // Was there a difference?
+ // Always output differences -- one is expected.
+ ComparisonFormatter formatter = new DefaultComparisonFormatter();
+ for (Difference difference : myDiff.getDifferences()) {
+ System.err.println(difference.toString(formatter));
+ }
+ // Test for *significant* differences.
+ assertFalse("Output does not match input.", isDifferent(myDiff));
+
+ // TODO spot-check some objects.
+ }
+
+ /**
+ * Test of importStructure method, with given Handles.
+ *
+ * @throws java.lang.Exception passed through.
+ */
+ @Test
+ public void testImportStructureWithHandles()
+ throws Exception {
+ System.out.println("importStructure");
+
+ // Run the method under test and collect its output.
+ ByteArrayOutputStream outputDocument
+ = new ByteArrayOutputStream(IMPORT_DOCUMENT.length() * 2 * 2);
+ byte[] inputBytes = IMPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8);
+ context.turnOffAuthorisationSystem();
+ try (InputStream input = new ByteArrayInputStream(inputBytes);) {
+ StructBuilder.importStructure(context, input, outputDocument, true);
+ } finally {
+ context.restoreAuthSystemState();
+ }
+
+ boolean found;
+
+ // Check a chosen Community for the right Handle.
+ found = false;
+ for (Community community : communityService.findAllTop(context)) {
+ for (Handle handle : community.getHandles()) {
+ if (handle.getHandle().equals(COMMUNITY_0_HANDLE)) {
+ found = true;
+ break;
+ }
+ }
+ }
+ assertTrue("A community should have its specified handle", found);
+
+ // Check a chosen Collection for the right Handle.
+ found = false;
+ for (Collection collection : collectionService.findAll(context)) {
+ for (Handle handle : collection.getHandles()) {
+ if (handle.getHandle().equals(COLLECTION_0_1_HANDLE)) {
+ found = true;
+ break;
+ }
+ }
+ }
+ assertTrue("A collection should have its specified handle", found);
+
+ // Compare import's output with its input.
+ // N.B. here we rely on StructBuilder to emit communities and
+ // collections in the same order as the input document. If that changes,
+ // we will need a smarter NodeMatcher, probably based on children.
+ Source output = new StreamSource(
+ new ByteArrayInputStream(outputDocument.toByteArray()));
+ Source reference = new StreamSource(
+ new ByteArrayInputStream(
+ IMPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8)));
+ Diff myDiff = DiffBuilder.compare(reference).withTest(output)
+ .normalizeWhitespace()
.withAttributeFilter((Attr attr) ->
!attr.getName().equals("identifier"))
.checkForIdentical()
@@ -236,7 +314,6 @@ public class StructBuilderIT
EXPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8)));
Diff myDiff = DiffBuilder.compare(reference).withTest(output)
.normalizeWhitespace()
-// .withNodeFilter(new MyNodeFilter())
.withAttributeFilter((Attr attr) ->
!attr.getName().equals("identifier"))
.checkForIdentical()
@@ -310,23 +387,4 @@ public class StructBuilderIT
// There must be at most one difference.
return diffIterator.hasNext();
}
-
- /**
- * Reject uninteresting nodes. (currently commented out of tests above)
- */
- /*private static class MyNodeFilter implements Predicate {
- private static final List dontCare = Arrays.asList(
- "description",
- "intro",
- "copyright",
- "sidebar",
- "license",
- "provenance");
-
- @Override
- public boolean test(Node node) {
- String type = node.getLocalName();
- return ! dontCare.contains(type);
- }
- }*/
}
diff --git a/dspace-api/src/test/java/org/dspace/app/requestitem/CollectionAdministratorsRequestItemStrategyTest.java b/dspace-api/src/test/java/org/dspace/app/requestitem/CollectionAdministratorsRequestItemStrategyTest.java
new file mode 100644
index 0000000000..37292e91c8
--- /dev/null
+++ b/dspace-api/src/test/java/org/dspace/app/requestitem/CollectionAdministratorsRequestItemStrategyTest.java
@@ -0,0 +1,62 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.app.requestitem;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.List;
+
+import org.dspace.content.Collection;
+import org.dspace.content.Item;
+import org.dspace.core.Context;
+import org.dspace.eperson.EPerson;
+import org.dspace.eperson.Group;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+/**
+ *
+ * @author Mark H. Wood
+ */
+public class CollectionAdministratorsRequestItemStrategyTest {
+ private static final String NAME = "John Q. Public";
+ private static final String EMAIL = "jqpublic@example.com";
+
+ /**
+ * Test of getRequestItemAuthor method, of class CollectionAdministratorsRequestItemStrategy.
+ * @throws java.lang.Exception passed through.
+ */
+ @Test
+ public void testGetRequestItemAuthor()
+ throws Exception {
+ System.out.println("getRequestItemAuthor");
+
+ Context context = Mockito.mock(Context.class);
+
+ EPerson eperson1 = Mockito.mock(EPerson.class);
+ Mockito.when(eperson1.getEmail()).thenReturn(EMAIL);
+ Mockito.when(eperson1.getFullName()).thenReturn(NAME);
+
+ Group group1 = Mockito.mock(Group.class);
+ Mockito.when(group1.getMembers()).thenReturn(List.of(eperson1));
+
+ Collection collection1 = Mockito.mock(Collection.class);
+ Mockito.when(collection1.getAdministrators()).thenReturn(group1);
+
+ Item item = Mockito.mock(Item.class);
+ Mockito.when(item.getOwningCollection()).thenReturn(collection1);
+ Mockito.when(item.getSubmitter()).thenReturn(eperson1);
+
+ CollectionAdministratorsRequestItemStrategy instance = new CollectionAdministratorsRequestItemStrategy();
+        List<RequestItemAuthor> result = instance.getRequestItemAuthor(context,
+ item);
+ assertEquals("Should be one author", 1, result.size());
+ assertEquals("Name should match " + NAME, NAME, result.get(0).getFullName());
+ assertEquals("Email should match " + EMAIL, EMAIL, result.get(0).getEmail());
+ }
+}
diff --git a/dspace-api/src/test/java/org/dspace/app/requestitem/CombiningRequestItemStrategyTest.java b/dspace-api/src/test/java/org/dspace/app/requestitem/CombiningRequestItemStrategyTest.java
new file mode 100644
index 0000000000..c5475612cb
--- /dev/null
+++ b/dspace-api/src/test/java/org/dspace/app/requestitem/CombiningRequestItemStrategyTest.java
@@ -0,0 +1,53 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.app.requestitem;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder;
+
+import java.util.List;
+
+import org.dspace.content.Item;
+import org.dspace.core.Context;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+/**
+ *
+ * @author Mark H. Wood
+ */
+public class CombiningRequestItemStrategyTest {
+ /**
+ * Test of getRequestItemAuthor method, of class CombiningRequestItemStrategy.
+ * @throws java.lang.Exception passed through.
+ */
+ @Test
+ public void testGetRequestItemAuthor()
+ throws Exception {
+ System.out.println("getRequestItemAuthor");
+ Context context = null;
+
+ Item item = Mockito.mock(Item.class);
+ RequestItemAuthor author1 = new RequestItemAuthor("Pat Paulsen", "ppaulsen@example.com");
+ RequestItemAuthor author2 = new RequestItemAuthor("Alfred E. Neuman", "aeneuman@example.com");
+ RequestItemAuthor author3 = new RequestItemAuthor("Alias Undercover", "aundercover@example.com");
+
+ RequestItemAuthorExtractor strategy1 = Mockito.mock(RequestItemHelpdeskStrategy.class);
+ Mockito.when(strategy1.getRequestItemAuthor(context, item)).thenReturn(List.of(author1));
+
+ RequestItemAuthorExtractor strategy2 = Mockito.mock(RequestItemMetadataStrategy.class);
+ Mockito.when(strategy2.getRequestItemAuthor(context, item)).thenReturn(List.of(author2, author3));
+
+        List<RequestItemAuthorExtractor> strategies = List.of(strategy1, strategy2);
+
+ CombiningRequestItemStrategy instance = new CombiningRequestItemStrategy(strategies);
+        List<RequestItemAuthor> result = instance.getRequestItemAuthor(context,
+ item);
+ assertThat(result, containsInAnyOrder(author1, author2, author3));
+ }
+}
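
The expectation encoded in the test above is that CombiningRequestItemStrategy simply concatenates whatever each configured RequestItemAuthorExtractor returns. A sketch of that delegation pattern, assuming a strategies field supplied at construction (this is not the actual class body), could look like:

    // Hedged sketch of the combining pattern: ask each delegate and merge the results.
    // The throws clause is an assumption based on the extractor interface used in the test.
    public List<RequestItemAuthor> getRequestItemAuthor(Context context, Item item) throws SQLException {
        List<RequestItemAuthor> authors = new ArrayList<>();
        for (RequestItemAuthorExtractor strategy : strategies) {
            authors.addAll(strategy.getRequestItemAuthor(context, item));
        }
        return authors;
    }
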
diff --git a/dspace-api/src/test/java/org/dspace/authorize/AuthorizeServiceTest.java b/dspace-api/src/test/java/org/dspace/authorize/AuthorizeServiceTest.java
index 46435ec8f1..70eaa2a0b9 100644
--- a/dspace-api/src/test/java/org/dspace/authorize/AuthorizeServiceTest.java
+++ b/dspace-api/src/test/java/org/dspace/authorize/AuthorizeServiceTest.java
@@ -27,7 +27,7 @@ import org.junit.Assert;
import org.junit.Test;
/**
- * Created by pbecker as he wanted to write a test against DS-3572.
+ * Created by pbecker to write a test against DS-3572.
* This definitely needs to be extended, but it's at least a start.
*/
public class AuthorizeServiceTest extends AbstractUnitTest {
@@ -80,7 +80,7 @@ public class AuthorizeServiceTest extends AbstractUnitTest {
}
try {
- // eperson1 should be able to write as he is member of a group that has write permissions
+ // eperson1 should be able to write as it is a member of a group that has write permissions
Assert.assertTrue(authorizeService.authorizeActionBoolean(context, eperson1, dso, Constants.WRITE, true));
// person2 shouldn't have write access
Assert.assertFalse(authorizeService.authorizeActionBoolean(context, eperson2, dso, Constants.WRITE, true));
diff --git a/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java
index 62e1d2e7e5..86573940e4 100644
--- a/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java
+++ b/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java
@@ -11,6 +11,7 @@ import java.io.IOException;
import java.sql.SQLException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
+import java.util.Date;
import java.util.List;
import java.util.Set;
@@ -60,6 +61,11 @@ public class ProcessBuilder extends AbstractBuilder {
return this;
}
+ public ProcessBuilder withCreationTime(Date creationTime) {
+ process.setCreationTime(creationTime);
+ return this;
+ }
+
public ProcessBuilder withStartAndEndTime(String startTime, String endTime) throws ParseException {
SimpleDateFormat simpleDateFormat = new SimpleDateFormat("dd/MM/yyyy");
process.setStartTime(simpleDateFormat.parse(startTime));
diff --git a/dspace-api/src/test/java/org/dspace/content/logic/LogicalFilterTest.java b/dspace-api/src/test/java/org/dspace/content/logic/LogicalFilterTest.java
index 7c8268a03b..0e08646220 100644
--- a/dspace-api/src/test/java/org/dspace/content/logic/LogicalFilterTest.java
+++ b/dspace-api/src/test/java/org/dspace/content/logic/LogicalFilterTest.java
@@ -408,6 +408,7 @@ public class LogicalFilterTest extends AbstractUnitTest {
// Create condition to match pattern on dc.title metadata
Condition condition = new MetadataValuesMatchCondition();
+ condition.setItemService(ContentServiceFactory.getInstance().getItemService());
        Map<String, Object> parameters = new HashMap<>();
// Match on the dc.title field
parameters.put("field", "dc.title");
@@ -461,6 +462,7 @@ public class LogicalFilterTest extends AbstractUnitTest {
// Instantiate new filter for testing this condition
DefaultFilter filter = new DefaultFilter();
Condition condition = new InCollectionCondition();
+ condition.setItemService(ContentServiceFactory.getInstance().getItemService());
        Map<String, Object> parameters = new HashMap<>();
// Add collectionOne handle to the collections parameter - ie. we are testing to see if the item is
diff --git a/dspace-api/src/test/java/org/dspace/content/packager/ITDSpaceAIP.java b/dspace-api/src/test/java/org/dspace/content/packager/ITDSpaceAIP.java
index 33e353f457..a634b98130 100644
--- a/dspace-api/src/test/java/org/dspace/content/packager/ITDSpaceAIP.java
+++ b/dspace-api/src/test/java/org/dspace/content/packager/ITDSpaceAIP.java
@@ -194,7 +194,7 @@ public class ITDSpaceAIP extends AbstractIntegrationTest {
ePersonService.update(context, submitter);
context.setCurrentUser(submitter);
- //Make our test ePerson an admin so he can perform deletes and restores
+ //Make our test ePerson an admin so it can perform deletes and restores
GroupService groupService = EPersonServiceFactory.getInstance().getGroupService();
Group adminGroup = groupService.findByName(context, Group.ADMIN);
groupService.addMember(context, adminGroup, submitter);
diff --git a/dspace-api/src/test/java/org/dspace/ctask/general/CreateMissingIdentifiersIT.java b/dspace-api/src/test/java/org/dspace/ctask/general/CreateMissingIdentifiersIT.java
new file mode 100644
index 0000000000..383f87bbd7
--- /dev/null
+++ b/dspace-api/src/test/java/org/dspace/ctask/general/CreateMissingIdentifiersIT.java
@@ -0,0 +1,79 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.ctask.general;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+
+import org.dspace.AbstractIntegrationTestWithDatabase;
+import org.dspace.builder.CollectionBuilder;
+import org.dspace.builder.CommunityBuilder;
+import org.dspace.builder.ItemBuilder;
+import org.dspace.content.Collection;
+import org.dspace.content.Item;
+import org.dspace.curate.Curator;
+import org.dspace.identifier.VersionedHandleIdentifierProviderWithCanonicalHandles;
+import org.dspace.services.ConfigurationService;
+import org.dspace.services.factory.DSpaceServicesFactory;
+import org.junit.Test;
+
+/**
+ * Rudimentary test of the curation task.
+ *
+ * @author mwood
+ */
+public class CreateMissingIdentifiersIT
+ extends AbstractIntegrationTestWithDatabase {
+ private static final String P_TASK_DEF
+ = "plugin.named.org.dspace.curate.CurationTask";
+ private static final String TASK_NAME = "test";
+
+ @Test
+ public void testPerform()
+ throws IOException {
+ ConfigurationService configurationService
+ = DSpaceServicesFactory.getInstance().getConfigurationService();
+ configurationService.setProperty(P_TASK_DEF, null);
+ configurationService.addPropertyValue(P_TASK_DEF,
+ CreateMissingIdentifiers.class.getCanonicalName() + " = " + TASK_NAME);
+
+ Curator curator = new Curator();
+ curator.addTask(TASK_NAME);
+
+ context.setCurrentUser(admin);
+ parentCommunity = CommunityBuilder.createCommunity(context)
+ .build();
+ Collection collection = CollectionBuilder.createCollection(context, parentCommunity)
+ .build();
+ Item item = ItemBuilder.createItem(context, collection)
+ .build();
+
+ /*
+ * Curate with regular test configuration -- should succeed.
+ */
+ curator.curate(context, item);
+ int status = curator.getStatus(TASK_NAME);
+ assertEquals("Curation should succeed", Curator.CURATE_SUCCESS, status);
+
+ /*
+ * Now install an incompatible provider to make the task fail.
+ */
+ DSpaceServicesFactory.getInstance()
+ .getServiceManager()
+ .registerServiceClass(
+ VersionedHandleIdentifierProviderWithCanonicalHandles.class.getCanonicalName(),
+ VersionedHandleIdentifierProviderWithCanonicalHandles.class);
+
+ curator.curate(context, item);
+ System.out.format("With incompatible provider, result is '%s'.\n",
+ curator.getResult(TASK_NAME));
+ assertEquals("Curation should fail", Curator.CURATE_ERROR,
+ curator.getStatus(TASK_NAME));
+ }
+}
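
The test registers the curation task at runtime through ConfigurationService. The equivalent static configuration, built from the same key (P_TASK_DEF) and value the test assembles, would presumably be a single named-plugin line in a DSpace configuration file such as local.cfg:

    plugin.named.org.dspace.curate.CurationTask = \
        org.dspace.ctask.general.CreateMissingIdentifiers = test
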
diff --git a/dspace-api/src/test/java/org/dspace/statistics/util/IPTableTest.java b/dspace-api/src/test/java/org/dspace/statistics/util/IPTableTest.java
index 320cc55a0d..1dbbdb6cd0 100644
--- a/dspace-api/src/test/java/org/dspace/statistics/util/IPTableTest.java
+++ b/dspace-api/src/test/java/org/dspace/statistics/util/IPTableTest.java
@@ -56,14 +56,15 @@ public class IPTableTest {
IPTable instance = new IPTable();
// Add IP address
instance.add(LOCALHOST);
- // Add IP range
+ // Add IP range (contains 256 addresses)
instance.add("192.168.1");
- // Make sure both exist
+        // Make sure the set contains the single address plus every address in the range
        Set<String> ipSet = instance.toSet();
- assertEquals(2, ipSet.size());
+ assertEquals(257, ipSet.size());
assertTrue(ipSet.contains(LOCALHOST));
- assertTrue(ipSet.contains("192.168.1"));
+ assertTrue(ipSet.contains("192.168.1.0"));
+ assertTrue(ipSet.contains("192.168.1.255"));
}
@Test
@@ -76,13 +77,13 @@ public class IPTableTest {
assertEquals(1, instance.toSet().size());
instance = new IPTable();
- // Add IP range & then add an IP from within that range
+ // Add IP range w/ 256 addresses & then add an IP from within that range
instance.add("192.168.1");
instance.add("192.168.1.1");
// Verify only the range exists
        Set<String> ipSet = instance.toSet();
- assertEquals(1, ipSet.size());
- assertTrue(ipSet.contains("192.168.1"));
+ assertEquals(256, ipSet.size());
+ assertTrue(ipSet.contains("192.168.1.1"));
instance = new IPTable();
// Now, switch order. Add IP address, then add a range encompassing that IP
@@ -90,8 +91,8 @@ public class IPTableTest {
instance.add("192.168.1");
// Verify only the range exists
ipSet = instance.toSet();
- assertEquals(1, ipSet.size());
- assertTrue(ipSet.contains("192.168.1"));
+ assertEquals(256, ipSet.size());
+ assertTrue(ipSet.contains("192.168.1.1"));
}
/**
@@ -120,6 +121,48 @@ public class IPTableTest {
assertTrue("IP within an add()ed range should match", contains);
}
+ @Test
+ public void testDashRangeContains() throws Exception {
+ IPTable instance = new IPTable();
+ instance.add("192.168.0.0 - 192.168.0.245");
+
+ assertTrue("Range should contain lower limit", instance.contains("192.168.0.0"));
+ assertTrue("Range should contain upper limit", instance.contains("192.168.0.245"));
+ assertTrue("Range should contain value in between limits", instance.contains("192.168.0.123"));
+ assertTrue("Range should contain value in between limits", instance.contains("192.168.0.234"));
+
+ assertFalse("Range should not contain value below lower limit", instance.contains("192.167.255.255"));
+ assertFalse("Range should not contain value above upper limit", instance.contains("192.168.0.246"));
+ }
+
+ @Test
+ public void testSubnetRangeContains() throws Exception {
+ IPTable instance = new IPTable();
+ instance.add("192.168.0.0/30"); // translates to 192.168.0.0 - 192.168.0.3
+
+ assertTrue("Range should contain lower limit", instance.contains("192.168.0.0"));
+ assertTrue("Range should contain upper limit", instance.contains("192.168.0.3"));
+ assertTrue("Range should contain values in between limits", instance.contains("192.168.0.1"));
+ assertTrue("Range should contain values in between limits", instance.contains("192.168.0.2"));
+
+ assertFalse("Range should not contain value below lower limit", instance.contains("192.167.255.255"));
+ assertFalse("Range should not contain value above upper limit", instance.contains("192.168.0.4"));
+ }
+
+ @Test
+ public void testImplicitRangeContains() throws Exception {
+ IPTable instance = new IPTable();
+ instance.add("192.168.1");
+
+ assertTrue("Range should contain lower limit", instance.contains("192.168.1.0"));
+ assertTrue("Range should contain upper limit", instance.contains("192.168.1.255"));
+ assertTrue("Range should contain values in between limits", instance.contains("192.168.1.123"));
+ assertTrue("Range should contain values in between limits", instance.contains("192.168.1.234"));
+
+ assertFalse("Range should not contain value below lower limit", instance.contains("192.168.0.0"));
+ assertFalse("Range should not contain value above upper limit", instance.contains("192.168.2.0"));
+ }
+
/**
* Test of isEmpty method, of class IPTable.
* @throws java.lang.Exception passed through.
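
Taken together, the new tests cover the three range notations IPTable accepts: a dash-delimited range, CIDR notation, and an implicit three-octet prefix. A compact usage sketch combining them (addresses chosen arbitrarily, and declared throws Exception to cover the checked exceptions the tests above also propagate):

    // Hedged usage sketch of IPTable range handling.
    static boolean isBlocked(String candidateIp) throws Exception {
        IPTable table = new IPTable();
        table.add("192.168.0.0 - 192.168.0.245");  // dash-delimited range
        table.add("192.168.0.0/30");               // CIDR range: 192.168.0.0 - 192.168.0.3
        table.add("192.168.1");                    // implicit /24: 192.168.1.0 - 192.168.1.255
        return table.contains(candidateIp);
    }
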
diff --git a/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java b/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java
index 700105899a..e27a3ee947 100644
--- a/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java
+++ b/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java
@@ -8,6 +8,10 @@
package org.dspace.xoai.app;
import static com.lyncode.xoai.dataprovider.core.Granularity.Second;
+import static java.util.Objects.nonNull;
+import static org.apache.commons.lang.StringUtils.EMPTY;
+import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_PARAM;
+import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_START;
import static org.dspace.xoai.util.ItemUtils.retrieveMetadata;
import java.io.ByteArrayOutputStream;
@@ -38,6 +42,8 @@ import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrQuery.ORDER;
import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrInputDocument;
import org.dspace.authorize.ResourcePolicy;
@@ -77,6 +83,7 @@ import org.springframework.context.annotation.AnnotationConfigApplicationContext
public class XOAI {
private static Logger log = LogManager.getLogger(XOAI.class);
+ // needed because the solr query only returns 10 rows by default
private final Context context;
private boolean optimize;
private final boolean verbose;
@@ -94,8 +101,8 @@ public class XOAI {
private final AuthorizeService authorizeService;
private final ItemService itemService;
- private final static ConfigurationService configurationService = DSpaceServicesFactory
- .getInstance().getConfigurationService();
+ private final static ConfigurationService configurationService = DSpaceServicesFactory.getInstance()
+ .getConfigurationService();
private List extensionPlugins;
@@ -152,9 +159,8 @@ public class XOAI {
System.out.println("Using full import.");
result = this.indexAll();
} else {
- SolrQuery solrParams = new SolrQuery("*:*")
- .addField("item.lastmodified")
- .addSort("item.lastmodified", ORDER.desc).setRows(1);
+ SolrQuery solrParams = new SolrQuery("*:*").addField("item.lastmodified")
+ .addSort("item.lastmodified", ORDER.desc).setRows(1);
SolrDocumentList results = DSpaceSolrSearch.query(solrServerResolver.getServer(), solrParams);
if (results.getNumFound() == 0) {
@@ -167,7 +173,6 @@ public class XOAI {
}
solrServerResolver.getServer().commit();
-
if (optimize) {
println("Optimizing Index");
solrServerResolver.getServer().optimize();
@@ -183,12 +188,10 @@ public class XOAI {
}
private int index(Date last) throws DSpaceSolrIndexerException, IOException {
- System.out
- .println("Incremental import. Searching for documents modified after: "
- + last.toString());
+ System.out.println("Incremental import. Searching for documents modified after: " + last.toString());
/*
- * Index all changed or new items or items whose visibility is viable to
- * change due to an embargo.
+ * Index all changed or new items or items whose visibility is viable to change
+ * due to an embargo.
*/
try {
            Iterator<Item> discoverableChangedItems = itemService
@@ -204,31 +207,55 @@ public class XOAI {
}
/**
- * Get all items already in the index which are viable to change visibility
- * due to an embargo. Only consider those which haven't been modified
- * anyways since the last update, so they aren't updated twice in one import
- * run.
+ * Get all items already in the index which are viable to change visibility due
+ * to an embargo. Only consider those which haven't been modified anyways since
+ * the last update, so they aren't updated twice in one import run.
*
- * @param last
- * maximum date for an item to be considered for an update
- * @return Iterator over list of items which might have changed their
- * visibility since the last update.
+ * @param last maximum date for an item to be considered for an update
+ * @return Iterator over list of items which might have changed their visibility
+ * since the last update.
* @throws DSpaceSolrIndexerException
*/
    private Iterator<Item> getItemsWithPossibleChangesBefore(Date last) throws DSpaceSolrIndexerException, IOException {
try {
- SolrQuery params = new SolrQuery("item.willChangeStatus:true").addField("item.id");
- SolrDocumentList documents = DSpaceSolrSearch.query(solrServerResolver.getServer(), params);
+ SolrQuery params = new SolrQuery("item.willChangeStatus:true").addField("item.id").setRows(100)
+ .addSort("item.handle", SolrQuery.ORDER.asc);
+ SolrClient solrClient = solrServerResolver.getServer();
+
            List<Item> items = new LinkedList<>();
- for (int i = 0; i < documents.getNumFound(); i++) {
- Item item = itemService.find(context,
- UUID.fromString((String) documents.get(i).getFieldValue("item.id")));
- if (item.getLastModified().before(last)) {
- items.add(item);
+ boolean done = false;
+ /*
+             * Use Solr cursor marks to page through the full result set, rather than
+             * stopping at the default Solr limit of 10 SolrDocument objects per query.
+ */
+ String cursorMark = CURSOR_MARK_START;
+ String nextCursorMark = EMPTY;
+
+ while (!done) {
+ params.set(CURSOR_MARK_PARAM, cursorMark);
+ QueryResponse response = solrClient.query(params);
+ nextCursorMark = response.getNextCursorMark();
+
+ for (SolrDocument document : response.getResults()) {
+ Item item = itemService.find(context, UUID.fromString((String) document.getFieldValue("item.id")));
+ if (nonNull(item)) {
+ if (nonNull(item.getLastModified())) {
+ if (item.getLastModified().before(last)) {
+ items.add(item);
+ }
+ } else {
+ log.warn("Skipping item with id " + item.getID());
+ }
+ }
}
+
+ if (cursorMark.equals(nextCursorMark)) {
+ done = true;
+ }
+ cursorMark = nextCursorMark;
}
return items.iterator();
- } catch (SolrServerException | SQLException | DSpaceSolrException ex) {
+ } catch (SolrServerException | SQLException ex) {
throw new DSpaceSolrIndexerException(ex.getMessage(), ex);
}
}
@@ -250,11 +277,10 @@ public class XOAI {
}
/**
- * Check if an item is already indexed. Using this, it is possible to check
- * if withdrawn or nondiscoverable items have to be indexed at all.
+ * Check if an item is already indexed. Using this, it is possible to check if
+ * withdrawn or nondiscoverable items have to be indexed at all.
*
- * @param item
- * Item that should be checked for its presence in the index.
+ * @param item Item that should be checked for its presence in the index.
* @return has it been indexed?
*/
private boolean checkIfIndexed(Item item) throws IOException {
@@ -266,11 +292,11 @@ public class XOAI {
return false;
}
}
- /**
+
+ /**
* Check if an item is flagged visible in the index.
*
- * @param item
- * Item that should be checked for its presence in the index.
+ * @param item Item that should be checked for its presence in the index.
* @return has it been indexed?
*/
private boolean checkIfVisibleInOAI(Item item) throws IOException {
@@ -287,8 +313,7 @@ public class XOAI {
}
}
-    private int index(Iterator<Item> iterator)
-        throws DSpaceSolrIndexerException {
+    private int index(Iterator<Item> iterator) throws DSpaceSolrIndexerException {
try {
int i = 0;
int batchSize = configurationService.getIntProperty("oai.import.batch.size", 1000);
@@ -302,7 +327,7 @@ public class XOAI {
} else {
list.add(this.index(item));
}
- //Uncache the item to keep memory consumption low
+ // Uncache the item to keep memory consumption low
context.uncacheEntity(item);
} catch (SQLException | IOException | XMLStreamException | WritingXmlException ex) {
@@ -334,12 +359,11 @@ public class XOAI {
}
/**
- * Method to get the most recent date on which the item changed concerning
- * the OAI deleted status (policy start and end dates for all anonymous READ
+ * Method to get the most recent date on which the item changed concerning the
+ * OAI deleted status (policy start and end dates for all anonymous READ
* policies and the standard last modification date)
*
- * @param item
- * Item
+ * @param item Item
* @return date
* @throws SQLException
*/
@@ -382,17 +406,16 @@ public class XOAI {
boolean isIndexed = this.checkIfIndexed(item);
/*
- * If the item is not under embargo, it should be visible. If it is,
- * make it invisible if this is the first time it is indexed. For
- * subsequent index runs, keep the current status, so that if the item
- * is embargoed again, it is flagged as deleted instead and does not
- * just disappear, or if it is still under embargo, it won't become
- * visible and be known to harvesters as deleted before it gets
- * disseminated for the first time. The item has to be indexed directly
- * after publication even if it is still embargoed, because its
- * lastModified date will not change when the embargo end date (or start
- * date) is reached. To circumvent this, an item which will change its
- * status in the future will be marked as such.
+ * If the item is not under embargo, it should be visible. If it is, make it
+ * invisible if this is the first time it is indexed. For subsequent index runs,
+ * keep the current status, so that if the item is embargoed again, it is
+ * flagged as deleted instead and does not just disappear, or if it is still
+ * under embargo, it won't become visible and be known to harvesters as deleted
+ * before it gets disseminated for the first time. The item has to be indexed
+ * directly after publication even if it is still embargoed, because its
+ * lastModified date will not change when the embargo end date (or start date)
+ * is reached. To circumvent this, an item which will change its status in the
+ * future will be marked as such.
*/
boolean isPublic = isEmbargoed ? (isIndexed ? isCurrentlyVisible : false) : true;
@@ -404,33 +427,31 @@ public class XOAI {
doc.addField("item.willChangeStatus", willChangeStatus(item));
/*
- * Mark an item as deleted not only if it is withdrawn, but also if it
- * is made private, because items should not simply disappear from OAI
- * with a transient deletion policy. Do not set the flag for still
- * invisible embargoed items, because this will override the item.public
- * flag.
+ * Mark an item as deleted not only if it is withdrawn, but also if it is made
+ * private, because items should not simply disappear from OAI with a transient
+ * deletion policy. Do not set the flag for still invisible embargoed items,
+ * because this will override the item.public flag.
*/
doc.addField("item.deleted",
(item.isWithdrawn() || !item.isDiscoverable() || (isEmbargoed ? isPublic : false)));
/*
- * An item that is embargoed will potentially not be harvested by
- * incremental harvesters if the from and until params do not encompass
- * both the standard lastModified date and the anonymous-READ resource
- * policy start date. The same is true for the end date, where
- * harvesters might not get a tombstone record. Therefore, consider all
- * relevant policy dates and the standard lastModified date and take the
- * most recent of those which have already passed.
+ * An item that is embargoed will potentially not be harvested by incremental
+ * harvesters if the from and until params do not encompass both the standard
+ * lastModified date and the anonymous-READ resource policy start date. The same
+ * is true for the end date, where harvesters might not get a tombstone record.
+ * Therefore, consider all relevant policy dates and the standard lastModified
+ * date and take the most recent of those which have already passed.
*/
- doc.addField("item.lastmodified", SolrUtils.getDateFormatter()
- .format(this.getMostRecentModificationDate(item)));
+ doc.addField("item.lastmodified",
+ SolrUtils.getDateFormatter().format(this.getMostRecentModificationDate(item)));
if (item.getSubmitter() != null) {
doc.addField("item.submitter", item.getSubmitter().getEmail());
}
- for (Collection col: item.getCollections()) {
+ for (Collection col : item.getCollections()) {
doc.addField("item.collections", "col_" + col.getHandle().replace("/", "_"));
}
for (Community com : collectionsService.flatParentCommunities(context, item)) {
@@ -457,8 +478,7 @@ public class XOAI {
// Message output before processing - for debugging purposes
if (verbose) {
- println(String.format("Item %s with handle %s is about to be indexed",
- item.getID().toString(), handle));
+ println(String.format("Item %s with handle %s is about to be indexed", item.getID().toString(), handle));
}
ByteArrayOutputStream out = new ByteArrayOutputStream();
@@ -476,8 +496,7 @@ public class XOAI {
doc.addField("item.compile", out.toString());
if (verbose) {
- println(String.format("Item %s with handle %s indexed",
- item.getID().toString(), handle));
+ println(String.format("Item %s with handle %s indexed", item.getID().toString(), handle));
}
return doc;
@@ -510,12 +529,10 @@ public class XOAI {
return pub;
}
-
private static boolean getKnownExplanation(Throwable t) {
if (t instanceof ConnectException) {
- System.err.println("Solr server ("
- + configurationService.getProperty("oai.solr.url", "")
- + ") is down, turn it on.");
+ System.err.println(
+ "Solr server (" + configurationService.getProperty("oai.solr.url", "") + ") is down, turn it on.");
return true;
}
@@ -544,7 +561,7 @@ public class XOAI {
}
private static void cleanCache(XOAIItemCacheService xoaiItemCacheService, XOAICacheService xoaiCacheService)
- throws IOException {
+ throws IOException {
System.out.println("Purging cached OAI responses.");
xoaiItemCacheService.deleteAll();
xoaiCacheService.deleteAll();
@@ -557,10 +574,8 @@ public class XOAI {
public static void main(String[] argv) throws IOException, ConfigurationException {
-
- AnnotationConfigApplicationContext applicationContext = new AnnotationConfigApplicationContext(new Class[] {
- BasicConfiguration.class
- });
+ AnnotationConfigApplicationContext applicationContext = new AnnotationConfigApplicationContext(
+ new Class[] { BasicConfiguration.class });
XOAICacheService cacheService = applicationContext.getBean(XOAICacheService.class);
XOAIItemCacheService itemCacheService = applicationContext.getBean(XOAIItemCacheService.class);
@@ -571,21 +586,19 @@ public class XOAI {
CommandLineParser parser = new DefaultParser();
Options options = new Options();
options.addOption("c", "clear", false, "Clear index before indexing");
- options.addOption("o", "optimize", false,
- "Optimize index at the end");
+ options.addOption("o", "optimize", false, "Optimize index at the end");
options.addOption("v", "verbose", false, "Verbose output");
options.addOption("h", "help", false, "Shows some help");
options.addOption("n", "number", true, "FOR DEVELOPMENT MUST DELETE");
CommandLine line = parser.parse(options, argv);
- String[] validSolrCommands = {COMMAND_IMPORT, COMMAND_CLEAN_CACHE};
- String[] validDatabaseCommands = {COMMAND_CLEAN_CACHE, COMMAND_COMPILE_ITEMS, COMMAND_ERASE_COMPILED_ITEMS};
-
+ String[] validSolrCommands = { COMMAND_IMPORT, COMMAND_CLEAN_CACHE };
+ String[] validDatabaseCommands = { COMMAND_CLEAN_CACHE, COMMAND_COMPILE_ITEMS,
+ COMMAND_ERASE_COMPILED_ITEMS };
boolean solr = true; // Assuming solr by default
solr = !("database").equals(configurationService.getProperty("oai.storage", "solr"));
-
boolean run = false;
if (line.getArgs().length > 0) {
if (solr) {
@@ -607,10 +620,7 @@ public class XOAI {
if (COMMAND_IMPORT.equals(command)) {
ctx = new Context(Context.Mode.READ_ONLY);
- XOAI indexer = new XOAI(ctx,
- line.hasOption('o'),
- line.hasOption('c'),
- line.hasOption('v'));
+ XOAI indexer = new XOAI(ctx, line.hasOption('o'), line.hasOption('c'), line.hasOption('v'));
applicationContext.getAutowireCapableBeanFactory().autowireBean(indexer);
@@ -635,8 +645,7 @@ public class XOAI {
}
System.out.println("OAI 2.0 manager action ended. It took "
- + ((System.currentTimeMillis() - start) / 1000)
- + " seconds.");
+ + ((System.currentTimeMillis() - start) / 1000) + " seconds.");
} else {
usage();
}
@@ -688,7 +697,7 @@ public class XOAI {
private static void usage() {
boolean solr = true; // Assuming solr by default
- solr = !("database").equals(configurationService.getProperty("oai.storage","solr"));
+ solr = !("database").equals(configurationService.getProperty("oai.storage", "solr"));
if (solr) {
System.out.println("OAI Manager Script");
diff --git a/dspace-rest/src/main/webapp/static/reports/restCollReport.js b/dspace-rest/src/main/webapp/static/reports/restCollReport.js
index 1d1c04ae07..8d800a8edc 100644
--- a/dspace-rest/src/main/webapp/static/reports/restCollReport.js
+++ b/dspace-rest/src/main/webapp/static/reports/restCollReport.js
@@ -11,7 +11,7 @@ var CollReport = function() {
//this.hasSorttable = function(){return true;}
this.getLangSuffix = function(){
return "[en]";
- }
+ };
//Indicate if Password Authentication is supported
//this.makeAuthLink = function(){return true;};
@@ -38,7 +38,7 @@ var CollReport = function() {
icollection : "",
ifilter : "",
};
- }
+ };
this.getCurrentParameters = function(){
return {
"show_fields[]" : this.myMetadataFields.getShowFields(),
@@ -49,7 +49,7 @@ var CollReport = function() {
icollection : $("#icollection").val(),
ifilter : $("#ifilter").val(),
};
- }
+ };
var self = this;
this.init = function() {
@@ -61,7 +61,7 @@ var CollReport = function() {
collapsible: true,
active: 2
});
- }
+ };
this.myAuth.callback = function(data) {
self.createCollectionTable();
@@ -71,11 +71,11 @@ var CollReport = function() {
$("#refresh-fields,#refresh-fields-bits").bind("click", function(){
self.drawItemTable($("#icollection").val(), $("#ifilter").val(), 0);
});
- }
+ };
this.createCollectionTable = function() {
var self = this;
- var tbl = $("
");
+ var tbl = $("
");
tbl.attr("id","table");
$("#report").replaceWith(tbl);
@@ -93,7 +93,7 @@ var CollReport = function() {
self.myHtmlUtil.makeTotalCol(thn);
self.addCollections();
- }
+ };
this.addCollections = function() {
var self = this;
@@ -144,8 +144,6 @@ var CollReport = function() {
self.myHtmlUtil.addTd(tr, parval).addClass("title comm");
self.myHtmlUtil.addTdAnchor(tr, coll.name, self.ROOTPATH + coll.handle).addClass("title");
- var td = self.myHtmlUtil.addTd(tr, "").addClass("num").addClass("link").addClass("numCount");
- td = self.myHtmlUtil.addTd(tr, "").addClass("num").addClass("numFiltered");
};
@@ -197,7 +195,7 @@ var CollReport = function() {
$(".showCollections").attr("disabled", false);
}
});
- }
+ };
this.loadData = function() {
self.spinner.spin($("h1")[0]);
@@ -208,7 +206,7 @@ var CollReport = function() {
$("#table tr.data").addClass("processing");
self.myFilters.filterString = self.myFilters.getFilterList();
self.doRow(0, self.THREADS, self.loadId);
- }
+ };
this.doRow = function(row, threads, curLoadId) {
if (self.loadId != curLoadId) return;
@@ -285,14 +283,14 @@ var CollReport = function() {
$("#table").addClass("sortable");
sorttable.makeSortable($("#table")[0]);
}
- }
-
+ };
+
this.totalFilters = function() {
- var colcount = $("#table tr th").length;
- for(var i=4; i= self.TOOBIG) {
td.addClass("toobig");
- title+= "\nIt will take significant time to apply this filter to the entire collection."
+ title+= "\nIt will take significant time to apply this filter to the entire collection.";
}
td.attr("title", title);
return false;
@@ -359,7 +357,7 @@ var CollReport = function() {
self.totalFilters();
}
return true;
- }
+ };
this.setCellCount = function(tr, cid, offset, isPartial, itemFilter) {
var filterName = itemFilter["filter-name"];
@@ -391,7 +389,7 @@ var CollReport = function() {
$("#ifilter").val(filterName);
});
}
- }
+ };
this.drawItemTable = function(cid, filter, offset) {
@@ -433,7 +431,7 @@ var CollReport = function() {
offset: offset,
"show_fields[]" : fields,
"show_fields_bits[]" : bitfields,
- }
+ };
$.ajax({
url: "/rest/filtered-collections/"+cid,
@@ -452,7 +450,6 @@ var CollReport = function() {
self.myHtmlUtil.addTd(tr, item.name).addClass("ititle");
if (fields != null) {
$.each(fields, function(index, field){
- var text = "";
var td = self.myHtmlUtil.addTd(tr, "");
$.each(item.metadata, function(mindex,mv){
if (mv.key == field) {
@@ -493,7 +490,7 @@ var CollReport = function() {
$("#itemResults").accordion("option", "active", self.IACCIDX_ITEM);
}
});
- }
+ };
//Ignore the first column containing a row number and the item handle
this.exportCol = function(colnum, col) {
@@ -503,8 +500,8 @@ var CollReport = function() {
data += (colnum == 1) ? "" : ",";
data += self.exportCell(col);
return data;
- }
-}
+ };
+};
CollReport.prototype = Object.create(Report.prototype);
$(document).ready(function(){
diff --git a/dspace-rest/src/main/webapp/static/reports/restQueryReport.js b/dspace-rest/src/main/webapp/static/reports/restQueryReport.js
index 9a8297fb69..18e9a61d08 100644
--- a/dspace-rest/src/main/webapp/static/reports/restQueryReport.js
+++ b/dspace-rest/src/main/webapp/static/reports/restQueryReport.js
@@ -12,7 +12,7 @@ var QueryReport = function() {
//this.hasSorttable = function(){return true;}
this.getLangSuffix = function(){
return "[en]";
- }
+ };
//Indicate if Password Authentication is supported
//this.makeAuthLink = function(){return true;};
@@ -31,7 +31,7 @@ var QueryReport = function() {
"limit" : this.ITEM_LIMIT,
"offset" : 0,
};
- }
+ };
this.getCurrentParameters = function(){
var expand = "parentCollection,metadata";
if (this.myBitstreamFields.hasBitstreamFields()) {
@@ -54,21 +54,20 @@ var QueryReport = function() {
paramArr[paramArr.length] = $(this).val();
});
return params;
- }
+ };
var self = this;
this.init = function() {
this.baseInit();
- var communitySelector = new CommunitySelector(this, $("#collSelector"), this.myReportParameters.params["collSel[]"]);
- }
+ };
this.initMetadataFields = function() {
this.myMetadataFields = new QueryableMetadataFields(self);
this.myMetadataFields.load();
- }
+ };
this.myAuth.callback = function(data) {
- $(".query-button").click(function(){self.runQuery();})
- }
+ $(".query-button").click(function(){self.runQuery();});
+ };
this.runQuery = function() {
this.spinner.spin($("body")[0]);
@@ -93,7 +92,7 @@ var QueryReport = function() {
$("button").not("#next,#prev").attr("disabled", false);
}
});
- }
+ };
this.drawItemFilterTable = function(data) {
$("#itemtable").replaceWith($('