Merge branch 'main' into iiif-bundle

This commit is contained in:
Michael Spalti
2022-09-21 14:00:51 -07:00
166 changed files with 4363 additions and 1951 deletions

View File

@@ -548,17 +548,11 @@
<groupId>com.ibm.icu</groupId> <groupId>com.ibm.icu</groupId>
<artifactId>icu4j</artifactId> <artifactId>icu4j</artifactId>
</dependency> </dependency>
<!-- Codebase at https://github.com/OCLC-Research/oaiharvester2/ --> <!-- Codebase at https://github.com/DSpace/oclc-harvester2 -->
<dependency> <dependency>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>oclc-harvester2</artifactId> <artifactId>oclc-harvester2</artifactId>
</dependency> </dependency>
<!-- Xalan is REQUIRED by 'oclc-harvester2' listed above (OAI harvesting fails without it).
Please do NOT use Xalan in DSpace codebase as it is not well maintained. -->
<dependency>
<groupId>xalan</groupId>
<artifactId>xalan</artifactId>
</dependency>
<dependency> <dependency>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-services</artifactId> <artifactId>dspace-services</artifactId>
@@ -600,7 +594,7 @@
<dependency> <dependency>
<groupId>org.jbibtex</groupId> <groupId>org.jbibtex</groupId>
<artifactId>jbibtex</artifactId> <artifactId>jbibtex</artifactId>
<version>1.0.10</version> <version>1.0.20</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.httpcomponents</groupId> <groupId>org.apache.httpcomponents</groupId>
@@ -801,7 +795,7 @@
<dependency> <dependency>
<groupId>com.amazonaws</groupId> <groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-s3</artifactId> <artifactId>aws-java-sdk-s3</artifactId>
<version>1.12.116</version> <version>1.12.261</version>
</dependency> </dependency>
<dependency> <dependency>
@@ -850,7 +844,7 @@
<dependency> <dependency>
<groupId>com.opencsv</groupId> <groupId>com.opencsv</groupId>
<artifactId>opencsv</artifactId> <artifactId>opencsv</artifactId>
<version>5.2</version> <version>5.6</version>
</dependency> </dependency>
<!-- Email templating --> <!-- Email templating -->

View File

@@ -14,6 +14,7 @@ import java.util.Locale;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser; import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.dspace.core.Context; import org.dspace.core.Context;
@@ -69,19 +70,41 @@ public final class CreateAdministrator {
options.addOption("e", "email", true, "administrator email address"); options.addOption("e", "email", true, "administrator email address");
options.addOption("f", "first", true, "administrator first name"); options.addOption("f", "first", true, "administrator first name");
options.addOption("h", "help", false, "explain create-administrator options");
options.addOption("l", "last", true, "administrator last name"); options.addOption("l", "last", true, "administrator last name");
options.addOption("c", "language", true, "administrator language"); options.addOption("c", "language", true, "administrator language");
options.addOption("p", "password", true, "administrator password"); options.addOption("p", "password", true, "administrator password");
CommandLine line = parser.parse(options, argv); CommandLine line = null;
try {
line = parser.parse(options, argv);
} catch (Exception e) {
System.out.println(e.getMessage() + "\nTry \"dspace create-administrator -h\" to print help information.");
System.exit(1);
}
if (line.hasOption("e") && line.hasOption("f") && line.hasOption("l") && if (line.hasOption("e") && line.hasOption("f") && line.hasOption("l") &&
line.hasOption("c") && line.hasOption("p")) { line.hasOption("c") && line.hasOption("p")) {
ca.createAdministrator(line.getOptionValue("e"), ca.createAdministrator(line.getOptionValue("e"),
line.getOptionValue("f"), line.getOptionValue("l"), line.getOptionValue("f"), line.getOptionValue("l"),
line.getOptionValue("c"), line.getOptionValue("p")); line.getOptionValue("c"), line.getOptionValue("p"));
} else if (line.hasOption("h")) {
String header = "\nA command-line tool for creating an initial administrator for setting up a" +
" DSpace site. Unless all the required parameters are passed it will" +
" prompt for an e-mail address, last name, first name and password from" +
" standard input.. An administrator group is then created and the data passed" +
" in used to create an e-person in that group.\n\n";
String footer = "\n";
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp("dspace create-administrator", header, options, footer, true);
return;
} else { } else {
ca.negotiateAdministratorDetails(); ca.negotiateAdministratorDetails(line);
} }
} }
@@ -103,20 +126,20 @@ public final class CreateAdministrator {
* *
* @throws Exception if error * @throws Exception if error
*/ */
protected void negotiateAdministratorDetails() protected void negotiateAdministratorDetails(CommandLine line)
throws Exception { throws Exception {
Console console = System.console(); Console console = System.console();
System.out.println("Creating an initial administrator account"); System.out.println("Creating an initial administrator account");
boolean dataOK = false; String email = line.getOptionValue('e');
String firstName = line.getOptionValue('f');
String email = null; String lastName = line.getOptionValue('l');
String firstName = null;
String lastName = null;
char[] password1 = null;
char[] password2 = null;
String language = I18nUtil.getDefaultLocale().getLanguage(); String language = I18nUtil.getDefaultLocale().getLanguage();
ConfigurationService cfg = DSpaceServicesFactory.getInstance().getConfigurationService();
boolean flag = line.hasOption('p');
char[] password = null;
boolean dataOK = line.hasOption('f') && line.hasOption('e') && line.hasOption('l');
while (!dataOK) { while (!dataOK) {
System.out.print("E-mail address: "); System.out.print("E-mail address: ");
@@ -147,8 +170,6 @@ public final class CreateAdministrator {
if (lastName != null) { if (lastName != null) {
lastName = lastName.trim(); lastName = lastName.trim();
} }
ConfigurationService cfg = DSpaceServicesFactory.getInstance().getConfigurationService();
if (cfg.hasProperty("webui.supported.locales")) { if (cfg.hasProperty("webui.supported.locales")) {
System.out.println("Select one of the following languages: " System.out.println("Select one of the following languages: "
+ cfg.getProperty("webui.supported.locales")); + cfg.getProperty("webui.supported.locales"));
@@ -163,20 +184,6 @@ public final class CreateAdministrator {
} }
} }
System.out.println("Password will not display on screen.");
System.out.print("Password: ");
System.out.flush();
password1 = console.readPassword();
System.out.print("Again to confirm: ");
System.out.flush();
password2 = console.readPassword();
//TODO real password validation
if (password1.length > 1 && Arrays.equals(password1, password2)) {
// password OK
System.out.print("Is the above data correct? (y or n): "); System.out.print("Is the above data correct? (y or n): ");
System.out.flush(); System.out.flush();
@@ -188,19 +195,46 @@ public final class CreateAdministrator {
dataOK = true; dataOK = true;
} }
} }
}
if (!flag) {
password = getPassword(console);
if (password == null) {
return;
}
} else {
password = line.getOptionValue("p").toCharArray();
}
// if we make it to here, we are ready to create an administrator
createAdministrator(email, firstName, lastName, language, String.valueOf(password));
}
private char[] getPassword(Console console) {
char[] password1 = null;
char[] password2 = null;
System.out.println("Password will not display on screen.");
System.out.print("Password: ");
System.out.flush();
password1 = console.readPassword();
System.out.print("Again to confirm: ");
System.out.flush();
password2 = console.readPassword();
// TODO real password validation
if (password1.length > 1 && Arrays.equals(password1, password2)) {
// password OK
Arrays.fill(password2, ' ');
return password1;
} else { } else {
System.out.println("Passwords don't match"); System.out.println("Passwords don't match");
return null;
} }
} }
// if we make it to here, we are ready to create an administrator
createAdministrator(email, firstName, lastName, language, String.valueOf(password1));
//Cleaning arrays that held password
Arrays.fill(password1, ' ');
Arrays.fill(password2, ' ');
}
/** /**
* Create the administrator with the given details. If the user * Create the administrator with the given details. If the user
* already exists then they are simply upped to administrator status * already exists then they are simply upped to administrator status

View File

@@ -0,0 +1,140 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang.time.DateUtils;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.ProcessStatus;
import org.dspace.core.Context;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.scripts.Process;
import org.dspace.scripts.factory.ScriptServiceFactory;
import org.dspace.scripts.service.ProcessService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.utils.DSpace;
/**
 * Script that removes stale {@link Process} records. The statuses to purge
 * (completed, failed and/or running) are chosen via command-line flags, and
 * only processes older than the configured number of days are removed.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public class ProcessCleaner extends DSpaceRunnable<ProcessCleanerConfiguration<ProcessCleaner>> {

    private ConfigurationService configurationService;

    private ProcessService processService;

    // Which statuses the cleanup will target; set from the command line.
    private boolean cleanCompleted = false;

    private boolean cleanFailed = false;

    private boolean cleanRunning = false;

    private boolean help = false;

    // Minimum age (in days) a process must have before it is deleted.
    private Integer days;

    /**
     * Read the command-line flags and the configured retention period.
     *
     * @throws ParseException never thrown here, declared by the contract
     */
    @Override
    public void setup() throws ParseException {

        this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
        this.processService = ScriptServiceFactory.getInstance().getProcessService();

        this.help = commandLine.hasOption('h');
        this.cleanFailed = commandLine.hasOption('f');
        this.cleanRunning = commandLine.hasOption('r');
        // COMPLETED is the default target when no status flag is supplied.
        this.cleanCompleted = commandLine.hasOption('c') || (!cleanFailed && !cleanRunning);

        int configuredDays = configurationService.getIntProperty("process-cleaner.days", 14);
        if (configuredDays <= 0) {
            throw new IllegalStateException("The number of days must be a positive integer.");
        }
        this.days = configuredDays;

    }

    @Override
    public void internalRun() throws Exception {

        if (help) {
            printHelp();
            return;
        }

        Context context = new Context();

        try {
            // Deletion must not be blocked by the current user's permissions.
            context.turnOffAuthorisationSystem();
            cleanUpProcesses(context);
        } finally {
            context.restoreAuthSystemState();
            context.complete();
        }

    }

    /**
     * Find and delete every process whose status is one of the selected
     * statuses and whose creation time is older than the configured cut-off.
     *
     * @param context the DSpace context used for the lookup and deletions
     */
    private void cleanUpProcesses(Context context) throws SQLException, IOException, AuthorizeException {

        List<ProcessStatus> statuses = statusesToDelete();
        // Cut-off: anything created before (now - days) is eligible.
        Date creationDate = DateUtils.addDays(new Date(), -days);

        handler.logInfo("Searching for processes with status: " + statuses);
        List<Process> processes = processService.findByStatusAndCreationTimeOlderThan(context, statuses, creationDate);
        handler.logInfo("Found " + processes.size() + " processes to be deleted");

        for (Process process : processes) {
            processService.delete(context, process);
        }

        handler.logInfo("Process cleanup completed");

    }

    /**
     * Build the list of {@link ProcessStatus} values selected for deletion.
     */
    private List<ProcessStatus> statusesToDelete() {

        List<ProcessStatus> statuses = new ArrayList<>();

        if (cleanCompleted) {
            statuses.add(ProcessStatus.COMPLETED);
        }
        if (cleanFailed) {
            statuses.add(ProcessStatus.FAILED);
        }
        if (cleanRunning) {
            statuses.add(ProcessStatus.RUNNING);
        }

        return statuses;

    }

    @Override
    @SuppressWarnings("unchecked")
    public ProcessCleanerConfiguration<ProcessCleaner> getScriptConfiguration() {
        return new DSpace().getServiceManager()
            .getServiceByName("process-cleaner", ProcessCleanerConfiguration.class);
    }

}

View File

@@ -0,0 +1,18 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
/**
 * The {@link ProcessCleaner} variant used for command-line (CLI) launches.
 * It adds no behavior of its own; the distinct subclass exists so a separate
 * script configuration can be bound to CLI execution.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public class ProcessCleanerCli extends ProcessCleaner {
}

View File

@@ -0,0 +1,18 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
/**
 * The {@link ProcessCleanerConfiguration} bound to {@link ProcessCleanerCli},
 * i.e. the configuration used when the process-cleaner script is launched from
 * the command line. It adds no options beyond those of its parent.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public class ProcessCleanerCliConfiguration extends ProcessCleanerConfiguration<ProcessCleanerCli> {
}

View File

@@ -0,0 +1,70 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
import java.sql.SQLException;
import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * The {@link ScriptConfiguration} for the {@link ProcessCleaner} script.
 * Declares the command-line options and restricts execution to
 * repository administrators.
 */
public class ProcessCleanerConfiguration<T extends ProcessCleaner> extends ScriptConfiguration<T> {

    @Autowired
    private AuthorizeService authorizeService;

    private Class<T> dspaceRunnableClass;

    /**
     * Only site administrators may run the process cleaner.
     *
     * @param context the current DSpace context, identifying the current user
     * @return true if the current user is an administrator
     */
    @Override
    public boolean isAllowedToExecute(Context context) {
        try {
            return authorizeService.isAdmin(context);
        } catch (SQLException e) {
            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
        }
    }

    /**
     * Lazily build (once) and return the script's command-line options.
     * The result is cached in the {@code options} field inherited from
     * {@link ScriptConfiguration}.
     */
    @Override
    public Options getOptions() {
        if (options == null) {
            // Use a distinct local name: a local "options" would shadow the
            // inherited field and make the assignment/return easy to misread.
            Options cliOptions = new Options();

            cliOptions.addOption("h", "help", false, "help");

            cliOptions.addOption("r", "running", false, "delete the process with RUNNING status");
            cliOptions.getOption("r").setType(boolean.class);

            cliOptions.addOption("f", "failed", false, "delete the process with FAILED status");
            cliOptions.getOption("f").setType(boolean.class);

            cliOptions.addOption("c", "completed", false,
                "delete the process with COMPLETED status (default if no statuses are specified)");
            cliOptions.getOption("c").setType(boolean.class);

            options = cliOptions;
        }
        return options;
    }

    @Override
    public Class<T> getDspaceRunnableClass() {
        return dspaceRunnableClass;
    }

    @Override
    public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
        this.dspaceRunnableClass = dspaceRunnableClass;
    }
}

View File

@@ -42,6 +42,7 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option; import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.Community; import org.dspace.content.Community;
@@ -55,6 +56,8 @@ import org.dspace.content.service.CommunityService;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.jdom2.Element; import org.jdom2.Element;
import org.jdom2.output.Format; import org.jdom2.output.Format;
import org.jdom2.output.XMLOutputter; import org.jdom2.output.XMLOutputter;
@@ -79,6 +82,7 @@ import org.xml.sax.SAXException;
* </community> * </community>
* </import_structure> * </import_structure>
* }</pre> * }</pre>
*
* <p> * <p>
* It can be arbitrarily deep, and supports all the metadata elements * It can be arbitrarily deep, and supports all the metadata elements
* that make up the community and collection metadata. See the system * that make up the community and collection metadata. See the system
@@ -107,12 +111,14 @@ public class StructBuilder {
*/ */
private static final Map<String, MetadataFieldName> communityMap = new HashMap<>(); private static final Map<String, MetadataFieldName> communityMap = new HashMap<>();
protected static CommunityService communityService protected static final CommunityService communityService
= ContentServiceFactory.getInstance().getCommunityService(); = ContentServiceFactory.getInstance().getCommunityService();
protected static CollectionService collectionService protected static final CollectionService collectionService
= ContentServiceFactory.getInstance().getCollectionService(); = ContentServiceFactory.getInstance().getCollectionService();
protected static EPersonService ePersonService protected static final EPersonService ePersonService
= EPersonServiceFactory.getInstance().getEPersonService(); = EPersonServiceFactory.getInstance().getEPersonService();
protected static final HandleService handleService
= HandleServiceFactory.getInstance().getHandleService();
/** /**
* Default constructor * Default constructor
@@ -138,6 +144,7 @@ public class StructBuilder {
* @throws SQLException passed through. * @throws SQLException passed through.
* @throws FileNotFoundException if input or output could not be opened. * @throws FileNotFoundException if input or output could not be opened.
* @throws TransformerException if the input document is invalid. * @throws TransformerException if the input document is invalid.
* @throws XPathExpressionException passed through.
*/ */
public static void main(String[] argv) public static void main(String[] argv)
throws ParserConfigurationException, SQLException, throws ParserConfigurationException, SQLException,
@@ -148,6 +155,7 @@ public class StructBuilder {
options.addOption("h", "help", false, "Print this help message."); options.addOption("h", "help", false, "Print this help message.");
options.addOption("?", "help"); options.addOption("?", "help");
options.addOption("x", "export", false, "Export the current structure as XML."); options.addOption("x", "export", false, "Export the current structure as XML.");
options.addOption("k", "keep-handles", false, "Apply Handles from input document.");
options.addOption(Option.builder("e").longOpt("eperson") options.addOption(Option.builder("e").longOpt("eperson")
.desc("User who is manipulating the repository's structure.") .desc("User who is manipulating the repository's structure.")
@@ -209,6 +217,7 @@ public class StructBuilder {
// Export? Import? // Export? Import?
if (line.hasOption('x')) { // export if (line.hasOption('x')) { // export
exportStructure(context, outputStream); exportStructure(context, outputStream);
outputStream.close();
} else { // Must be import } else { // Must be import
String input = line.getOptionValue('f'); String input = line.getOptionValue('f');
if (null == input) { if (null == input) {
@@ -223,7 +232,12 @@ public class StructBuilder {
inputStream = new FileInputStream(input); inputStream = new FileInputStream(input);
} }
importStructure(context, inputStream, outputStream); boolean keepHandles = options.hasOption("k");
importStructure(context, inputStream, outputStream, keepHandles);
inputStream.close();
outputStream.close();
// save changes from import // save changes from import
context.complete(); context.complete();
} }
@@ -236,14 +250,17 @@ public class StructBuilder {
* @param context * @param context
* @param input XML which describes the new communities and collections. * @param input XML which describes the new communities and collections.
* @param output input, annotated with the new objects' identifiers. * @param output input, annotated with the new objects' identifiers.
* @param keepHandles true if Handles should be set from input.
* @throws IOException * @throws IOException
* @throws ParserConfigurationException * @throws ParserConfigurationException
* @throws SAXException * @throws SAXException
* @throws TransformerException * @throws TransformerException
* @throws SQLException * @throws SQLException
*/ */
static void importStructure(Context context, InputStream input, OutputStream output) static void importStructure(Context context, InputStream input,
throws IOException, ParserConfigurationException, SQLException, TransformerException, XPathExpressionException { OutputStream output, boolean keepHandles)
throws IOException, ParserConfigurationException, SQLException,
TransformerException, XPathExpressionException {
// load the XML // load the XML
Document document = null; Document document = null;
@@ -271,7 +288,19 @@ public class StructBuilder {
NodeList identifierNodes = (NodeList) xPath.compile("//*[@identifier]") NodeList identifierNodes = (NodeList) xPath.compile("//*[@identifier]")
.evaluate(document, XPathConstants.NODESET); .evaluate(document, XPathConstants.NODESET);
if (identifierNodes.getLength() > 0) { if (identifierNodes.getLength() > 0) {
if (!keepHandles) {
System.err.println("The input document has 'identifier' attributes, which will be ignored."); System.err.println("The input document has 'identifier' attributes, which will be ignored.");
} else {
for (int i = 0; i < identifierNodes.getLength() ; i++) {
String identifier = identifierNodes.item(i).getAttributes().item(0).getTextContent();
if (handleService.resolveToURL(context, identifier) != null) {
System.err.printf("The input document contains handle %s,"
+ " which is in use already. Aborting...%n",
identifier);
System.exit(1);
}
}
}
} }
// load the mappings into the member variable hashmaps // load the mappings into the member variable hashmaps
@@ -296,7 +325,7 @@ public class StructBuilder {
.evaluate(document, XPathConstants.NODESET); .evaluate(document, XPathConstants.NODESET);
// run the import starting with the top level communities // run the import starting with the top level communities
elements = handleCommunities(context, first, null); elements = handleCommunities(context, first, null, keepHandles);
} catch (TransformerException ex) { } catch (TransformerException ex) {
System.err.format("Input content not understood: %s%n", ex.getMessage()); System.err.format("Input content not understood: %s%n", ex.getMessage());
System.exit(1); System.exit(1);
@@ -619,23 +648,29 @@ public class StructBuilder {
* @param context the context of the request * @param context the context of the request
* @param communities a nodelist of communities to create along with their sub-structures * @param communities a nodelist of communities to create along with their sub-structures
* @param parent the parent community of the nodelist of communities to create * @param parent the parent community of the nodelist of communities to create
* @param keepHandles use Handles from input.
* @return an element array containing additional information regarding the * @return an element array containing additional information regarding the
* created communities (e.g. the handles they have been assigned) * created communities (e.g. the handles they have been assigned)
*/ */
private static Element[] handleCommunities(Context context, NodeList communities, Community parent) private static Element[] handleCommunities(Context context, NodeList communities,
throws TransformerException, SQLException, AuthorizeException, XPathExpressionException { Community parent, boolean keepHandles)
throws TransformerException, SQLException, AuthorizeException,
XPathExpressionException {
Element[] elements = new Element[communities.getLength()]; Element[] elements = new Element[communities.getLength()];
XPath xPath = XPathFactory.newInstance().newXPath(); XPath xPath = XPathFactory.newInstance().newXPath();
for (int i = 0; i < communities.getLength(); i++) { for (int i = 0; i < communities.getLength(); i++) {
Community community; Node tn = communities.item(i);
Element element = new Element("community"); Node identifier = tn.getAttributes().getNamedItem("identifier");
// create the community or sub community // create the community or sub community
if (parent != null) { Community community;
if (null == identifier
|| StringUtils.isBlank(identifier.getNodeValue())
|| !keepHandles) {
community = communityService.create(parent, context); community = communityService.create(parent, context);
} else { } else {
community = communityService.create(null, context); community = communityService.create(parent, context, identifier.getNodeValue());
} }
// default the short description to be an empty string // default the short description to be an empty string
@@ -643,7 +678,6 @@ public class StructBuilder {
MD_SHORT_DESCRIPTION, null, " "); MD_SHORT_DESCRIPTION, null, " ");
// now update the metadata // now update the metadata
Node tn = communities.item(i);
for (Map.Entry<String, MetadataFieldName> entry : communityMap.entrySet()) { for (Map.Entry<String, MetadataFieldName> entry : communityMap.entrySet()) {
NodeList nl = (NodeList) xPath.compile(entry.getKey()).evaluate(tn, XPathConstants.NODESET); NodeList nl = (NodeList) xPath.compile(entry.getKey()).evaluate(tn, XPathConstants.NODESET);
if (nl.getLength() == 1) { if (nl.getLength() == 1) {
@@ -669,6 +703,7 @@ public class StructBuilder {
// but it's here to keep it separate from the create process in // but it's here to keep it separate from the create process in
// case // case
// we want to move it or make it switchable later // we want to move it or make it switchable later
Element element = new Element("community");
element.setAttribute("identifier", community.getHandle()); element.setAttribute("identifier", community.getHandle());
Element nameElement = new Element("name"); Element nameElement = new Element("name");
@@ -711,12 +746,16 @@ public class StructBuilder {
} }
// handle sub communities // handle sub communities
NodeList subCommunities = (NodeList) xPath.compile("community").evaluate(tn, XPathConstants.NODESET); NodeList subCommunities = (NodeList) xPath.compile("community")
Element[] subCommunityElements = handleCommunities(context, subCommunities, community); .evaluate(tn, XPathConstants.NODESET);
Element[] subCommunityElements = handleCommunities(context,
subCommunities, community, keepHandles);
// handle collections // handle collections
NodeList collections = (NodeList) xPath.compile("collection").evaluate(tn, XPathConstants.NODESET); NodeList collections = (NodeList) xPath.compile("collection")
Element[] collectionElements = handleCollections(context, collections, community); .evaluate(tn, XPathConstants.NODESET);
Element[] collectionElements = handleCollections(context,
collections, community, keepHandles);
int j; int j;
for (j = 0; j < subCommunityElements.length; j++) { for (j = 0; j < subCommunityElements.length; j++) {
@@ -741,21 +780,31 @@ public class StructBuilder {
* @return an Element array containing additional information about the * @return an Element array containing additional information about the
* created collections (e.g. the handle) * created collections (e.g. the handle)
*/ */
private static Element[] handleCollections(Context context, NodeList collections, Community parent) private static Element[] handleCollections(Context context,
NodeList collections, Community parent, boolean keepHandles)
throws SQLException, AuthorizeException, XPathExpressionException { throws SQLException, AuthorizeException, XPathExpressionException {
Element[] elements = new Element[collections.getLength()]; Element[] elements = new Element[collections.getLength()];
XPath xPath = XPathFactory.newInstance().newXPath(); XPath xPath = XPathFactory.newInstance().newXPath();
for (int i = 0; i < collections.getLength(); i++) { for (int i = 0; i < collections.getLength(); i++) {
Element element = new Element("collection"); Node tn = collections.item(i);
Collection collection = collectionService.create(context, parent); Node identifier = tn.getAttributes().getNamedItem("identifier");
// Create the Collection.
Collection collection;
if (null == identifier
|| StringUtils.isBlank(identifier.getNodeValue())
|| !keepHandles) {
collection = collectionService.create(context, parent);
} else {
collection = collectionService.create(context, parent, identifier.getNodeValue());
}
// default the short description to the empty string // default the short description to the empty string
collectionService.setMetadataSingleValue(context, collection, collectionService.setMetadataSingleValue(context, collection,
MD_SHORT_DESCRIPTION, Item.ANY, " "); MD_SHORT_DESCRIPTION, Item.ANY, " ");
// import the rest of the metadata // import the rest of the metadata
Node tn = collections.item(i);
for (Map.Entry<String, MetadataFieldName> entry : collectionMap.entrySet()) { for (Map.Entry<String, MetadataFieldName> entry : collectionMap.entrySet()) {
NodeList nl = (NodeList) xPath.compile(entry.getKey()).evaluate(tn, XPathConstants.NODESET); NodeList nl = (NodeList) xPath.compile(entry.getKey()).evaluate(tn, XPathConstants.NODESET);
if (nl.getLength() == 1) { if (nl.getLength() == 1) {
@@ -766,6 +815,7 @@ public class StructBuilder {
collectionService.update(context, collection); collectionService.update(context, collection);
Element element = new Element("collection");
element.setAttribute("identifier", collection.getHandle()); element.setAttribute("identifier", collection.getHandle());
Element nameElement = new Element("name"); Element nameElement = new Element("name");

View File

@@ -204,7 +204,7 @@ public class ItemExportCLITool {
|| (mycollection.getType() != Constants.COLLECTION)) { || (mycollection.getType() != Constants.COLLECTION)) {
mycollection = null; mycollection = null;
} }
} else if (myIDString != null) { } else {
mycollection = collectionService.find(c, UUID.fromString(myIDString)); mycollection = collectionService.find(c, UUID.fromString(myIDString));
} }

View File

@@ -64,17 +64,21 @@ import org.springframework.beans.factory.annotation.Autowired;
* Item exporter to create simple AIPs for DSpace content. Currently exports * Item exporter to create simple AIPs for DSpace content. Currently exports
* individual items, or entire collections. For instructions on use, see * individual items, or entire collections. For instructions on use, see
* printUsage() method. * printUsage() method.
* <P> * <p>
* ItemExport creates the simple AIP package that the importer also uses. It * ItemExport creates the simple AIP package that the importer also uses. It
* consists of: * consists of:
* <P> * <pre>{@code
* /exportdir/42/ (one directory per item) / dublin_core.xml - qualified dublin * /exportdir/42/ (one directory per item)
* core in RDF schema / contents - text file, listing one file per line / file1 * / dublin_core.xml - qualified dublin core in RDF schema
* - files contained in the item / file2 / ... * / contents - text file, listing one file per line
* <P> * / file1 - files contained in the item
* / file2
* / ...
* }</pre>
* <p>
* issues -doesn't handle special characters in metadata (needs to turn {@code &'s} into * issues -doesn't handle special characters in metadata (needs to turn {@code &'s} into
* {@code &amp;}, etc.) * {@code &amp;}, etc.)
* <P> * <p>
* Modified by David Little, UCSD Libraries 12/21/04 to allow the registration * Modified by David Little, UCSD Libraries 12/21/04 to allow the registration
* of files (bitstreams) into DSpace. * of files (bitstreams) into DSpace.
* *
@@ -101,7 +105,7 @@ public class ItemExportServiceImpl implements ItemExportService {
/** /**
* log4j logger * log4j logger
*/ */
private final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemExportServiceImpl.class); private final Logger log = org.apache.logging.log4j.LogManager.getLogger();
protected ItemExportServiceImpl() { protected ItemExportServiceImpl() {
@@ -168,6 +172,7 @@ public class ItemExportServiceImpl implements ItemExportService {
// make it this far, now start exporting // make it this far, now start exporting
writeMetadata(c, myItem, itemDir, migrate); writeMetadata(c, myItem, itemDir, migrate);
writeBitstreams(c, myItem, itemDir, excludeBitstreams); writeBitstreams(c, myItem, itemDir, excludeBitstreams);
writeCollections(myItem, itemDir);
if (!migrate) { if (!migrate) {
writeHandle(c, myItem, itemDir); writeHandle(c, myItem, itemDir);
} }
@@ -343,6 +348,33 @@ public class ItemExportServiceImpl implements ItemExportService {
} }
} }
/**
* Create the 'collections' file. List handles of all Collections which
* contain this Item. The "owning" Collection is listed first.
*
* @param item list collections holding this Item.
* @param destDir write the file here.
* @throws IOException if the file cannot be created or written.
*/
protected void writeCollections(Item item, File destDir)
throws IOException {
File outFile = new File(destDir, "collections");
if (outFile.createNewFile()) {
try (PrintWriter out = new PrintWriter(new FileWriter(outFile))) {
String ownerHandle = item.getOwningCollection().getHandle();
out.println(ownerHandle);
for (Collection collection : item.getCollections()) {
String collectionHandle = collection.getHandle();
if (!collectionHandle.equals(ownerHandle)) {
out.println(collectionHandle);
}
}
}
} else {
throw new IOException("Cannot create 'collections' in " + destDir);
}
}
/** /**
* Create both the bitstreams and the contents file. Any bitstreams that * Create both the bitstreams and the contents file. Any bitstreams that
* were originally registered will be marked in the contents file as such. * were originally registered will be marked in the contents file as such.
@@ -630,11 +662,9 @@ public class ItemExportServiceImpl implements ItemExportService {
Thread go = new Thread() { Thread go = new Thread() {
@Override @Override
public void run() { public void run() {
Context context = null; Context context = new Context();
Iterator<Item> iitems = null; Iterator<Item> iitems = null;
try { try {
// create a new dspace context
context = new Context();
// ignore auths // ignore auths
context.turnOffAuthorisationSystem(); context.turnOffAuthorisationSystem();

View File

@@ -264,16 +264,12 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
// sneaky isResume == true means open file in append mode // sneaky isResume == true means open file in append mode
outFile = new File(mapFile); outFile = new File(mapFile);
mapOut = new PrintWriter(new FileWriter(outFile, isResume)); mapOut = new PrintWriter(new FileWriter(outFile, isResume));
if (mapOut == null) {
throw new Exception("can't open mapfile: " + mapFile);
}
} }
// open and process the source directory // open and process the source directory
File d = new java.io.File(sourceDir); File d = new java.io.File(sourceDir);
if (d == null || !d.isDirectory()) { if (!d.isDirectory()) {
throw new Exception("Error, cannot open source directory " + sourceDir); throw new Exception("Error, cannot open source directory " + sourceDir);
} }
@@ -433,11 +429,15 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
/** /**
* Read the relationship manifest file. * Read the relationship manifest file.
* *
* Each line in the file contains a relationship type id and an item identifier in the following format: * Each line in the file contains a relationship type id and an item
* identifier in the following format:
* *
* relation.<relation_key> <handle|uuid|folderName:import_item_folder|schema.element[.qualifier]:value> * <p>
* {@code relation.<relation_key> <handle|uuid|folderName:import_item_folder|schema.element[.qualifier]:value>}
* *
* The input_item_folder should refer the folder name of another item in this import batch. * <p>
* The {@code input_item_folder} should refer the folder name of another
* item in this import batch.
* *
* @param path The main import folder path. * @param path The main import folder path.
* @param filename The name of the manifest file to check ('relationships') * @param filename The name of the manifest file to check ('relationships')
@@ -558,9 +558,10 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
/** /**
* Lookup an item by a (unique) meta value. * Lookup an item by a (unique) meta value.
* *
* @param metaKey * @param c current DSpace session.
* @param metaValue * @param metaKey name of the metadata field to match.
* @return Item * @param metaValue value to be matched.
* @return the matching Item.
* @throws Exception if single item not found. * @throws Exception if single item not found.
*/ */
protected Item findItemByMetaValue(Context c, String metaKey, String metaValue) throws Exception { protected Item findItemByMetaValue(Context c, String metaKey, String metaValue) throws Exception {
@@ -604,7 +605,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
// verify the source directory // verify the source directory
File d = new java.io.File(sourceDir); File d = new java.io.File(sourceDir);
if (d == null || !d.isDirectory()) { if (!d.isDirectory()) {
throw new Exception("Error, cannot open source directory " throw new Exception("Error, cannot open source directory "
+ sourceDir); + sourceDir);
} }
@@ -643,10 +644,6 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
File handleFile = new File(sourceDir + File.separatorChar + newItemName + File.separatorChar + "handle"); File handleFile = new File(sourceDir + File.separatorChar + newItemName + File.separatorChar + "handle");
PrintWriter handleOut = new PrintWriter(new FileWriter(handleFile, true)); PrintWriter handleOut = new PrintWriter(new FileWriter(handleFile, true));
if (handleOut == null) {
throw new Exception("can't open handle file: " + handleFile.getCanonicalPath());
}
handleOut.println(oldHandle); handleOut.println(oldHandle);
handleOut.close(); handleOut.close();
@@ -1668,26 +1665,27 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
.trim(); .trim();
} }
if (isTest) {
continue;
}
Bitstream bs = null; Bitstream bs = null;
boolean notfound = true;
boolean updateRequired = false; boolean updateRequired = false;
if (!isTest) {
// find bitstream // find bitstream
List<Bitstream> bitstreams = itemService.getNonInternalBitstreams(c, myItem); List<Bitstream> bitstreams = itemService.getNonInternalBitstreams(c, myItem);
for (int j = 0; j < bitstreams.size() && notfound; j++) { for (Bitstream bitstream : bitstreams) {
if (bitstreams.get(j).getName().equals(bitstreamName)) { if (bitstream.getName().equals(bitstreamName)) {
bs = bitstreams.get(j); bs = bitstream;
notfound = false; break;
}
} }
} }
if (notfound && !isTest) { if (null == bs) {
// this should never happen // this should never happen
System.out.println("\tdefault permissions set for " System.out.printf("\tdefault permissions set for %s%n",
+ bitstreamName); bitstreamName);
} else if (!isTest) { } else {
if (permissionsExist) { if (permissionsExist) {
if (myGroup == null) { if (myGroup == null) {
System.out.println("\t" + groupName System.out.println("\t" + groupName
@@ -2028,15 +2026,11 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
Thread go = new Thread() { Thread go = new Thread() {
@Override @Override
public void run() { public void run() {
Context context = null; Context context = new Context();
String importDir = null; String importDir = null;
EPerson eperson = null; EPerson eperson = null;
try { try {
// create a new dspace context
context = new Context();
eperson = ePersonService.find(context, oldEPerson.getID()); eperson = ePersonService.find(context, oldEPerson.getID());
context.setCurrentUser(eperson); context.setCurrentUser(eperson);
context.turnOffAuthorisationSystem(); context.turnOffAuthorisationSystem();
@@ -2047,7 +2041,8 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
if (theOtherCollections != null) { if (theOtherCollections != null) {
for (String colID : theOtherCollections) { for (String colID : theOtherCollections) {
UUID colId = UUID.fromString(colID); UUID colId = UUID.fromString(colID);
if (!theOwningCollection.getID().equals(colId)) { if (theOwningCollection != null
&& !theOwningCollection.getID().equals(colId)) {
Collection col = collectionService.find(context, colId); Collection col = collectionService.find(context, colId);
if (col != null) { if (col != null) {
collectionList.add(col); collectionList.add(col);

View File

@@ -77,7 +77,7 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction {
ItemUpdate.pr("Contents bitstream count: " + contents.size()); ItemUpdate.pr("Contents bitstream count: " + contents.size());
String[] files = dir.list(ItemUpdate.fileFilter); String[] files = dir.list(ItemUpdate.fileFilter);
List<String> fileList = new ArrayList<String>(); List<String> fileList = new ArrayList<>();
for (String filename : files) { for (String filename : files) {
fileList.add(filename); fileList.add(filename);
ItemUpdate.pr("file: " + filename); ItemUpdate.pr("file: " + filename);
@@ -134,9 +134,6 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction {
ItemUpdate.pr("contents entry for bitstream: " + ce.toString()); ItemUpdate.pr("contents entry for bitstream: " + ce.toString());
File f = new File(dir, ce.filename); File f = new File(dir, ce.filename);
// get an input stream
BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f));
Bitstream bs = null; Bitstream bs = null;
String newBundleName = ce.bundlename; String newBundleName = ce.bundlename;
@@ -173,7 +170,9 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction {
targetBundle = bundles.iterator().next(); targetBundle = bundles.iterator().next();
} }
try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f));) {
bs = bitstreamService.create(context, targetBundle, bis); bs = bitstreamService.create(context, targetBundle, bis);
}
bs.setName(context, ce.filename); bs.setName(context, ce.filename);
// Identify the format // Identify the format

View File

@@ -39,29 +39,34 @@ import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService; import org.dspace.handle.service.HandleService;
/** /**
* Provides some batch editing capabilities for items in DSpace: * Provides some batch editing capabilities for items in DSpace.
* Metadata fields - Add, Delete * <ul>
* Bitstreams - Add, Delete * <li>Metadata fields - Add, Delete</li>
* <li>Bitstreams - Add, Delete</li>
* </ul>
* *
* The design has been for compatibility with ItemImporter * <p>
* The design has been for compatibility with
* {@link org.dspace.app.itemimport.service.ItemImportService}
* in the use of the DSpace archive format which is used to * in the use of the DSpace archive format which is used to
* specify changes on a per item basis. The directory names * specify changes on a per item basis. The directory names
* to correspond to each item are arbitrary and will only be * to correspond to each item are arbitrary and will only be
* used for logging purposes. The reference to the item is * used for logging purposes. The reference to the item is
* from a required dc.identifier with the item handle to be * from a required {@code dc.identifier} with the item handle to be
* included in the dublin_core.xml (or similar metadata) file. * included in the {@code dublin_core.xml} (or similar metadata) file.
* *
* Any combination of these actions is permitted in a single run of this class * <p>
* Any combination of these actions is permitted in a single run of this class.
* The order of actions is important when used in combination. * The order of actions is important when used in combination.
* It is the responsibility of the calling class (here, ItemUpdate) * It is the responsibility of the calling class (here, {@code ItemUpdate})
* to register UpdateAction classes in the order to which they are * to register {@link UpdateAction} classes in the order which they are
* to be performed. * to be performed.
* *
* * <p>
* It is unfortunate that so much code needs to be borrowed * It is unfortunate that so much code needs to be borrowed from
* from ItemImport as it is not reusable in private methods, etc. * {@link org.dspace.app.itemimport.service.ItemImportService} as it is not
* Some of this has been placed into the MetadataUtilities class * reusable in private methods, etc. Some of this has been placed into the
* for possible reuse elsewhere. * {@link MetadataUtilities} class for possible reuse elsewhere.
* *
* @author W. Hays based on a conceptual design by R. Rodgers * @author W. Hays based on a conceptual design by R. Rodgers
*/ */
@@ -73,7 +78,7 @@ public class ItemUpdate {
public static final String DELETE_CONTENTS_FILE = "delete_contents"; public static final String DELETE_CONTENTS_FILE = "delete_contents";
public static String HANDLE_PREFIX = null; public static String HANDLE_PREFIX = null;
public static final Map<String, String> filterAliases = new HashMap<String, String>(); public static final Map<String, String> filterAliases = new HashMap<>();
public static boolean verbose = false; public static boolean verbose = false;
@@ -375,7 +380,7 @@ public class ItemUpdate {
// open and process the source directory // open and process the source directory
File sourceDir = new File(sourceDirPath); File sourceDir = new File(sourceDirPath);
if ((sourceDir == null) || !sourceDir.exists() || !sourceDir.isDirectory()) { if (!sourceDir.exists() || !sourceDir.isDirectory()) {
pr("Error, cannot open archive source directory " + sourceDirPath); pr("Error, cannot open archive source directory " + sourceDirPath);
throw new Exception("error with archive source directory " + sourceDirPath); throw new Exception("error with archive source directory " + sourceDirPath);
} }

View File

@@ -21,10 +21,10 @@ import java.awt.image.BufferedImage;
*/ */
public class Brand { public class Brand {
private int brandWidth; private final int brandWidth;
private int brandHeight; private final int brandHeight;
private Font font; private final Font font;
private int xOffset; private final int xOffset;
/** /**
* Constructor to set up footer image attributes. * Constructor to set up footer image attributes.
@@ -92,7 +92,7 @@ public class Brand {
* do the text placements and preparatory work for the brand image generation * do the text placements and preparatory work for the brand image generation
* *
* @param brandImage a BufferedImage object where the image is created * @param brandImage a BufferedImage object where the image is created
* @param identifier and Identifier object describing what text is to be placed in what * @param brandText an Identifier object describing what text is to be placed in what
* position within the brand * position within the brand
*/ */
private void drawImage(BufferedImage brandImage, private void drawImage(BufferedImage brandImage,

View File

@@ -39,7 +39,7 @@ class BrandText {
* its location within a rectangular area. * its location within a rectangular area.
* *
* @param location one of the class location constants e.g. <code>Identifier.BL</code> * @param location one of the class location constants e.g. <code>Identifier.BL</code>
* @param the text associated with the location * @param text text associated with the location
*/ */
public BrandText(String location, String text) { public BrandText(String location, String text) {
this.location = location; this.location = location;

View File

@@ -631,7 +631,7 @@ public class Packager {
//otherwise, just disseminate a single object to a single package file //otherwise, just disseminate a single object to a single package file
dip.disseminate(context, dso, pkgParams, pkgFile); dip.disseminate(context, dso, pkgParams, pkgFile);
if (pkgFile != null && pkgFile.exists()) { if (pkgFile.exists()) {
System.out.println("\nCREATED package file: " + pkgFile.getCanonicalPath()); System.out.println("\nCREATED package file: " + pkgFile.getCanonicalPath());
} }
} }

View File

@@ -0,0 +1,46 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.requestitem;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.springframework.lang.NonNull;
/**
* Derive request recipients from groups of the Collection which owns an Item.
* The list will include all members of the administrators group. If the
* resulting list is empty, delegates to {@link RequestItemHelpdeskStrategy}.
*
* @author Mark H. Wood <mwood@iupui.edu>
*/
public class CollectionAdministratorsRequestItemStrategy
extends RequestItemHelpdeskStrategy {
@Override
@NonNull
public List<RequestItemAuthor> getRequestItemAuthor(Context context,
Item item)
throws SQLException {
List<RequestItemAuthor> recipients = new ArrayList<>();
Collection collection = item.getOwningCollection();
for (EPerson admin : collection.getAdministrators().getMembers()) {
recipients.add(new RequestItemAuthor(admin));
}
if (recipients.isEmpty()) {
return super.getRequestItemAuthor(context, item);
} else {
return recipients;
}
}
}

View File

@@ -0,0 +1,61 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.requestitem;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.springframework.lang.NonNull;
import org.springframework.util.Assert;
/**
* Assemble a list of recipients from the results of other strategies.
* The list of strategy classes is injected as the constructor argument
* {@code strategies}.
* If the strategy list is not configured, returns an empty List.
*
* @author Mark H. Wood <mwood@iupui.edu>
*/
public class CombiningRequestItemStrategy
implements RequestItemAuthorExtractor {
/** The strategies to combine. */
private final List<RequestItemAuthorExtractor> strategies;
/**
* Initialize a combination of strategies.
* @param strategies the author extraction strategies to combine.
*/
public CombiningRequestItemStrategy(@NonNull List<RequestItemAuthorExtractor> strategies) {
Assert.notNull(strategies, "Strategy list may not be null");
this.strategies = strategies;
}
/**
* Do not call.
* @throws IllegalArgumentException always
*/
private CombiningRequestItemStrategy() {
throw new IllegalArgumentException();
}
@Override
@NonNull
public List<RequestItemAuthor> getRequestItemAuthor(Context context, Item item)
throws SQLException {
List<RequestItemAuthor> recipients = new ArrayList<>();
for (RequestItemAuthorExtractor strategy : strategies) {
recipients.addAll(strategy.getRequestItemAuthor(context, item));
}
return recipients;
}
}

View File

@@ -27,7 +27,7 @@ import org.dspace.core.Context;
import org.dspace.core.ReloadableEntity; import org.dspace.core.ReloadableEntity;
/** /**
* Object representing an Item Request * Object representing an Item Request.
*/ */
@Entity @Entity
@Table(name = "requestitem") @Table(name = "requestitem")
@@ -94,6 +94,9 @@ public class RequestItem implements ReloadableEntity<Integer> {
this.allfiles = allfiles; this.allfiles = allfiles;
} }
/**
* @return {@code true} if all of the Item's files are requested.
*/
public boolean isAllfiles() { public boolean isAllfiles() {
return allfiles; return allfiles;
} }
@@ -102,6 +105,9 @@ public class RequestItem implements ReloadableEntity<Integer> {
this.reqMessage = reqMessage; this.reqMessage = reqMessage;
} }
/**
* @return a message from the requester.
*/
public String getReqMessage() { public String getReqMessage() {
return reqMessage; return reqMessage;
} }
@@ -110,6 +116,9 @@ public class RequestItem implements ReloadableEntity<Integer> {
this.reqName = reqName; this.reqName = reqName;
} }
/**
* @return Human-readable name of the user requesting access.
*/
public String getReqName() { public String getReqName() {
return reqName; return reqName;
} }
@@ -118,6 +127,9 @@ public class RequestItem implements ReloadableEntity<Integer> {
this.reqEmail = reqEmail; this.reqEmail = reqEmail;
} }
/**
* @return address of the user requesting access.
*/
public String getReqEmail() { public String getReqEmail() {
return reqEmail; return reqEmail;
} }
@@ -126,6 +138,9 @@ public class RequestItem implements ReloadableEntity<Integer> {
this.token = token; this.token = token;
} }
/**
* @return a unique request identifier which can be emailed.
*/
public String getToken() { public String getToken() {
return token; return token;
} }

View File

@@ -11,20 +11,31 @@ import org.dspace.eperson.EPerson;
/** /**
* Simple DTO to transfer data about the corresponding author for the Request * Simple DTO to transfer data about the corresponding author for the Request
* Copy feature * Copy feature.
* *
* @author Andrea Bollini * @author Andrea Bollini
*/ */
public class RequestItemAuthor { public class RequestItemAuthor {
private String fullName; private final String fullName;
private String email; private final String email;
/**
* Construct an author record from given data.
*
* @param fullName the author's full name.
* @param email the author's email address.
*/
public RequestItemAuthor(String fullName, String email) { public RequestItemAuthor(String fullName, String email) {
super(); super();
this.fullName = fullName; this.fullName = fullName;
this.email = email; this.email = email;
} }
/**
* Construct an author from an EPerson's metadata.
*
* @param ePerson the EPerson.
*/
public RequestItemAuthor(EPerson ePerson) { public RequestItemAuthor(EPerson ePerson) {
super(); super();
this.fullName = ePerson.getFullName(); this.fullName = ePerson.getFullName();

View File

@@ -8,26 +8,28 @@
package org.dspace.app.requestitem; package org.dspace.app.requestitem;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.List;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.springframework.lang.NonNull;
/** /**
* Interface to abstract the strategy for select the author to contact for * Interface to abstract the strategy for selecting the author to contact for
* request copy * request copy.
* *
* @author Andrea Bollini * @author Andrea Bollini
*/ */
public interface RequestItemAuthorExtractor { public interface RequestItemAuthorExtractor {
/** /**
* Retrieve the auhtor to contact for a request copy of the give item. * Retrieve the author to contact for requesting a copy of the given item.
* *
* @param context DSpace context object * @param context DSpace context object
* @param item item to request * @param item item to request
* @return An object containing name an email address to send the request to * @return Names and email addresses to send the request to.
* or null if no valid email address was found.
* @throws SQLException if database error * @throws SQLException if database error
*/ */
public RequestItemAuthor getRequestItemAuthor(Context context, Item item) throws SQLException; @NonNull
public List<RequestItemAuthor> getRequestItemAuthor(Context context, Item item)
throws SQLException;
} }

View File

@@ -72,28 +72,48 @@ public class RequestItemEmailNotifier {
static public void sendRequest(Context context, RequestItem ri, String responseLink) static public void sendRequest(Context context, RequestItem ri, String responseLink)
throws IOException, SQLException { throws IOException, SQLException {
// Who is making this request? // Who is making this request?
RequestItemAuthor author = requestItemAuthorExtractor List<RequestItemAuthor> authors = requestItemAuthorExtractor
.getRequestItemAuthor(context, ri.getItem()); .getRequestItemAuthor(context, ri.getItem());
String authorEmail = author.getEmail();
String authorName = author.getFullName();
// Build an email to the approver. // Build an email to the approver.
Email email = Email.getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(), Email email = Email.getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(),
"request_item.author")); "request_item.author"));
email.addRecipient(authorEmail); for (RequestItemAuthor author : authors) {
email.addRecipient(author.getEmail());
}
email.setReplyTo(ri.getReqEmail()); // Requester's address email.setReplyTo(ri.getReqEmail()); // Requester's address
email.addArgument(ri.getReqName()); // {0} Requester's name email.addArgument(ri.getReqName()); // {0} Requester's name
email.addArgument(ri.getReqEmail()); // {1} Requester's address email.addArgument(ri.getReqEmail()); // {1} Requester's address
email.addArgument(ri.isAllfiles() // {2} All bitstreams or just one? email.addArgument(ri.isAllfiles() // {2} All bitstreams or just one?
? I18nUtil.getMessage("itemRequest.all") : ri.getBitstream().getName()); ? I18nUtil.getMessage("itemRequest.all") : ri.getBitstream().getName());
email.addArgument(handleService.getCanonicalForm(ri.getItem().getHandle()));
email.addArgument(handleService.getCanonicalForm(ri.getItem().getHandle())); // {3}
email.addArgument(ri.getItem().getName()); // {4} requested item's title email.addArgument(ri.getItem().getName()); // {4} requested item's title
email.addArgument(ri.getReqMessage()); // {5} message from requester email.addArgument(ri.getReqMessage()); // {5} message from requester
email.addArgument(responseLink); // {6} Link back to DSpace for action email.addArgument(responseLink); // {6} Link back to DSpace for action
email.addArgument(authorName); // {7} corresponding author name
email.addArgument(authorEmail); // {8} corresponding author email StringBuilder names = new StringBuilder();
email.addArgument(configurationService.getProperty("dspace.name")); StringBuilder addresses = new StringBuilder();
email.addArgument(configurationService.getProperty("mail.helpdesk")); for (RequestItemAuthor author : authors) {
if (names.length() > 0) {
names.append("; ");
addresses.append("; ");
}
names.append(author.getFullName());
addresses.append(author.getEmail());
}
email.addArgument(names.toString()); // {7} corresponding author name
email.addArgument(addresses.toString()); // {8} corresponding author email
email.addArgument(configurationService.getProperty("dspace.name")); // {9}
email.addArgument(configurationService.getProperty("mail.helpdesk")); // {10}
// Send the email. // Send the email.
try { try {

View File

@@ -8,6 +8,8 @@
package org.dspace.app.requestitem; package org.dspace.app.requestitem;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.dspace.content.Item; import org.dspace.content.Item;
@@ -16,11 +18,11 @@ import org.dspace.core.I18nUtil;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.EPersonService;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.lang.NonNull;
/** /**
* RequestItem strategy to allow DSpace support team's helpdesk to receive requestItem request * RequestItem strategy to allow DSpace support team's helpdesk to receive requestItem request.
* With this enabled, then the Item author/submitter doesn't receive the request, but the helpdesk instead does. * With this enabled, then the Item author/submitter doesn't receive the request, but the helpdesk instead does.
* *
* Failover to the RequestItemSubmitterStrategy, which means the submitter would get the request if there is no * Failover to the RequestItemSubmitterStrategy, which means the submitter would get the request if there is no
@@ -33,19 +35,24 @@ public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
@Autowired(required = true) @Autowired(required = true)
protected EPersonService ePersonService; protected EPersonService ePersonService;
@Autowired(required = true)
private ConfigurationService configuration;
public RequestItemHelpdeskStrategy() { public RequestItemHelpdeskStrategy() {
} }
@Override @Override
public RequestItemAuthor getRequestItemAuthor(Context context, Item item) throws SQLException { @NonNull
ConfigurationService configurationService public List<RequestItemAuthor> getRequestItemAuthor(Context context, Item item)
= DSpaceServicesFactory.getInstance().getConfigurationService(); throws SQLException {
boolean helpdeskOverridesSubmitter = configurationService boolean helpdeskOverridesSubmitter = configuration
.getBooleanProperty("request.item.helpdesk.override", false); .getBooleanProperty("request.item.helpdesk.override", false);
String helpDeskEmail = configurationService.getProperty("mail.helpdesk"); String helpDeskEmail = configuration.getProperty("mail.helpdesk");
if (helpdeskOverridesSubmitter && StringUtils.isNotBlank(helpDeskEmail)) { if (helpdeskOverridesSubmitter && StringUtils.isNotBlank(helpDeskEmail)) {
return getHelpDeskPerson(context, helpDeskEmail); List<RequestItemAuthor> authors = new ArrayList<>(1);
authors.add(getHelpDeskPerson(context, helpDeskEmail));
return authors;
} else { } else {
//Fallback to default logic (author of Item) if helpdesk isn't fully enabled or setup //Fallback to default logic (author of Item) if helpdesk isn't fully enabled or setup
return super.getRequestItemAuthor(context, item); return super.getRequestItemAuthor(context, item);

View File

@@ -8,6 +8,8 @@
package org.dspace.app.requestitem; package org.dspace.app.requestitem;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List; import java.util.List;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
@@ -16,12 +18,13 @@ import org.dspace.content.MetadataValue;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.I18nUtil; import org.dspace.core.I18nUtil;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.lang.NonNull;
/** /**
* Try to look to an item metadata for the corresponding author name and email. * Try to look to an item metadata for the corresponding author name and email.
* Failover to the RequestItemSubmitterStrategy * Failover to the RequestItemSubmitterStrategy.
* *
* @author Andrea Bollini * @author Andrea Bollini
*/ */
@@ -30,6 +33,9 @@ public class RequestItemMetadataStrategy extends RequestItemSubmitterStrategy {
protected String emailMetadata; protected String emailMetadata;
protected String fullNameMetadata; protected String fullNameMetadata;
@Autowired(required = true)
protected ConfigurationService configurationService;
@Autowired(required = true) @Autowired(required = true)
protected ItemService itemService; protected ItemService itemService;
@@ -37,59 +43,72 @@ public class RequestItemMetadataStrategy extends RequestItemSubmitterStrategy {
} }
@Override @Override
public RequestItemAuthor getRequestItemAuthor(Context context, Item item) @NonNull
public List<RequestItemAuthor> getRequestItemAuthor(Context context, Item item)
throws SQLException { throws SQLException {
RequestItemAuthor author = null; List<RequestItemAuthor> authors;
if (emailMetadata != null) { if (emailMetadata != null) {
List<MetadataValue> vals = itemService.getMetadataByMetadataString(item, emailMetadata); List<MetadataValue> vals = itemService.getMetadataByMetadataString(item, emailMetadata);
if (vals.size() > 0) { List<MetadataValue> nameVals;
String email = vals.iterator().next().getValue(); if (null != fullNameMetadata) {
nameVals = itemService.getMetadataByMetadataString(item, fullNameMetadata);
} else {
nameVals = Collections.EMPTY_LIST;
}
boolean useNames = vals.size() == nameVals.size();
if (!vals.isEmpty()) {
authors = new ArrayList<>(vals.size());
for (int authorIndex = 0; authorIndex < vals.size(); authorIndex++) {
String email = vals.get(authorIndex).getValue();
String fullname = null; String fullname = null;
if (fullNameMetadata != null) { if (useNames) {
List<MetadataValue> nameVals = itemService.getMetadataByMetadataString(item, fullNameMetadata); fullname = nameVals.get(authorIndex).getValue();
if (nameVals.size() > 0) {
fullname = nameVals.iterator().next().getValue();
}
} }
if (StringUtils.isBlank(fullname)) { if (StringUtils.isBlank(fullname)) {
fullname = I18nUtil fullname = I18nUtil.getMessage(
.getMessage(
"org.dspace.app.requestitem.RequestItemMetadataStrategy.unnamed", "org.dspace.app.requestitem.RequestItemMetadataStrategy.unnamed",
context); context);
} }
author = new RequestItemAuthor(fullname, email); RequestItemAuthor author = new RequestItemAuthor(
return author; fullname, email);
authors.add(author);
}
return authors;
} else {
return Collections.EMPTY_LIST;
} }
} else { } else {
// Uses the basic strategy to look for the original submitter // Uses the basic strategy to look for the original submitter
author = super.getRequestItemAuthor(context, item); authors = super.getRequestItemAuthor(context, item);
// Is the author or his email null, so get the help desk or admin name and email
if (null == author || null == author.getEmail()) { // Remove from the list authors that do not have email addresses.
String email = null; for (RequestItemAuthor author : authors) {
String name = null; if (null == author.getEmail()) {
//First get help desk name and email authors.remove(author);
email = DSpaceServicesFactory.getInstance()
.getConfigurationService().getProperty("mail.helpdesk");
name = DSpaceServicesFactory.getInstance()
.getConfigurationService().getProperty("mail.helpdesk.name");
// If help desk mail is null get the mail and name of admin
if (email == null) {
email = DSpaceServicesFactory.getInstance()
.getConfigurationService().getProperty("mail.admin");
name = DSpaceServicesFactory.getInstance()
.getConfigurationService().getProperty("mail.admin.name");
} }
author = new RequestItemAuthor(name, email);
}
}
return author;
} }
public void setEmailMetadata(String emailMetadata) { if (authors.isEmpty()) { // No author email addresses! Fall back
//First get help desk name and email
String email = configurationService.getProperty("mail.helpdesk");
String name = configurationService.getProperty("mail.helpdesk.name");
// If help desk mail is null get the mail and name of admin
if (email == null) {
email = configurationService.getProperty("mail.admin");
name = configurationService.getProperty("mail.admin.name");
}
authors.add(new RequestItemAuthor(name, email));
}
return authors;
}
}
public void setEmailMetadata(@NonNull String emailMetadata) {
this.emailMetadata = emailMetadata; this.emailMetadata = emailMetadata;
} }
public void setFullNameMetadata(String fullNameMetadata) { public void setFullNameMetadata(@NonNull String fullNameMetadata) {
this.fullNameMetadata = fullNameMetadata; this.fullNameMetadata = fullNameMetadata;
} }

View File

@@ -8,10 +8,13 @@
package org.dspace.app.requestitem; package org.dspace.app.requestitem;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.springframework.lang.NonNull;
/** /**
* Basic strategy that looks to the original submitter. * Basic strategy that looks to the original submitter.
@@ -24,21 +27,23 @@ public class RequestItemSubmitterStrategy implements RequestItemAuthorExtractor
} }
/** /**
* Returns the submitter of an Item as RequestItemAuthor or null if the * Returns the submitter of an Item as RequestItemAuthor or an empty List if
* Submitter is deleted. * the Submitter is deleted.
* *
* @return The submitter of the item or null if the submitter is deleted * @return The submitter of the item or empty List if the submitter is deleted
* @throws SQLException if database error * @throws SQLException if database error
*/ */
@Override @Override
public RequestItemAuthor getRequestItemAuthor(Context context, Item item) @NonNull
public List<RequestItemAuthor> getRequestItemAuthor(Context context, Item item)
throws SQLException { throws SQLException {
EPerson submitter = item.getSubmitter(); EPerson submitter = item.getSubmitter();
RequestItemAuthor author = null; List<RequestItemAuthor> authors = new ArrayList<>(1);
if (null != submitter) { if (null != submitter) {
author = new RequestItemAuthor( RequestItemAuthor author = new RequestItemAuthor(
submitter.getFullName(), submitter.getEmail()); submitter.getFullName(), submitter.getEmail());
authors.add(author);
} }
return author; return authors;
} }
} }

View File

@@ -29,6 +29,10 @@ import java.util.TimeZone;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.LogHelper; import org.dspace.core.LogHelper;
@@ -44,6 +48,7 @@ import org.dspace.services.factory.DSpaceServicesFactory;
* files. Most input can be configured; use the -help flag for a full list * files. Most input can be configured; use the -help flag for a full list
* of usage information. * of usage information.
* *
* <p>
* The output of this file is plain text and forms an "aggregation" file which * The output of this file is plain text and forms an "aggregation" file which
* can then be used for display purposes using the related ReportGenerator * can then be used for display purposes using the related ReportGenerator
* class. * class.
@@ -167,7 +172,7 @@ public class LogAnalyser {
/** /**
* the average number of views per item * the average number of views per item
*/ */
private static int views = 0; private static long views = 0;
/////////////////////// ///////////////////////
// regular expressions // regular expressions
@@ -236,12 +241,12 @@ public class LogAnalyser {
/** /**
* pattern to match commented out lines from the config file * pattern to match commented out lines from the config file
*/ */
private static final Pattern comment = Pattern.compile("^#"); private static final Pattern COMMENT = Pattern.compile("^#");
/** /**
* pattern to match genuine lines from the config file * pattern to match genuine lines from the config file
*/ */
private static final Pattern real = Pattern.compile("^(.+)=(.+)"); private static final Pattern REAL = Pattern.compile("^(.+)=(.+)");
/** /**
* pattern to match all search types * pattern to match all search types
@@ -337,44 +342,73 @@ public class LogAnalyser {
Date myEndDate = null; Date myEndDate = null;
boolean myLookUp = false; boolean myLookUp = false;
// read in our command line options // Define command line options.
for (int i = 0; i < argv.length; i++) { Options options = new Options();
if (argv[i].equals("-log")) { Option option;
myLogDir = argv[i + 1];
}
if (argv[i].equals("-file")) { option = Option.builder().longOpt("log").hasArg().build();
myFileTemplate = argv[i + 1]; options.addOption(option);
}
if (argv[i].equals("-cfg")) { option = Option.builder().longOpt("file").hasArg().build();
myConfigFile = argv[i + 1]; options.addOption(option);
}
if (argv[i].equals("-out")) { option = Option.builder().longOpt("cfg").hasArg().build();
myOutFile = argv[i + 1]; options.addOption(option);
}
if (argv[i].equals("-help")) { option = Option.builder().longOpt("out").hasArg().build();
options.addOption(option);
option = Option.builder().longOpt("help").build();
options.addOption(option);
option = Option.builder().longOpt("start").hasArg().build();
options.addOption(option);
option = Option.builder().longOpt("end").hasArg().build();
options.addOption(option);
option = Option.builder().longOpt("lookup").build();
options.addOption(option);
// Parse the command.
DefaultParser cmdParser = new DefaultParser();
CommandLine cmd = cmdParser.parse(options, argv);
// Analyze the command.
if (cmd.hasOption("help")) {
LogAnalyser.usage(); LogAnalyser.usage();
System.exit(0); System.exit(0);
} }
if (argv[i].equals("-start")) { if (cmd.hasOption("log")) {
myStartDate = parseDate(argv[i + 1]); myLogDir = cmd.getOptionValue("log");
} }
if (argv[i].equals("-end")) { if (cmd.hasOption("file")) {
myEndDate = parseDate(argv[i + 1]); myFileTemplate = cmd.getOptionValue("file");
} }
if (argv[i].equals("-lookup")) { if (cmd.hasOption("cfg")) {
myLookUp = true; myConfigFile = cmd.getOptionValue("cfg");
} }
if (cmd.hasOption("out")) {
myOutFile = cmd.getOptionValue("out");
} }
if (cmd.hasOption("start")) {
myStartDate = parseDate(cmd.getOptionValue("start"));
}
if (cmd.hasOption("end")) {
myEndDate = parseDate(cmd.getOptionValue("end"));
}
myLookUp = cmd.hasOption("lookup");
// now call the method which actually processes the logs // now call the method which actually processes the logs
processLogs(context, myLogDir, myFileTemplate, myConfigFile, myOutFile, myStartDate, myEndDate, myLookUp); processLogs(context, myLogDir, myFileTemplate, myConfigFile, myOutFile,
myStartDate, myEndDate, myLookUp);
} }
/** /**
@@ -406,18 +440,18 @@ public class LogAnalyser {
startTime = new GregorianCalendar(); startTime = new GregorianCalendar();
//instantiate aggregators //instantiate aggregators
actionAggregator = new HashMap<String, Integer>(); actionAggregator = new HashMap<>();
searchAggregator = new HashMap<String, Integer>(); searchAggregator = new HashMap<>();
userAggregator = new HashMap<String, Integer>(); userAggregator = new HashMap<>();
itemAggregator = new HashMap<String, Integer>(); itemAggregator = new HashMap<>();
archiveStats = new HashMap<String, Integer>(); archiveStats = new HashMap<>();
//instantiate lists //instantiate lists
generalSummary = new ArrayList<String>(); generalSummary = new ArrayList<>();
excludeWords = new ArrayList<String>(); excludeWords = new ArrayList<>();
excludeTypes = new ArrayList<String>(); excludeTypes = new ArrayList<>();
excludeChars = new ArrayList<String>(); excludeChars = new ArrayList<>();
itemTypes = new ArrayList<String>(); itemTypes = new ArrayList<>();
// set the parameters for this analysis // set the parameters for this analysis
setParameters(myLogDir, myFileTemplate, myConfigFile, myOutFile, myStartDate, myEndDate, myLookUp); setParameters(myLogDir, myFileTemplate, myConfigFile, myOutFile, myStartDate, myEndDate, myLookUp);
@@ -529,10 +563,11 @@ public class LogAnalyser {
// for each search word add to the aggregator or // for each search word add to the aggregator or
// increment the aggregator's counter // increment the aggregator's counter
for (int j = 0; j < words.length; j++) { for (String word : words) {
// FIXME: perhaps aggregators ought to be objects // FIXME: perhaps aggregators ought to be objects
// themselves // themselves
searchAggregator.put(words[j], increment(searchAggregator, words[j])); searchAggregator.put(word,
increment(searchAggregator, word));
} }
} }
@@ -591,13 +626,13 @@ public class LogAnalyser {
} }
// do the average views analysis // do the average views analysis
if ((archiveStats.get("All Items")).intValue() != 0) { if ((archiveStats.get("All Items")) != 0) {
// FIXME: this is dependent on their being a query on the db, which // FIXME: this is dependent on their being a query on the db, which
// there might not always be if it becomes configurable // there might not always be if it becomes configurable
Double avg = Math.ceil( double avg = Math.ceil(
(actionAggregator.get("view_item")).doubleValue() / (actionAggregator.get("view_item")).doubleValue() /
(archiveStats.get("All Items")).doubleValue()); (archiveStats.get("All Items")).doubleValue());
views = avg.intValue(); views = Math.round(avg);
} }
// finally, write the output // finally, write the output
@@ -672,55 +707,55 @@ public class LogAnalyser {
Iterator<String> keys = null; Iterator<String> keys = null;
// output the number of lines parsed // output the number of lines parsed
summary.append("log_lines=" + Integer.toString(lineCount) + "\n"); summary.append("log_lines=").append(Integer.toString(lineCount)).append("\n");
// output the number of warnings encountered // output the number of warnings encountered
summary.append("warnings=" + Integer.toString(warnCount) + "\n"); summary.append("warnings=").append(Integer.toString(warnCount)).append("\n");
summary.append("exceptions=" + Integer.toString(excCount) + "\n"); summary.append("exceptions=").append(Integer.toString(excCount)).append("\n");
// set the general summary config up in the aggregator file // set the general summary config up in the aggregator file
for (int i = 0; i < generalSummary.size(); i++) { for (int i = 0; i < generalSummary.size(); i++) {
summary.append("general_summary=" + generalSummary.get(i) + "\n"); summary.append("general_summary=").append(generalSummary.get(i)).append("\n");
} }
// output the host name // output the host name
summary.append("server_name=" + hostName + "\n"); summary.append("server_name=").append(hostName).append("\n");
// output the service name // output the service name
summary.append("service_name=" + name + "\n"); summary.append("service_name=").append(name).append("\n");
// output the date information if necessary // output the date information if necessary
SimpleDateFormat sdf = new SimpleDateFormat("dd'/'MM'/'yyyy"); SimpleDateFormat sdf = new SimpleDateFormat("dd'/'MM'/'yyyy");
if (startDate != null) { if (startDate != null) {
summary.append("start_date=" + sdf.format(startDate) + "\n"); summary.append("start_date=").append(sdf.format(startDate)).append("\n");
} else if (logStartDate != null) { } else if (logStartDate != null) {
summary.append("start_date=" + sdf.format(logStartDate) + "\n"); summary.append("start_date=").append(sdf.format(logStartDate)).append("\n");
} }
if (endDate != null) { if (endDate != null) {
summary.append("end_date=" + sdf.format(endDate) + "\n"); summary.append("end_date=").append(sdf.format(endDate)).append("\n");
} else if (logEndDate != null) { } else if (logEndDate != null) {
summary.append("end_date=" + sdf.format(logEndDate) + "\n"); summary.append("end_date=").append(sdf.format(logEndDate)).append("\n");
} }
// write out the archive stats // write out the archive stats
keys = archiveStats.keySet().iterator(); keys = archiveStats.keySet().iterator();
while (keys.hasNext()) { while (keys.hasNext()) {
String key = keys.next(); String key = keys.next();
summary.append("archive." + key + "=" + archiveStats.get(key) + "\n"); summary.append("archive.").append(key).append("=").append(archiveStats.get(key)).append("\n");
} }
// write out the action aggregation results // write out the action aggregation results
keys = actionAggregator.keySet().iterator(); keys = actionAggregator.keySet().iterator();
while (keys.hasNext()) { while (keys.hasNext()) {
String key = keys.next(); String key = keys.next();
summary.append("action." + key + "=" + actionAggregator.get(key) + "\n"); summary.append("action.").append(key).append("=").append(actionAggregator.get(key)).append("\n");
} }
// depending on the config settings for reporting on emails output the // depending on the config settings for reporting on emails output the
// login information // login information
summary.append("user_email=" + userEmail + "\n"); summary.append("user_email=").append(userEmail).append("\n");
int address = 1; int address = 1;
keys = userAggregator.keySet().iterator(); keys = userAggregator.keySet().iterator();
@@ -731,9 +766,10 @@ public class LogAnalyser {
String key = keys.next(); String key = keys.next();
summary.append("user."); summary.append("user.");
if (userEmail.equals("on")) { if (userEmail.equals("on")) {
summary.append(key + "=" + userAggregator.get(key) + "\n"); summary.append(key).append("=").append(userAggregator.get(key)).append("\n");
} else if (userEmail.equals("alias")) { } else if (userEmail.equals("alias")) {
summary.append("Address " + Integer.toString(address++) + "=" + userAggregator.get(key) + "\n"); summary.append("Address ").append(Integer.toString(address++))
.append("=").append(userAggregator.get(key)).append("\n");
} }
} }
@@ -742,12 +778,13 @@ public class LogAnalyser {
// the listing there are // the listing there are
// output the search word information // output the search word information
summary.append("search_floor=" + searchFloor + "\n"); summary.append("search_floor=").append(searchFloor).append("\n");
keys = searchAggregator.keySet().iterator(); keys = searchAggregator.keySet().iterator();
while (keys.hasNext()) { while (keys.hasNext()) {
String key = keys.next(); String key = keys.next();
if ((searchAggregator.get(key)).intValue() >= searchFloor) { if ((searchAggregator.get(key)) >= searchFloor) {
summary.append("search." + key + "=" + searchAggregator.get(key) + "\n"); summary.append("search.").append(key).append("=")
.append(searchAggregator.get(key)).append("\n");
} }
} }
@@ -759,35 +796,35 @@ public class LogAnalyser {
// be the same thing. // be the same thing.
// item viewing information // item viewing information
summary.append("item_floor=" + itemFloor + "\n"); summary.append("item_floor=").append(itemFloor).append("\n");
summary.append("host_url=" + url + "\n"); summary.append("host_url=").append(url).append("\n");
summary.append("item_lookup=" + itemLookup + "\n"); summary.append("item_lookup=").append(itemLookup).append("\n");
// write out the item access information // write out the item access information
keys = itemAggregator.keySet().iterator(); keys = itemAggregator.keySet().iterator();
while (keys.hasNext()) { while (keys.hasNext()) {
String key = keys.next(); String key = keys.next();
if ((itemAggregator.get(key)).intValue() >= itemFloor) { if ((itemAggregator.get(key)) >= itemFloor) {
summary.append("item." + key + "=" + itemAggregator.get(key) + "\n"); summary.append("item.").append(key).append("=")
.append(itemAggregator.get(key)).append("\n");
} }
} }
// output the average views per item // output the average views per item
if (views > 0) { if (views > 0) {
summary.append("avg_item_views=" + views + "\n"); summary.append("avg_item_views=").append(views).append("\n");
} }
// insert the analysis processing time information // insert the analysis processing time information
Calendar endTime = new GregorianCalendar(); Calendar endTime = new GregorianCalendar();
long timeInMillis = (endTime.getTimeInMillis() - startTime.getTimeInMillis()); long timeInMillis = (endTime.getTimeInMillis() - startTime.getTimeInMillis());
summary.append("analysis_process_time=" + Long.toString(timeInMillis / 1000) + "\n"); summary.append("analysis_process_time=")
.append(Long.toString(timeInMillis / 1000)).append("\n");
// finally write the string into the output file // finally write the string into the output file
try { try (BufferedWriter out = new BufferedWriter(new FileWriter(outFile));) {
BufferedWriter out = new BufferedWriter(new FileWriter(outFile));
out.write(summary.toString()); out.write(summary.toString());
out.flush(); out.flush();
out.close();
} catch (IOException e) { } catch (IOException e) {
System.out.println("Unable to write to output file " + outFile); System.out.println("Unable to write to output file " + outFile);
System.exit(0); System.exit(0);
@@ -891,11 +928,11 @@ public class LogAnalyser {
if (i > 0) { if (i > 0) {
wordRXString.append("|"); wordRXString.append("|");
} }
wordRXString.append(" " + excludeWords.get(i) + " "); wordRXString.append(" ").append(excludeWords.get(i)).append(" ");
wordRXString.append("|"); wordRXString.append("|");
wordRXString.append("^" + excludeWords.get(i) + " "); wordRXString.append("^").append(excludeWords.get(i)).append(" ");
wordRXString.append("|"); wordRXString.append("|");
wordRXString.append(" " + excludeWords.get(i) + "$"); wordRXString.append(" ").append(excludeWords.get(i)).append("$");
} }
wordRXString.append(")"); wordRXString.append(")");
wordRX = Pattern.compile(wordRXString.toString()); wordRX = Pattern.compile(wordRXString.toString());
@@ -956,8 +993,8 @@ public class LogAnalyser {
// read in the config file and set up our instance variables // read in the config file and set up our instance variables
while ((record = br.readLine()) != null) { while ((record = br.readLine()) != null) {
// check to see what kind of line we have // check to see what kind of line we have
Matcher matchComment = comment.matcher(record); Matcher matchComment = COMMENT.matcher(record);
Matcher matchReal = real.matcher(record); Matcher matchReal = REAL.matcher(record);
// if the line is not a comment and is real, read it in // if the line is not a comment and is real, read it in
if (!matchComment.matches() && matchReal.matches()) { if (!matchComment.matches() && matchReal.matches()) {
@@ -968,7 +1005,7 @@ public class LogAnalyser {
// read the config values into our instance variables (see // read the config values into our instance variables (see
// documentation for more info on config params) // documentation for more info on config params)
if (key.equals("general.summary")) { if (key.equals("general.summary")) {
actionAggregator.put(value, Integer.valueOf(0)); actionAggregator.put(value, 0);
generalSummary.add(value); generalSummary.add(value);
} }
@@ -1022,9 +1059,9 @@ public class LogAnalyser {
Integer newValue = null; Integer newValue = null;
if (map.containsKey(key)) { if (map.containsKey(key)) {
// FIXME: this seems like a ridiculous way to add Integers // FIXME: this seems like a ridiculous way to add Integers
newValue = Integer.valueOf((map.get(key)).intValue() + 1); newValue = (map.get(key)) + 1;
} else { } else {
newValue = Integer.valueOf(1); newValue = 1;
} }
return newValue; return newValue;
} }

View File

@@ -27,6 +27,10 @@ import java.util.StringTokenizer;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataValue; import org.dspace.content.MetadataValue;
@@ -162,7 +166,7 @@ public class ReportGenerator {
/** /**
* pattern that matches an unqualified aggregator property * pattern that matches an unqualified aggregator property
*/ */
private static final Pattern real = Pattern.compile("^(.+)=(.+)"); private static final Pattern REAL = Pattern.compile("^(.+)=(.+)");
////////////////////////// //////////////////////////
// Miscellaneous variables // Miscellaneous variables
@@ -221,28 +225,46 @@ public class ReportGenerator {
String myOutput = null; String myOutput = null;
String myMap = null; String myMap = null;
// read in our command line options Options options = new Options();
for (int i = 0; i < argv.length; i++) { Option option;
if (argv[i].equals("-format")) {
myFormat = argv[i + 1].toLowerCase();
}
if (argv[i].equals("-in")) { option = Option.builder().longOpt("format").hasArg().build();
myInput = argv[i + 1]; options.addOption(option);
}
if (argv[i].equals("-out")) { option = Option.builder().longOpt("in").hasArg().build();
myOutput = argv[i + 1]; options.addOption(option);
}
if (argv[i].equals("-map")) { option = Option.builder().longOpt("out").hasArg().build();
myMap = argv[i + 1]; options.addOption(option);
}
if (argv[i].equals("-help")) { option = Option.builder().longOpt("map").hasArg().build();
options.addOption(option);
option = Option.builder().longOpt("help").build();
options.addOption(option);
DefaultParser parser = new DefaultParser();
CommandLine cmd = parser.parse(options, argv);
if (cmd.hasOption("help")) {
usage(); usage();
System.exit(0); System.exit(0);
} }
if (cmd.hasOption("format")) {
myFormat = cmd.getOptionValue("format");
}
if (cmd.hasOption("in")) {
myInput = cmd.getOptionValue("in");
}
if (cmd.hasOption("out")) {
myOutput = cmd.getOptionValue("out");
}
if (cmd.hasOption("map")) {
myMap = cmd.getOptionValue("map");
} }
processReport(context, myFormat, myInput, myOutput, myMap); processReport(context, myFormat, myInput, myOutput, myMap);
@@ -576,7 +598,7 @@ public class ReportGenerator {
// loop through the map file and read in the values // loop through the map file and read in the values
while ((record = br.readLine()) != null) { while ((record = br.readLine()) != null) {
Matcher matchReal = real.matcher(record); Matcher matchReal = REAL.matcher(record);
// if the line is real then read it in // if the line is real then read it in
if (matchReal.matches()) { if (matchReal.matches()) {
@@ -650,7 +672,7 @@ public class ReportGenerator {
// loop through the aggregator file and read in the values // loop through the aggregator file and read in the values
while ((record = br.readLine()) != null) { while ((record = br.readLine()) != null) {
// match real lines // match real lines
Matcher matchReal = real.matcher(record); Matcher matchReal = REAL.matcher(record);
// pre-prepare our input strings // pre-prepare our input strings
String section = null; String section = null;

View File

@@ -324,13 +324,9 @@ public class StatisticsLoader {
ConfigurationService configurationService ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService(); = DSpaceServicesFactory.getInstance().getConfigurationService();
File reportDir = new File(configurationService.getProperty("log.report.dir")); File reportDir = new File(configurationService.getProperty("log.report.dir"));
if (reportDir != null) {
return reportDir.listFiles(new AnalysisAndReportFilter()); return reportDir.listFiles(new AnalysisAndReportFilter());
} }
return null;
}
/** /**
* Simple class for holding information about an analysis/report file. * Simple class for holding information about an analysis/report file.
*/ */

View File

@@ -144,8 +144,8 @@ public class DCInput {
private boolean isMetadataField = false; private boolean isMetadataField = false;
private String relationshipType = null; private String relationshipType = null;
private String searchConfiguration = null; private String searchConfiguration = null;
private String filter; private final String filter;
private List<String> externalSources; private final List<String> externalSources;
/** /**
* The scope of the input sets, this restricts hidden metadata fields from * The scope of the input sets, this restricts hidden metadata fields from
@@ -213,7 +213,7 @@ public class DCInput {
|| "yes".equalsIgnoreCase(closedVocabularyStr); || "yes".equalsIgnoreCase(closedVocabularyStr);
// parsing of the <type-bind> element (using the colon as split separator) // parsing of the <type-bind> element (using the colon as split separator)
typeBind = new ArrayList<String>(); typeBind = new ArrayList<>();
String typeBindDef = fieldMap.get("type-bind"); String typeBindDef = fieldMap.get("type-bind");
if (typeBindDef != null && typeBindDef.trim().length() > 0) { if (typeBindDef != null && typeBindDef.trim().length() > 0) {
String[] types = typeBindDef.split(","); String[] types = typeBindDef.split(",");
@@ -553,7 +553,7 @@ public class DCInput {
} }
} }
} catch (PatternSyntaxException ex) { } catch (PatternSyntaxException ex) {
log.error("Regex validation failed!", ex.getMessage()); log.error("Regex validation failed! {}", ex.getMessage());
} }
} }
@@ -571,18 +571,22 @@ public class DCInput {
} }
/** /**
* Verify whether the current field contains an entity relationship * Verify whether the current field contains an entity relationship.
* This also implies a relationship type is defined for this field * This also implies a relationship type is defined for this field.
* The field can contain both an entity relationship and a metadata field simultaneously * The field can contain both an entity relationship and a metadata field
* simultaneously.
* @return true if the field contains a relationship.
*/ */
public boolean isRelationshipField() { public boolean isRelationshipField() {
return isRelationshipField; return isRelationshipField;
} }
/** /**
* Verify whether the current field contains a metadata field * Verify whether the current field contains a metadata field.
* This also implies a field type is defined for this field * This also implies a field type is defined for this field.
* The field can contain both an entity relationship and a metadata field simultaneously * The field can contain both an entity relationship and a metadata field
* simultaneously.
* @return true if the field contains a metadata field.
*/ */
public boolean isMetadataField() { public boolean isMetadataField() {
return isMetadataField; return isMetadataField;

View File

@@ -17,7 +17,6 @@ import org.dspace.core.Utils;
* Class representing all DC inputs required for a submission, organized into pages * Class representing all DC inputs required for a submission, organized into pages
* *
* @author Brian S. Hughes, based on work by Jenny Toves, OCLC * @author Brian S. Hughes, based on work by Jenny Toves, OCLC
* @version $Revision$
*/ */
public class DCInputSet { public class DCInputSet {
@@ -34,7 +33,6 @@ public class DCInputSet {
* constructor * constructor
* *
* @param formName form name * @param formName form name
* @param mandatoryFlags
* @param rows the rows * @param rows the rows
* @param listMap map * @param listMap map
*/ */

View File

@@ -470,11 +470,7 @@ public class GoogleMetadata {
parsedOptions.add(parsedFields); parsedOptions.add(parsedFields);
} }
if (null != parsedOptions) {
return parsedOptions; return parsedOptions;
} else {
return null;
}
} }
/** /**

View File

@@ -11,7 +11,6 @@ import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService; import org.dspace.content.service.CollectionService;
@@ -23,12 +22,12 @@ import org.dspace.eperson.service.EPersonService;
import org.springframework.util.StopWatch; import org.springframework.util.StopWatch;
/** /**
* A command line tool to verify/test the accuracy and speed gains of
* {@link Collection.findAuthorizedOptimized}.
* Invocation: {@code dsrun org.dspace.app.util.OptimizeSelectCollection}
* @author peterdietz * @author peterdietz
* A command line tool to verify/test the accuracy and speed gains of Collection.findAuthorizedOptimized()
* Invocation: dsrun org.dspace.app.util.OptimizeSelectCollection
*/ */
public class OptimizeSelectCollection { public class OptimizeSelectCollection {
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(OptimizeSelectCollection.class);
private static Context context; private static Context context;
private static ArrayList<EPerson> brokenPeople; private static ArrayList<EPerson> brokenPeople;
@@ -49,7 +48,7 @@ public class OptimizeSelectCollection {
"values as the legacy select-collection logic."); "values as the legacy select-collection logic.");
context = new Context(); context = new Context();
brokenPeople = new ArrayList<EPerson>(); brokenPeople = new ArrayList<>();
int peopleChecked = 0; int peopleChecked = 0;
timeSavedMS = 0L; timeSavedMS = 0L;
@@ -68,7 +67,7 @@ public class OptimizeSelectCollection {
} }
} }
if (brokenPeople.size() > 0) { if (!brokenPeople.isEmpty()) {
System.out.println("NOT DONE YET!!! Some people don't have all their collections."); System.out.println("NOT DONE YET!!! Some people don't have all their collections.");
for (EPerson person : brokenPeople) { for (EPerson person : brokenPeople) {
System.out.println("-- " + person.getEmail()); System.out.println("-- " + person.getEmail());
@@ -90,7 +89,7 @@ public class OptimizeSelectCollection {
stopWatch.start("findAuthorized"); stopWatch.start("findAuthorized");
List<Collection> collections = collectionService.findAuthorized(context, null, Constants.ADD); List<Collection> collections = collectionService.findAuthorized(context, null, Constants.ADD);
stopWatch.stop(); stopWatch.stop();
Long defaultMS = stopWatch.getLastTaskTimeMillis(); long defaultMS = stopWatch.getLastTaskTimeMillis();
stopWatch.start("ListingCollections"); stopWatch.start("ListingCollections");
System.out.println("Legacy Find Authorized"); System.out.println("Legacy Find Authorized");
@@ -100,7 +99,7 @@ public class OptimizeSelectCollection {
stopWatch.start("findAuthorizedOptimized"); stopWatch.start("findAuthorizedOptimized");
List<Collection> collectionsOptimized = collectionService.findAuthorizedOptimized(context, Constants.ADD); List<Collection> collectionsOptimized = collectionService.findAuthorizedOptimized(context, Constants.ADD);
stopWatch.stop(); stopWatch.stop();
Long optimizedMS = stopWatch.getLastTaskTimeMillis(); long optimizedMS = stopWatch.getLastTaskTimeMillis();
timeSavedMS += defaultMS - optimizedMS; timeSavedMS += defaultMS - optimizedMS;

View File

@@ -420,7 +420,7 @@ public class SyndicationFeed {
// with length of song in seconds // with length of song in seconds
if (extent != null && extent.length() > 0) { if (extent != null && extent.length() > 0) {
extent = extent.split(" ")[0]; extent = extent.split(" ")[0];
Integer duration = Integer.parseInt(extent); long duration = Long.parseLong(extent);
itunes.setDuration(new Duration(duration)); // <itunes:duration> itunes.setDuration(new Duration(duration)); // <itunes:duration>
} }

View File

@@ -346,7 +346,7 @@ public interface BrowseDAO {
public String getFilterValueField(); public String getFilterValueField();
/** /**
* Set he name of the field in which the value to constrain results is * Set the name of the field in which the value to constrain results is
* contained * contained
* *
* @param valueField the name of the field * @param valueField the name of the field

View File

@@ -203,7 +203,12 @@ public class BrowseEngine {
// get the table name that we are going to be getting our data from // get the table name that we are going to be getting our data from
dao.setTable(browseIndex.getTableName()); dao.setTable(browseIndex.getTableName());
if (scope.getBrowseIndex() != null && OrderFormat.TITLE.equals(scope.getBrowseIndex().getDataType())) {
// For browsing by title, apply the same normalization applied to indexed titles
dao.setStartsWith(normalizeJumpToValue(scope.getStartsWith()));
} else {
dao.setStartsWith(StringUtils.lowerCase(scope.getStartsWith())); dao.setStartsWith(StringUtils.lowerCase(scope.getStartsWith()));
}
// tell the browse query whether we are ascending or descending on the value // tell the browse query whether we are ascending or descending on the value
dao.setAscending(scope.isAscending()); dao.setAscending(scope.isAscending());
@@ -290,7 +295,7 @@ public class BrowseEngine {
// now, if we don't have any results, we are at the end of the browse. This will // now, if we don't have any results, we are at the end of the browse. This will
// be because a starts_with value has been supplied for which we don't have // be because a starts_with value has been supplied for which we don't have
// any items. // any items.
if (results.size() == 0) { if (results.isEmpty()) {
// In this case, we will calculate a new offset for the last page of results // In this case, we will calculate a new offset for the last page of results
offset = total - scope.getResultsPerPage(); offset = total - scope.getResultsPerPage();
if (offset < 0) { if (offset < 0) {
@@ -450,7 +455,7 @@ public class BrowseEngine {
// now, if we don't have any results, we are at the end of the browse. This will // now, if we don't have any results, we are at the end of the browse. This will
// be because a starts_with value has been supplied for which we don't have // be because a starts_with value has been supplied for which we don't have
// any items. // any items.
if (results.size() == 0) { if (results.isEmpty()) {
// In this case, we will calculate a new offset for the last page of results // In this case, we will calculate a new offset for the last page of results
offset = total - scope.getResultsPerPage(); offset = total - scope.getResultsPerPage();
if (offset < 0) { if (offset < 0) {
@@ -463,7 +468,7 @@ public class BrowseEngine {
} }
} else { } else {
// No records, so make an empty list // No records, so make an empty list
results = new ArrayList<String[]>(); results = new ArrayList<>();
} }
// construct the BrowseInfo object to pass back // construct the BrowseInfo object to pass back
@@ -554,7 +559,7 @@ public class BrowseEngine {
} }
String col = "sort_1"; String col = "sort_1";
if (so.getNumber() > 0) { if (so != null && so.getNumber() > 0) {
col = "sort_" + Integer.toString(so.getNumber()); col = "sort_" + Integer.toString(so.getNumber());
} }
@@ -591,7 +596,7 @@ public class BrowseEngine {
} }
String col = "sort_1"; String col = "sort_1";
if (so.getNumber() > 0) { if (so != null && so.getNumber() > 0) {
col = "sort_" + Integer.toString(so.getNumber()); col = "sort_" + Integer.toString(so.getNumber());
} }

View File

@@ -313,14 +313,6 @@ public final class BrowseIndex {
return name; return name;
} }
/**
* @param name The name to set.
*/
// public void setName(String name)
// {
// this.name = name;
// }
/** /**
* Get the SortOption associated with this index. * Get the SortOption associated with this index.
* *

View File

@@ -25,22 +25,7 @@ public class ItemListConfig {
/** /**
* a map of column number to metadata value * a map of column number to metadata value
*/ */
private Map<Integer, String[]> metadata = new HashMap<Integer, String[]>(); private Map<Integer, String[]> metadata = new HashMap<>();
/**
* a map of column number to data type
*/
private Map<Integer, Integer> types = new HashMap<Integer, Integer>();
/**
* constant for a DATE column
*/
private static final int DATE = 1;
/**
* constant for a TEXT column
*/
private static final int TEXT = 2;
private final transient ConfigurationService configurationService private final transient ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService(); = DSpaceServicesFactory.getInstance().getConfigurationService();
@@ -63,14 +48,11 @@ public class ItemListConfig {
// parse the config // parse the config
int i = 1; int i = 1;
for (String token : browseFields) { for (String token : browseFields) {
Integer key = Integer.valueOf(i); Integer key = i;
// find out if the field is a date // find out if the field is a date
if (token.indexOf("(date)") > 0) { if (token.indexOf("(date)") > 0) {
token = token.replaceAll("\\(date\\)", ""); token = token.replaceAll("\\(date\\)", "");
types.put(key, Integer.valueOf(ItemListConfig.DATE));
} else {
types.put(key, Integer.valueOf(ItemListConfig.TEXT));
} }
String[] mdBits = interpretField(token.trim(), null); String[] mdBits = interpretField(token.trim(), null);
@@ -100,7 +82,7 @@ public class ItemListConfig {
* @return array of metadata * @return array of metadata
*/ */
public String[] getMetadata(int col) { public String[] getMetadata(int col) {
return metadata.get(Integer.valueOf(col)); return metadata.get(col);
} }
/** /**

View File

@@ -59,7 +59,7 @@ public class LicenseUtils {
* {6} the eperson object that will be formatted using the appropriate * {6} the eperson object that will be formatted using the appropriate
* LicenseArgumentFormatter plugin (if defined)<br> * LicenseArgumentFormatter plugin (if defined)<br>
* {x} any addition argument supplied wrapped in the * {x} any addition argument supplied wrapped in the
* LicenseArgumentFormatter based on his type (map key) * LicenseArgumentFormatter based on its type (map key)
* *
* @param locale Formatter locale * @param locale Formatter locale
* @param collection collection to get license from * @param collection collection to get license from

View File

@@ -417,9 +417,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
String title = site.getName(); String title = site.getName();
String url = site.getURL(); String url = site.getURL();
if (identifier_uri != null) {
metadata.add(createDCValue("identifier.uri", null, identifier_uri)); metadata.add(createDCValue("identifier.uri", null, identifier_uri));
}
//FIXME: adding two URIs for now (site handle and URL), in case site isn't using handles //FIXME: adding two URIs for now (site handle and URL), in case site isn't using handles
if (url != null) { if (url != null) {
@@ -466,9 +464,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
metadata.add(createDCValue("description", "tableofcontents", description_table)); metadata.add(createDCValue("description", "tableofcontents", description_table));
} }
if (identifier_uri != null) {
metadata.add(createDCValue("identifier.uri", null, identifier_uri)); metadata.add(createDCValue("identifier.uri", null, identifier_uri));
}
if (rights != null) { if (rights != null) {
metadata.add(createDCValue("rights", null, rights)); metadata.add(createDCValue("rights", null, rights));
@@ -520,9 +516,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
metadata.add(createDCValue("description", "tableofcontents", description_table)); metadata.add(createDCValue("description", "tableofcontents", description_table));
} }
if (identifier_uri != null) {
metadata.add(createDCValue("identifier", "uri", identifier_uri)); metadata.add(createDCValue("identifier", "uri", identifier_uri));
}
if (provenance != null) { if (provenance != null) {
metadata.add(createDCValue("provenance", null, provenance)); metadata.add(createDCValue("provenance", null, provenance));

View File

@@ -90,17 +90,17 @@ public class XHTMLHeadDisseminationCrosswalk
* Maps DSpace metadata field to name to use in XHTML head element, e.g. * Maps DSpace metadata field to name to use in XHTML head element, e.g.
* dc.creator or dc.description.abstract * dc.creator or dc.description.abstract
*/ */
private Map<String, String> names; private final Map<String, String> names;
/** /**
* Maps DSpace metadata field to scheme for that field, if any * Maps DSpace metadata field to scheme for that field, if any
*/ */
private Map<String, String> schemes; private final Map<String, String> schemes;
/** /**
* Schemas to add -- maps schema.NAME to schema URL * Schemas to add -- maps schema.NAME to schema URL
*/ */
private Map<String, String> schemaURLs; private final Map<String, String> schemaURLs;
public XHTMLHeadDisseminationCrosswalk() throws IOException { public XHTMLHeadDisseminationCrosswalk() throws IOException {
names = new HashMap<>(); names = new HashMap<>();
@@ -109,17 +109,9 @@ public class XHTMLHeadDisseminationCrosswalk
// Read in configuration // Read in configuration
Properties crosswalkProps = new Properties(); Properties crosswalkProps = new Properties();
FileInputStream fis = new FileInputStream(config);
try { try (FileInputStream fis = new FileInputStream(config);) {
crosswalkProps.load(fis); crosswalkProps.load(fis);
} finally {
if (fis != null) {
try {
fis.close();
} catch (IOException ioe) {
// ignore
}
}
} }
Enumeration e = crosswalkProps.keys(); Enumeration e = crosswalkProps.keys();

View File

@@ -8,8 +8,10 @@
package org.dspace.content.dao; package org.dspace.content.dao;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.Date;
import java.util.List; import java.util.List;
import org.dspace.content.ProcessStatus;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.GenericDAO; import org.dspace.core.GenericDAO;
import org.dspace.scripts.Process; import org.dspace.scripts.Process;
@@ -81,4 +83,18 @@ public interface ProcessDAO extends GenericDAO<Process> {
int countTotalWithParameters(Context context, ProcessQueryParameterContainer processQueryParameterContainer) int countTotalWithParameters(Context context, ProcessQueryParameterContainer processQueryParameterContainer)
throws SQLException; throws SQLException;
/**
* Find all the processes with one of the given status and with a creation time
* older than the specified date.
*
* @param context The relevant DSpace context
* @param statuses the statuses of the processes to search for
* @param date the creation date to search for
* @return The list of all Processes which match requirements
* @throws SQLException If something goes wrong
*/
List<Process> findByStatusAndCreationTimeOlderThan(Context context, List<ProcessStatus> statuses, Date date)
throws SQLException;
} }

View File

@@ -7,7 +7,10 @@
*/ */
package org.dspace.content.dao.impl; package org.dspace.content.dao.impl;
import static org.dspace.scripts.Process_.CREATION_TIME;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.Date;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@@ -17,6 +20,7 @@ import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root; import javax.persistence.criteria.Root;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.dspace.content.ProcessStatus;
import org.dspace.content.dao.ProcessDAO; import org.dspace.content.dao.ProcessDAO;
import org.dspace.core.AbstractHibernateDAO; import org.dspace.core.AbstractHibernateDAO;
import org.dspace.core.Context; import org.dspace.core.Context;
@@ -147,6 +151,23 @@ public class ProcessDAOImpl extends AbstractHibernateDAO<Process> implements Pro
} }
@Override
public List<Process> findByStatusAndCreationTimeOlderThan(Context context, List<ProcessStatus> statuses,
Date date) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery<Process> criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class);
Root<Process> processRoot = criteriaQuery.from(Process.class);
criteriaQuery.select(processRoot);
Predicate creationTimeLessThanGivenDate = criteriaBuilder.lessThan(processRoot.get(CREATION_TIME), date);
Predicate statusIn = processRoot.get(Process_.PROCESS_STATUS).in(statuses);
criteriaQuery.where(criteriaBuilder.and(creationTimeLessThanGivenDate, statusIn));
return list(context, criteriaQuery, false, Process.class, -1, -1);
}
} }

View File

@@ -12,7 +12,6 @@ import java.util.Map;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.logic.LogicalStatementException; import org.dspace.content.logic.LogicalStatementException;
import org.dspace.content.service.CollectionService; import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
@@ -32,10 +31,10 @@ public abstract class AbstractCondition implements Condition {
private Map<String, Object> parameters; private Map<String, Object> parameters;
// Declare and instantiate spring services // Declare and instantiate spring services
//@Autowired(required = true) @Autowired(required = true)
protected ItemService itemService = ContentServiceFactory.getInstance().getItemService(); protected ItemService itemService;
//@Autowired(required = true) @Autowired(required = true)
protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); protected CollectionService collectionService;
@Autowired(required = true) @Autowired(required = true)
protected HandleService handleService; protected HandleService handleService;

View File

@@ -14,9 +14,7 @@ import java.io.FileOutputStream;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.io.OutputStream; import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.InvocationTargetException; import java.lang.reflect.InvocationTargetException;
import java.net.URLEncoder;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.HashMap; import java.util.HashMap;
import java.util.Iterator; import java.util.Iterator;
@@ -328,7 +326,6 @@ public abstract class AbstractMETSDisseminator
Mets manifest = makeManifest(context, dso, params, extraStreams); Mets manifest = makeManifest(context, dso, params, extraStreams);
// copy extra (metadata, license, etc) bitstreams into zip, update manifest // copy extra (metadata, license, etc) bitstreams into zip, update manifest
if (extraStreams != null) {
for (Map.Entry<MdRef, InputStream> ment : extraStreams.getMap().entrySet()) { for (Map.Entry<MdRef, InputStream> ment : extraStreams.getMap().entrySet()) {
MdRef ref = ment.getKey(); MdRef ref = ment.getKey();
@@ -368,7 +365,6 @@ public abstract class AbstractMETSDisseminator
is.close(); is.close();
} }
} }
}
// write manifest after metadata. // write manifest after metadata.
ZipEntry me = new ZipEntry(METSManifest.MANIFEST_FILE); ZipEntry me = new ZipEntry(METSManifest.MANIFEST_FILE);
@@ -467,17 +463,17 @@ public abstract class AbstractMETSDisseminator
Utils.copy(input, zip); Utils.copy(input, zip);
input.close(); input.close();
} else { } else {
log.warn("Adding zero-length file for Bitstream, SID=" log.warn("Adding zero-length file for Bitstream, uuid="
+ String.valueOf(bitstream.getSequenceID()) + String.valueOf(bitstream.getID())
+ ", not authorized for READ."); + ", not authorized for READ.");
} }
zip.closeEntry(); zip.closeEntry();
} else if (unauth != null && unauth.equalsIgnoreCase("skip")) { } else if (unauth != null && unauth.equalsIgnoreCase("skip")) {
log.warn("Skipping Bitstream, SID=" + String log.warn("Skipping Bitstream, uuid=" + String
.valueOf(bitstream.getSequenceID()) + ", not authorized for READ."); .valueOf(bitstream.getID()) + ", not authorized for READ.");
} else { } else {
throw new AuthorizeException( throw new AuthorizeException(
"Not authorized to read Bitstream, SID=" + String.valueOf(bitstream.getSequenceID())); "Not authorized to read Bitstream, uuid=" + String.valueOf(bitstream.getID()));
} }
} }
} }
@@ -898,12 +894,12 @@ public abstract class AbstractMETSDisseminator
continue; continue;
} else if (!(unauth != null && unauth.equalsIgnoreCase("zero"))) { } else if (!(unauth != null && unauth.equalsIgnoreCase("zero"))) {
throw new AuthorizeException( throw new AuthorizeException(
"Not authorized to read Bitstream, SID=" + String.valueOf(bitstream.getSequenceID())); "Not authorized to read Bitstream, uuid=" + String.valueOf(bitstream.getID()));
} }
} }
String sid = String.valueOf(bitstream.getSequenceID()); String uuid = String.valueOf(bitstream.getID());
String fileID = bitstreamIDstart + sid; String fileID = bitstreamIDstart + uuid;
edu.harvard.hul.ois.mets.File file = new edu.harvard.hul.ois.mets.File(); edu.harvard.hul.ois.mets.File file = new edu.harvard.hul.ois.mets.File();
file.setID(fileID); file.setID(fileID);
file.setSEQ(bitstream.getSequenceID()); file.setSEQ(bitstream.getSequenceID());
@@ -926,7 +922,7 @@ public abstract class AbstractMETSDisseminator
* extracted text or a thumbnail, so we use the name to work * extracted text or a thumbnail, so we use the name to work
* out which bitstream to be in the same group as * out which bitstream to be in the same group as
*/ */
String groupID = "GROUP_" + bitstreamIDstart + sid; String groupID = "GROUP_" + bitstreamIDstart + uuid;
if ((bundle.getName() != null) if ((bundle.getName() != null)
&& (bundle.getName().equals("THUMBNAIL") || && (bundle.getName().equals("THUMBNAIL") ||
bundle.getName().startsWith("TEXT"))) { bundle.getName().startsWith("TEXT"))) {
@@ -936,7 +932,7 @@ public abstract class AbstractMETSDisseminator
bitstream); bitstream);
if (original != null) { if (original != null) {
groupID = "GROUP_" + bitstreamIDstart groupID = "GROUP_" + bitstreamIDstart
+ original.getSequenceID(); + String.valueOf(original.getID());
} }
} }
file.setGROUPID(groupID); file.setGROUPID(groupID);
@@ -1405,7 +1401,7 @@ public abstract class AbstractMETSDisseminator
// if bare manifest, use external "persistent" URI for bitstreams // if bare manifest, use external "persistent" URI for bitstreams
if (params != null && (params.getBooleanProperty("manifestOnly", false))) { if (params != null && (params.getBooleanProperty("manifestOnly", false))) {
// Try to build a persistent(-ish) URI for bitstream // Try to build a persistent(-ish) URI for bitstream
// Format: {site-base-url}/bitstream/{item-handle}/{sequence-id}/{bitstream-name} // Format: {site-ui-url}/bitstreams/{bitstream-uuid}
try { try {
// get handle of parent Item of this bitstream, if there is one: // get handle of parent Item of this bitstream, if there is one:
String handle = null; String handle = null;
@@ -1416,26 +1412,13 @@ public abstract class AbstractMETSDisseminator
handle = bi.get(0).getHandle(); handle = bi.get(0).getHandle();
} }
} }
if (handle != null) {
return configurationService return configurationService
.getProperty("dspace.ui.url") .getProperty("dspace.ui.url")
+ "/bitstream/" + "/bitstreams/"
+ handle + String.valueOf(bitstream.getID())
+ "/" + "/download";
+ String.valueOf(bitstream.getSequenceID())
+ "/"
+ URLEncoder.encode(bitstream.getName(), "UTF-8");
} else { //no Handle assigned, so persistent(-ish) URI for bitstream is
// Format: {site-base-url}/retrieve/{bitstream-internal-id}
return configurationService
.getProperty("dspace.ui.url")
+ "/retrieve/"
+ String.valueOf(bitstream.getID());
}
} catch (SQLException e) { } catch (SQLException e) {
log.error("Database problem", e); log.error("Database problem", e);
} catch (UnsupportedEncodingException e) {
log.error("Unknown character set", e);
} }
// We should only get here if we failed to build a nice URL above // We should only get here if we failed to build a nice URL above

View File

@@ -718,7 +718,7 @@ public class Context implements AutoCloseable {
} }
/** /**
* Restore the user bound to the context and his special groups * Restore the user bound to the context and their special groups
* *
* @throws IllegalStateException if no switch was performed before * @throws IllegalStateException if no switch was performed before
*/ */

View File

@@ -346,7 +346,7 @@ public class I18nUtil {
} }
} }
if (fileNameL != null && !fileFound) { if (!fileFound) {
File fileTmp = new File(fileNameL + fileType); File fileTmp = new File(fileNameL + fileType);
if (fileTmp.exists()) { if (fileTmp.exists()) {
fileFound = true; fileFound = true;

View File

@@ -10,7 +10,6 @@ package org.dspace.core;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.FileReader; import java.io.FileReader;
import java.io.IOException; import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Array; import java.lang.reflect.Array;
import java.lang.reflect.InvocationTargetException; import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList; import java.util.ArrayList;
@@ -173,7 +172,7 @@ public class LegacyPluginServiceImpl implements PluginService {
throws PluginInstantiationException { throws PluginInstantiationException {
// cache of config data for Sequence Plugins; format its // cache of config data for Sequence Plugins; format its
// <interface-name> -> [ <classname>.. ] (value is Array) // <interface-name> -> [ <classname>.. ] (value is Array)
Map<String, String[]> sequenceConfig = new HashMap<String, String[]>(); Map<String, String[]> sequenceConfig = new HashMap<>();
// cache the configuration for this interface after grovelling it once: // cache the configuration for this interface after grovelling it once:
// format is prefix.<interface> = <classname> // format is prefix.<interface> = <classname>
@@ -220,10 +219,7 @@ public class LegacyPluginServiceImpl implements PluginService {
// Map of named plugin classes, [intfc,name] -> class // Map of named plugin classes, [intfc,name] -> class
// Also contains intfc -> "marker" to mark when interface has been loaded. // Also contains intfc -> "marker" to mark when interface has been loaded.
private Map<String, String> namedPluginClasses = new HashMap<String, String>(); private final Map<String, String> namedPluginClasses = new HashMap<>();
// Map of cached (reusable) named plugin instances, [class,name] -> instance
private Map<Serializable, Object> namedInstanceCache = new HashMap<Serializable, Object>();
// load and cache configuration data for the given interface. // load and cache configuration data for the given interface.
private void configureNamedPlugin(String iname) private void configureNamedPlugin(String iname)
@@ -413,14 +409,14 @@ public class LegacyPluginServiceImpl implements PluginService {
String iname = interfaceClass.getName(); String iname = interfaceClass.getName();
configureNamedPlugin(iname); configureNamedPlugin(iname);
String prefix = iname + SEP; String prefix = iname + SEP;
ArrayList<String> result = new ArrayList<String>(); ArrayList<String> result = new ArrayList<>();
for (String key : namedPluginClasses.keySet()) { for (String key : namedPluginClasses.keySet()) {
if (key.startsWith(prefix)) { if (key.startsWith(prefix)) {
result.add(key.substring(prefix.length())); result.add(key.substring(prefix.length()));
} }
} }
if (result.size() == 0) { if (result.isEmpty()) {
log.error("Cannot find any names for named plugin, interface=" + iname); log.error("Cannot find any names for named plugin, interface=" + iname);
} }
@@ -508,10 +504,10 @@ public class LegacyPluginServiceImpl implements PluginService {
*/ */
// tables of config keys for each type of config line: // tables of config keys for each type of config line:
Map<String, String> singleKey = new HashMap<String, String>(); Map<String, String> singleKey = new HashMap<>();
Map<String, String> sequenceKey = new HashMap<String, String>(); Map<String, String> sequenceKey = new HashMap<>();
Map<String, String> namedKey = new HashMap<String, String>(); Map<String, String> namedKey = new HashMap<>();
Map<String, String> selfnamedKey = new HashMap<String, String>(); Map<String, String> selfnamedKey = new HashMap<>();
// Find all property keys starting with "plugin." // Find all property keys starting with "plugin."
List<String> keys = configurationService.getPropertyKeys("plugin."); List<String> keys = configurationService.getPropertyKeys("plugin.");
@@ -533,7 +529,7 @@ public class LegacyPluginServiceImpl implements PluginService {
// 2. Build up list of all interfaces and test that they are loadable. // 2. Build up list of all interfaces and test that they are loadable.
// don't bother testing that they are "interface" rather than "class" // don't bother testing that they are "interface" rather than "class"
// since either one will work for the Plugin Manager. // since either one will work for the Plugin Manager.
ArrayList<String> allInterfaces = new ArrayList<String>(); ArrayList<String> allInterfaces = new ArrayList<>();
allInterfaces.addAll(singleKey.keySet()); allInterfaces.addAll(singleKey.keySet());
allInterfaces.addAll(sequenceKey.keySet()); allInterfaces.addAll(sequenceKey.keySet());
allInterfaces.addAll(namedKey.keySet()); allInterfaces.addAll(namedKey.keySet());
@@ -547,7 +543,6 @@ public class LegacyPluginServiceImpl implements PluginService {
// - each class is loadable. // - each class is loadable.
// - plugin.selfnamed values are each subclass of SelfNamedPlugin // - plugin.selfnamed values are each subclass of SelfNamedPlugin
// - save classname in allImpls // - save classname in allImpls
Map<String, String> allImpls = new HashMap<String, String>();
// single plugins - just check that it has a valid impl. class // single plugins - just check that it has a valid impl. class
ii = singleKey.keySet().iterator(); ii = singleKey.keySet().iterator();
@@ -558,9 +553,6 @@ public class LegacyPluginServiceImpl implements PluginService {
log.error("Single plugin config not found for: " + SINGLE_PREFIX + key); log.error("Single plugin config not found for: " + SINGLE_PREFIX + key);
} else { } else {
val = val.trim(); val = val.trim();
if (checkClassname(val, "implementation class")) {
allImpls.put(val, val);
}
} }
} }
@@ -571,12 +563,6 @@ public class LegacyPluginServiceImpl implements PluginService {
String[] vals = configurationService.getArrayProperty(SEQUENCE_PREFIX + key); String[] vals = configurationService.getArrayProperty(SEQUENCE_PREFIX + key);
if (vals == null || vals.length == 0) { if (vals == null || vals.length == 0) {
log.error("Sequence plugin config not found for: " + SEQUENCE_PREFIX + key); log.error("Sequence plugin config not found for: " + SEQUENCE_PREFIX + key);
} else {
for (String val : vals) {
if (checkClassname(val, "implementation class")) {
allImpls.put(val, val);
}
}
} }
} }
@@ -591,7 +577,6 @@ public class LegacyPluginServiceImpl implements PluginService {
} else { } else {
for (String val : vals) { for (String val : vals) {
if (checkClassname(val, "selfnamed implementation class")) { if (checkClassname(val, "selfnamed implementation class")) {
allImpls.put(val, val);
checkSelfNamed(val); checkSelfNamed(val);
} }
} }
@@ -609,15 +594,6 @@ public class LegacyPluginServiceImpl implements PluginService {
log.error("Named plugin config not found for: " + NAMED_PREFIX + key); log.error("Named plugin config not found for: " + NAMED_PREFIX + key);
} else { } else {
checkNames(key); checkNames(key);
for (String val : vals) {
// each named plugin has two parts to the value, format:
// [classname] = [plugin-name]
String val_split[] = val.split("\\s*=\\s*");
String classname = val_split[0];
if (checkClassname(classname, "implementation class")) {
allImpls.put(classname, classname);
}
}
} }
} }
} }

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.curate; package org.dspace.ctask.general;
import java.io.ByteArrayInputStream; import java.io.ByteArrayInputStream;
import java.io.IOException; import java.io.IOException;
@@ -18,6 +18,9 @@ import java.util.Map;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.Bitstream; import org.dspace.content.Bitstream;
import org.dspace.content.Bundle; import org.dspace.content.Bundle;
import org.dspace.content.DSpaceObject; import org.dspace.content.DSpaceObject;
@@ -26,6 +29,10 @@ import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamService; import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.BundleService; import org.dspace.content.service.BundleService;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.curate.AbstractCurationTask;
import org.dspace.curate.Curator;
import org.dspace.curate.Distributive;
import org.dspace.curate.Mutative;
import org.dspace.disseminate.factory.DisseminateServiceFactory; import org.dspace.disseminate.factory.DisseminateServiceFactory;
import org.dspace.disseminate.service.CitationDocumentService; import org.dspace.disseminate.service.CitationDocumentService;
@@ -67,6 +74,10 @@ public class CitationPage extends AbstractCurationTask {
protected BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService(); protected BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();
protected BundleService bundleService = ContentServiceFactory.getInstance().getBundleService(); protected BundleService bundleService = ContentServiceFactory.getInstance().getBundleService();
protected ResourcePolicyService resourcePolicyService = AuthorizeServiceFactory.getInstance()
.getResourcePolicyService();
private Map<String,Bitstream> displayMap = new HashMap<String,Bitstream>();
/** /**
* {@inheritDoc} * {@inheritDoc}
@@ -95,13 +106,17 @@ public class CitationPage extends AbstractCurationTask {
protected void performItem(Item item) throws SQLException { protected void performItem(Item item) throws SQLException {
//Determine if the DISPLAY bundle exits. If not, create it. //Determine if the DISPLAY bundle exits. If not, create it.
List<Bundle> dBundles = itemService.getBundles(item, CitationPage.DISPLAY_BUNDLE_NAME); List<Bundle> dBundles = itemService.getBundles(item, CitationPage.DISPLAY_BUNDLE_NAME);
Bundle original = itemService.getBundles(item, "ORIGINAL").get(0);
Bundle dBundle = null; Bundle dBundle = null;
if (dBundles == null || dBundles.isEmpty()) { if (dBundles == null || dBundles.isEmpty()) {
try { try {
dBundle = bundleService.create(Curator.curationContext(), item, CitationPage.DISPLAY_BUNDLE_NAME); dBundle = bundleService.create(Curator.curationContext(), item, CitationPage.DISPLAY_BUNDLE_NAME);
// don't inherit now otherwise they will be copied over the moved bitstreams
resourcePolicyService.removeAllPolicies(Curator.curationContext(), dBundle);
} catch (AuthorizeException e) { } catch (AuthorizeException e) {
log.error("User not authroized to create bundle on item \"" log.error("User not authroized to create bundle on item \"{}\": {}",
+ item.getName() + "\": " + e.getMessage()); item::getName, e::getMessage);
return;
} }
} else { } else {
dBundle = dBundles.get(0); dBundle = dBundles.get(0);
@@ -109,7 +124,6 @@ public class CitationPage extends AbstractCurationTask {
//Create a map of the bitstreams in the displayBundle. This is used to //Create a map of the bitstreams in the displayBundle. This is used to
//check if the bundle being cited is already in the display bundle. //check if the bundle being cited is already in the display bundle.
Map<String, Bitstream> displayMap = new HashMap<>();
for (Bitstream bs : dBundle.getBitstreams()) { for (Bitstream bs : dBundle.getBitstreams()) {
displayMap.put(bs.getName(), bs); displayMap.put(bs.getName(), bs);
} }
@@ -120,13 +134,15 @@ public class CitationPage extends AbstractCurationTask {
List<Bundle> pBundles = itemService.getBundles(item, CitationPage.PRESERVATION_BUNDLE_NAME); List<Bundle> pBundles = itemService.getBundles(item, CitationPage.PRESERVATION_BUNDLE_NAME);
Bundle pBundle = null; Bundle pBundle = null;
List<Bundle> bundles = new ArrayList<>(); List<Bundle> bundles = new ArrayList<>();
if (pBundles != null && pBundles.size() > 0) { if (pBundles != null && !pBundles.isEmpty()) {
pBundle = pBundles.get(0); pBundle = pBundles.get(0);
bundles.addAll(itemService.getBundles(item, "ORIGINAL")); bundles.addAll(itemService.getBundles(item, "ORIGINAL"));
bundles.addAll(pBundles); bundles.addAll(pBundles);
} else { } else {
try { try {
pBundle = bundleService.create(Curator.curationContext(), item, CitationPage.PRESERVATION_BUNDLE_NAME); pBundle = bundleService.create(Curator.curationContext(), item, CitationPage.PRESERVATION_BUNDLE_NAME);
// don't inherit now otherwise they will be copied over the moved bitstreams
resourcePolicyService.removeAllPolicies(Curator.curationContext(), pBundle);
} catch (AuthorizeException e) { } catch (AuthorizeException e) {
log.error("User not authroized to create bundle on item \"" log.error("User not authroized to create bundle on item \""
+ item.getName() + "\": " + e.getMessage()); + item.getName() + "\": " + e.getMessage());
@@ -159,7 +175,10 @@ public class CitationPage extends AbstractCurationTask {
citationDocument.makeCitedDocument(Curator.curationContext(), bitstream).getLeft()); citationDocument.makeCitedDocument(Curator.curationContext(), bitstream).getLeft());
//Add the cited document to the approiate bundle //Add the cited document to the approiate bundle
this.addCitedPageToItem(citedInputStream, bundle, pBundle, this.addCitedPageToItem(citedInputStream, bundle, pBundle,
dBundle, displayMap, item, bitstream); dBundle, item, bitstream);
// now set the policies of the preservation and display bundle
clonePolicies(Curator.curationContext(), original, pBundle);
clonePolicies(Curator.curationContext(), original, dBundle);
} catch (Exception e) { } catch (Exception e) {
//Could be many things, but nothing that should be //Could be many things, but nothing that should be
//expected. //expected.
@@ -202,8 +221,6 @@ public class CitationPage extends AbstractCurationTask {
* @param pBundle The preservation bundle. The original document should be * @param pBundle The preservation bundle. The original document should be
* put in here if it is not already. * put in here if it is not already.
* @param dBundle The display bundle. The cited document gets put in here. * @param dBundle The display bundle. The cited document gets put in here.
* @param displayMap The map of bitstream names to bitstreams in the display
* bundle.
* @param item The item containing the bundles being used. * @param item The item containing the bundles being used.
* @param bitstream The original source bitstream. * @param bitstream The original source bitstream.
* @throws SQLException if database error * @throws SQLException if database error
@@ -211,7 +228,7 @@ public class CitationPage extends AbstractCurationTask {
* @throws IOException if IO error * @throws IOException if IO error
*/ */
protected void addCitedPageToItem(InputStream citedDoc, Bundle bundle, Bundle pBundle, protected void addCitedPageToItem(InputStream citedDoc, Bundle bundle, Bundle pBundle,
Bundle dBundle, Map<String,Bitstream> displayMap, Item item, Bundle dBundle, Item item,
Bitstream bitstream) throws SQLException, AuthorizeException, IOException { Bitstream bitstream) throws SQLException, AuthorizeException, IOException {
//If we are modifying a file that is not in the //If we are modifying a file that is not in the
//preservation bundle then we have to move it there. //preservation bundle then we have to move it there.
@@ -239,7 +256,8 @@ public class CitationPage extends AbstractCurationTask {
citedBitstream.setName(context, bitstream.getName()); citedBitstream.setName(context, bitstream.getName());
bitstreamService.setFormat(context, citedBitstream, bitstream.getFormat(Curator.curationContext())); bitstreamService.setFormat(context, citedBitstream, bitstream.getFormat(Curator.curationContext()));
citedBitstream.setDescription(context, bitstream.getDescription()); citedBitstream.setDescription(context, bitstream.getDescription());
displayMap.put(bitstream.getName(), citedBitstream);
clonePolicies(context, bitstream, citedBitstream);
this.resBuilder.append(" Added ") this.resBuilder.append(" Added ")
.append(citedBitstream.getName()) .append(citedBitstream.getName())
.append(" to the ") .append(" to the ")
@@ -251,4 +269,16 @@ public class CitationPage extends AbstractCurationTask {
itemService.update(context, item); itemService.update(context, item);
this.status = Curator.CURATE_SUCCESS; this.status = Curator.CURATE_SUCCESS;
} }
/**
 * Replace all resource policies on {@code target} with clones of the
 * policies currently attached to {@code source}.
 *
 * @param context the current DSpace context.
 * @param source  the object whose policies are to be copied.
 * @param target  the object whose existing policies are discarded and
 *                replaced by the clones.
 * @throws SQLException       if a database error occurs.
 * @throws AuthorizeException if the current user may not modify policies.
 */
private void clonePolicies(Context context, DSpaceObject source, DSpaceObject target)
        throws SQLException, AuthorizeException {
    // Clear the target first so only the cloned policies remain afterwards.
    resourcePolicyService.removeAllPolicies(context, target);
    for (ResourcePolicy rp : source.getResourcePolicies()) {
        ResourcePolicy newPolicy = resourcePolicyService.clone(context, rp);
        // Re-point the clone at the target object, keeping the same action.
        newPolicy.setdSpaceObject(target);
        newPolicy.setAction(rp.getAction());
        resourcePolicyService.update(context, newPolicy);
    }
}
} }

View File

@@ -0,0 +1,94 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.ctask.general;
import java.io.IOException;
import java.sql.SQLException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.curate.AbstractCurationTask;
import org.dspace.curate.Curator;
import org.dspace.identifier.IdentifierException;
import org.dspace.identifier.IdentifierProvider;
import org.dspace.identifier.VersionedHandleIdentifierProviderWithCanonicalHandles;
import org.dspace.identifier.factory.IdentifierServiceFactory;
import org.dspace.identifier.service.IdentifierService;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
* Ensure that an object has all of the identifiers that it should, minting them
* as necessary.
*
* @author Mark H. Wood {@literal <mwood@iupui.edu>}
*/
public class CreateMissingIdentifiers
        extends AbstractCurationTask {
    private static final Logger LOG = LogManager.getLogger();

    /**
     * Mint any identifiers that the passed object should have but lacks.
     * Only {@link Item}s are processed; all other model objects are skipped.
     *
     * @param dso the object to check.
     * @return {@link Curator#CURATE_SUCCESS} when registration completed,
     *         {@link Curator#CURATE_SKIP} for non-Item objects, or
     *         {@link Curator#CURATE_ERROR} on failure.
     * @throws IOException passed through from the curation framework.
     */
    @Override
    public int perform(DSpaceObject dso)
            throws IOException {
        // Only some kinds of model objects get identifiers
        if (!(dso instanceof Item)) {
            return Curator.CURATE_SKIP;
        }

        // XXX Temporary escape when an incompatible provider is configured.
        // XXX Remove this when the provider is fixed.
        boolean compatible = DSpaceServicesFactory
                .getInstance()
                .getServiceManager()
                .getServiceByName(
                        VersionedHandleIdentifierProviderWithCanonicalHandles.class.getCanonicalName(),
                        IdentifierProvider.class) == null;

        if (!compatible) {
            setResult("This task is not compatible with VersionedHandleIdentifierProviderWithCanonicalHandles");
            return Curator.CURATE_ERROR;
        }
        // XXX End of escape

        String typeText = Constants.typeText[dso.getType()];

        // Get a Context
        Context context;
        try {
            context = Curator.curationContext();
        } catch (SQLException ex) {
            report("Could not get the curation Context: " + ex.getMessage());
            // Keep the stack trace in the log for diagnosis.
            LOG.error("Could not get the curation Context", ex);
            return Curator.CURATE_ERROR;
        }

        // Find the IdentifierService implementation
        IdentifierService identifierService = IdentifierServiceFactory
                .getInstance()
                .getIdentifierService();

        // Register any missing identifiers.
        try {
            identifierService.register(context, dso);
        } catch (AuthorizeException | IdentifierException | SQLException ex) {
            String message = ex.getMessage();
            report(String.format("Identifier(s) not minted for %s %s: %s%n",
                    typeText, dso.getID().toString(), message));
            // Pass the exception as the last argument so log4j records the
            // stack trace; the message alone may be null or uninformative.
            LOG.error("Identifier(s) not minted: {}", message, ex);
            return Curator.CURATE_ERROR;
        }

        // Success!
        report(String.format("%s %s registered.%n",
                typeText, dso.getID().toString()));
        return Curator.CURATE_SUCCESS;
    }
}

View File

@@ -10,11 +10,13 @@ package org.dspace.ctask.general;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayDeque;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Queue;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import javax.xml.XMLConstants; import javax.xml.XMLConstants;
@@ -33,6 +35,7 @@ import org.apache.http.HttpStatus;
import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject; import org.dspace.content.DSpaceObject;
@@ -60,18 +63,18 @@ import org.xml.sax.SAXException;
* Intended use: cataloging tool in workflow and general curation. * Intended use: cataloging tool in workflow and general curation.
* The task uses a URL 'template' to compose the service call, e.g. * The task uses a URL 'template' to compose the service call, e.g.
* *
* {@code http://www.sherpa.ac.uk/romeo/api29.php?issn=\{dc.identifier.issn\}} * <p>{@code http://www.sherpa.ac.uk/romeo/api29.php?issn=\{dc.identifier.issn\}}
* *
* Task will substitute the value of the passed item's metadata field * <p>Task will substitute the value of the passed item's metadata field
* in the {parameter} position. If multiple values are present in the * in the {parameter} position. If multiple values are present in the
* item field, the first value is used. * item field, the first value is used.
* *
* The task uses another property (the datamap) to determine what data * <p>The task uses another property (the datamap) to determine what data
* to extract from the service response and how to use it, e.g. * to extract from the service response and how to use it, e.g.
* *
* {@code //publisher/name=>dc.publisher,//romeocolour} * <p>{@code //publisher/name=>dc.publisher,//romeocolour}
* *
* Task will evaluate the left-hand side (or entire token) of each * <p>Task will evaluate the left-hand side (or entire token) of each
* comma-separated token in the property as an XPath 1.0 expression into * comma-separated token in the property as an XPath 1.0 expression into
* the response document, and if there is a mapping symbol (e.g. {@code '=>'}) and * the response document, and if there is a mapping symbol (e.g. {@code '=>'}) and
* value, it will assign the response document value(s) to the named * value, it will assign the response document value(s) to the named
@@ -79,48 +82,52 @@ import org.xml.sax.SAXException;
* multiple values, they will all be assigned to the item field. The * multiple values, they will all be assigned to the item field. The
* mapping symbol governs the nature of metadata field assignment: * mapping symbol governs the nature of metadata field assignment:
* *
* {@code '->'} mapping will add to any existing values in the item field * <ul>
* {@code '=>'} mapping will replace any existing values in the item field * <li>{@code '->'} mapping will add to any existing values in the item field</li>
* {@code '~>'} mapping will add *only* if item field has no existing values * <li>{@code '=>'} mapping will replace any existing values in the item field</li>
* <li>{@code '~>'} mapping will add *only* if item field has no existing values</li>
* </ul>
* *
* Unmapped data (without a mapping symbol) will simply be added to the task * <p>Unmapped data (without a mapping symbol) will simply be added to the task
* result string, prepended by the XPath expression (a little prettified). * result string, prepended by the XPath expression (a little prettified).
* Each label/value pair in the result string is separated by a space, * Each label/value pair in the result string is separated by a space,
* unless the optional 'separator' property is defined. * unless the optional 'separator' property is defined.
* *
* A very rudimentary facility for transformation of data is supported, e.g. * <p>A very rudimentary facility for transformation of data is supported, e.g.
* *
* {@code http://www.crossref.org/openurl/?id=\{doi:dc.relation.isversionof\}&format=unixref} * <p>{@code http://www.crossref.org/openurl/?id=\{doi:dc.relation.isversionof\}&format=unixref}
* *
* The 'doi:' prefix will cause the task to look for a 'transform' with that * <p>The 'doi:' prefix will cause the task to look for a 'transform' with that
* name, which is applied to the metadata value before parameter substitution * name, which is applied to the metadata value before parameter substitution
* occurs. Transforms are defined in a task property such as the following: * occurs. Transforms are defined in a task property such as the following:
* *
* {@code transform.doi = match 10. trunc 60} * <p>{@code transform.doi = match 10. trunc 60}
* *
* This means exclude the value string up to the occurrence of '10.', then * <p>This means exclude the value string up to the occurrence of '10.', then
* truncate after 60 characters. The only transform functions currently defined: * truncate after 60 characters. The only transform functions currently defined:
* *
* {@code 'cut' <number>} = remove number leading characters * <ul>
* {@code 'trunc' <number>} = remove trailing characters after number length * <li>{@code 'cut' <number>} = remove number leading characters</li>
* {@code 'match' <pattern>} = start match at pattern * <li>{@code 'trunc' <number>} = remove trailing characters after number length</li>
* {@code 'text' <characters>} = append literal characters (enclose in ' ' when whitespace needed) * <li>{@code 'match' <pattern>} = start match at pattern</li>
* <li>{@code 'text' <characters>} = append literal characters (enclose in ' ' when whitespace needed)</li>
* </ul>
* *
* If the transform results in an invalid state (e.g. cutting more characters * <p>If the transform results in an invalid state (e.g. cutting more characters
* than are in the value), the condition will be logged and the * than are in the value), the condition will be logged and the
* un-transformed value used. * un-transformed value used.
* *
* Transforms may also be used in datamaps, e.g. * <p>Transforms may also be used in datamaps, e.g.
* *
* {@code //publisher/name=>shorten:dc.publisher,//romeocolour} * <p>{@code //publisher/name=>shorten:dc.publisher,//romeocolour}
* *
* which would apply the 'shorten' transform to the service response value(s) * <p>which would apply the 'shorten' transform to the service response value(s)
* prior to metadata field assignment. * prior to metadata field assignment.
* *
* An optional property 'headers' may be defined to stipulate any HTTP headers * <p>An optional property 'headers' may be defined to stipulate any HTTP headers
* required in the service call. The property syntax is double-pipe separated headers: * required in the service call. The property syntax is double-pipe separated headers:
* *
* {@code Accept: text/xml||Cache-Control: no-cache} * <p>{@code Accept: text/xml||Cache-Control: no-cache}
* *
* @author richardrodgers * @author richardrodgers
*/ */
@@ -128,9 +135,9 @@ import org.xml.sax.SAXException;
@Suspendable @Suspendable
public class MetadataWebService extends AbstractCurationTask implements NamespaceContext { public class MetadataWebService extends AbstractCurationTask implements NamespaceContext {
/** /**
* log4j category * logging category
*/ */
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(MetadataWebService.class); private static final Logger log = LogManager.getLogger();
// transform token parsing pattern // transform token parsing pattern
protected Pattern ttPattern = Pattern.compile("\'([^\']*)\'|(\\S+)"); protected Pattern ttPattern = Pattern.compile("\'([^\']*)\'|(\\S+)");
// URL of web service with template parameters // URL of web service with template parameters
@@ -360,42 +367,45 @@ public class MetadataWebService extends AbstractCurationTask implements Namespac
if (transDef == null) { if (transDef == null) {
return value; return value;
} }
String[] tokens = tokenize(transDef); Queue<String> tokens = tokenize(transDef);
String retValue = value; String retValue = value;
for (int i = 0; i < tokens.length; i += 2) { while (!tokens.isEmpty()) {
if ("cut".equals(tokens[i]) || "trunc".equals(tokens[i])) { String function = tokens.poll();
int index = Integer.parseInt(tokens[i + 1]); if ("cut".equals(function) || "trunc".equals(function)) {
String argument = tokens.poll();
int index = Integer.parseInt(argument);
if (retValue.length() > index) { if (retValue.length() > index) {
if ("cut".equals(tokens[i])) { if ("cut".equals(function)) {
retValue = retValue.substring(index); retValue = retValue.substring(index);
} else { } else {
retValue = retValue.substring(0, index); retValue = retValue.substring(0, index);
} }
} else if ("cut".equals(tokens[i])) { } else if ("cut".equals(function)) {
log.error("requested cut: " + index + " exceeds value length"); log.error("requested cut: {} exceeds value length", index);
return value; return value;
} }
} else if ("match".equals(tokens[i])) { } else if ("match".equals(function)) {
int index2 = retValue.indexOf(tokens[i + 1]); String argument = tokens.poll();
int index2 = retValue.indexOf(argument);
if (index2 > 0) { if (index2 > 0) {
retValue = retValue.substring(index2); retValue = retValue.substring(index2);
} else { } else {
log.error("requested match: " + tokens[i + 1] + " failed"); log.error("requested match: {} failed", argument);
return value; return value;
} }
} else if ("text".equals(tokens[i])) { } else if ("text".equals(function)) {
retValue = retValue + tokens[i + 1]; retValue = retValue + tokens.poll();
} else { } else {
log.error(" unknown transform operation: " + tokens[i]); log.error(" unknown transform operation: " + function);
return value; return value;
} }
} }
return retValue; return retValue;
} }
protected String[] tokenize(String text) { protected Queue<String> tokenize(String text) {
List<String> list = new ArrayList<>();
Matcher m = ttPattern.matcher(text); Matcher m = ttPattern.matcher(text);
Queue<String> list = new ArrayDeque<>(m.groupCount());
while (m.find()) { while (m.find()) {
if (m.group(1) != null) { if (m.group(1) != null) {
list.add(m.group(1)); list.add(m.group(1));
@@ -403,7 +413,7 @@ public class MetadataWebService extends AbstractCurationTask implements Namespac
list.add(m.group(2)); list.add(m.group(2));
} }
} }
return list.toArray(new String[0]); return list;
} }
protected int getMapIndex(String mapping) { protected int getMapIndex(String mapping) {

View File

@@ -21,7 +21,6 @@ import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.discovery.indexobject.IndexableItem;
import org.dspace.service.impl.HttpConnectionPoolService; import org.dspace.service.impl.HttpConnectionPoolService;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.storage.rdbms.DatabaseUtils; import org.dspace.storage.rdbms.DatabaseUtils;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
@@ -75,14 +74,13 @@ public class SolrSearchCore {
*/ */
protected void initSolr() { protected void initSolr() {
if (solr == null) { if (solr == null) {
String solrService = DSpaceServicesFactory.getInstance().getConfigurationService() String solrService = configurationService.getProperty("discovery.search.server");
.getProperty("discovery.search.server");
UrlValidator urlValidator = new UrlValidator(UrlValidator.ALLOW_LOCAL_URLS); UrlValidator urlValidator = new UrlValidator(UrlValidator.ALLOW_LOCAL_URLS);
if (urlValidator.isValid(solrService) || configurationService if (urlValidator.isValid(solrService) || configurationService
.getBooleanProperty("discovery.solr.url.validation.enabled", true)) { .getBooleanProperty("discovery.solr.url.validation.enabled", true)) {
try { try {
log.debug("Solr URL: " + solrService); log.debug("Solr URL: {}", solrService);
HttpSolrClient solrServer = new HttpSolrClient.Builder(solrService) HttpSolrClient solrServer = new HttpSolrClient.Builder(solrService)
.withHttpClient(httpConnectionPoolService.getClient()) .withHttpClient(httpConnectionPoolService.getClient())
.build(); .build();
@@ -103,10 +101,13 @@ public class SolrSearchCore {
solr = solrServer; solr = solrServer;
} catch (SolrServerException | IOException e) { } catch (SolrServerException | IOException e) {
log.error("Error while initializing solr server", e); log.error("Error while initializing solr server {}",
solrService, e);
throw new RuntimeException("Failed to contact Solr at " + solrService
+ " : " + e.getMessage());
} }
} else { } else {
log.error("Error while initializing solr, invalid url: " + solrService); log.error("Error while initializing solr, invalid url: {}", solrService);
} }
} }
} }

View File

@@ -979,7 +979,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
// if we found stale objects we can decide to skip execution of the remaining code to improve performance // if we found stale objects we can decide to skip execution of the remaining code to improve performance
boolean skipLoadingResponse = false; boolean skipLoadingResponse = false;
// use zombieDocs to collect stale found objects // use zombieDocs to collect stale found objects
List<String> zombieDocs = new ArrayList<String>(); List<String> zombieDocs = new ArrayList<>();
QueryResponse solrQueryResponse = solrSearchCore.getSolr().query(solrQuery, QueryResponse solrQueryResponse = solrSearchCore.getSolr().query(solrQuery,
solrSearchCore.REQUEST_METHOD); solrSearchCore.REQUEST_METHOD);
if (solrQueryResponse != null) { if (solrQueryResponse != null) {
@@ -1033,12 +1033,6 @@ public class SolrServiceImpl implements SearchService, IndexingService {
//We need to remove all the "_hl" appendix strings from our keys //We need to remove all the "_hl" appendix strings from our keys
Map<String, List<String>> resultMap = new HashMap<>(); Map<String, List<String>> resultMap = new HashMap<>();
for (String key : highlightedFields.keySet()) { for (String key : highlightedFields.keySet()) {
List<String> highlightOriginalValue = highlightedFields.get(key);
List<String[]> resultHighlightOriginalValue = new ArrayList<>();
for (String highlightValue : highlightOriginalValue) {
String[] splitted = highlightValue.split("###");
resultHighlightOriginalValue.add(splitted);
}
resultMap.put(key.substring(0, key.lastIndexOf("_hl")), highlightedFields.get(key)); resultMap.put(key.substring(0, key.lastIndexOf("_hl")), highlightedFields.get(key));
} }
@@ -1054,7 +1048,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
// If any stale entries are found in the current page of results, // If any stale entries are found in the current page of results,
// we remove those stale entries and rerun the same query again. // we remove those stale entries and rerun the same query again.
// Otherwise, the query is valid and the results are returned. // Otherwise, the query is valid and the results are returned.
if (zombieDocs.size() != 0) { if (!zombieDocs.isEmpty()) {
log.info("Cleaning " + zombieDocs.size() + " stale objects from Discovery Index"); log.info("Cleaning " + zombieDocs.size() + " stale objects from Discovery Index");
log.info("ZombieDocs "); log.info("ZombieDocs ");
zombieDocs.forEach(log::info); zombieDocs.forEach(log::info);

View File

@@ -22,6 +22,8 @@ import java.util.UUID;
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair; import org.apache.commons.lang3.tuple.Pair;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeConfiguration; import org.dspace.authorize.AuthorizeConfiguration;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.ResourcePolicy;
@@ -52,8 +54,6 @@ import org.dspace.xmlworkflow.storedcomponents.PoolTask;
import org.dspace.xmlworkflow.storedcomponents.service.ClaimedTaskService; import org.dspace.xmlworkflow.storedcomponents.service.ClaimedTaskService;
import org.dspace.xmlworkflow.storedcomponents.service.CollectionRoleService; import org.dspace.xmlworkflow.storedcomponents.service.CollectionRoleService;
import org.dspace.xmlworkflow.storedcomponents.service.PoolTaskService; import org.dspace.xmlworkflow.storedcomponents.service.PoolTaskService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
/** /**
@@ -64,7 +64,7 @@ import org.springframework.beans.factory.annotation.Autowired;
* @author kevinvandevelde at atmire.com * @author kevinvandevelde at atmire.com
*/ */
public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements GroupService { public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements GroupService {
private static final Logger log = LoggerFactory.getLogger(GroupServiceImpl.class); private static final Logger log = LogManager.getLogger();
@Autowired(required = true) @Autowired(required = true)
protected GroupDAO groupDAO; protected GroupDAO groupDAO;
@@ -473,7 +473,7 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
@Override @Override
public void delete(Context context, Group group) throws SQLException { public void delete(Context context, Group group) throws SQLException {
if (group.isPermanent()) { if (group.isPermanent()) {
log.error("Attempt to delete permanent Group $", group.getName()); log.error("Attempt to delete permanent Group {}", group::getName);
throw new SQLException("Attempt to delete a permanent Group"); throw new SQLException("Attempt to delete a permanent Group");
} }
@@ -715,7 +715,7 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
// if the group is used for one or more roles on a single collection, // if the group is used for one or more roles on a single collection,
// admins can eventually manage it // admins can eventually manage it
List<CollectionRole> collectionRoles = collectionRoleService.findByGroup(context, group); List<CollectionRole> collectionRoles = collectionRoleService.findByGroup(context, group);
if (collectionRoles != null && collectionRoles.size() > 0) { if (collectionRoles != null && !collectionRoles.isEmpty()) {
Set<Collection> colls = new HashSet<>(); Set<Collection> colls = new HashSet<>();
for (CollectionRole cr : collectionRoles) { for (CollectionRole cr : collectionRoles) {
colls.add(cr.getCollection()); colls.add(cr.getCollection());

View File

@@ -45,7 +45,7 @@ public interface GroupDAO extends DSpaceObjectDAO<Group>, DSpaceObjectLegacySupp
* Find all groups ordered by the specified metadata fields ascending * Find all groups ordered by the specified metadata fields ascending
* *
* @param context The DSpace context * @param context The DSpace context
* @param sortMetadataFields The metadata fields to sort on * @param metadataSortFields The metadata fields to sort on
* @param pageSize how many results return * @param pageSize how many results return
* @param offset the position of the first result to return * @param offset the position of the first result to return
* @return A list of all groups, ordered by metadata fields * @return A list of all groups, ordered by metadata fields

View File

@@ -15,6 +15,7 @@ import java.util.ArrayList;
import java.util.Base64; import java.util.Base64;
import java.util.Collections; import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Optional; import java.util.Optional;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@@ -33,6 +34,7 @@ import org.dspace.content.dto.MetadataValueDTO;
import org.dspace.external.OpenAIRERestConnector; import org.dspace.external.OpenAIRERestConnector;
import org.dspace.external.model.ExternalDataObject; import org.dspace.external.model.ExternalDataObject;
import org.dspace.external.provider.AbstractExternalDataProvider; import org.dspace.external.provider.AbstractExternalDataProvider;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
/** /**
@@ -40,13 +42,9 @@ import org.springframework.beans.factory.annotation.Autowired;
* will deal with the OpenAIRE External Data lookup * will deal with the OpenAIRE External Data lookup
* *
* @author paulo-graca * @author paulo-graca
*
*/ */
public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider { public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
/**
* log4j logger
*/
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(OpenAIREFundingDataProvider.class); private static Logger log = org.apache.logging.log4j.LogManager.getLogger(OpenAIREFundingDataProvider.class);
/** /**
@@ -54,6 +52,16 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
*/ */
protected static final String PREFIX = "info:eu-repo/grantAgreement"; protected static final String PREFIX = "info:eu-repo/grantAgreement";
private static final String TITLE = "dcTitle";
private static final String SUBJECT = "dcSubject";
private static final String AWARD_URI = "awardURI";
private static final String FUNDER_NAME = "funderName";
private static final String SPATIAL = "coverageSpatial";
private static final String AWARD_NUMBER = "awardNumber";
private static final String FUNDER_ID = "funderIdentifier";
private static final String FUNDING_STREAM = "fundingStream";
private static final String TITLE_ALTERNATIVE = "titleAlternative";
/** /**
* rows default limit * rows default limit
*/ */
@@ -69,11 +77,9 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
*/ */
protected OpenAIRERestConnector connector; protected OpenAIRERestConnector connector;
/** protected Map<String, MetadataFieldConfig> metadataFields;
* required method
*/ public void init() throws IOException {}
public void init() throws IOException {
}
@Override @Override
public String getSourceIdentifier() { public String getSourceIdentifier() {
@@ -266,14 +272,22 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
} }
} }
public Map<String, MetadataFieldConfig> getMetadataFields() {
return metadataFields;
}
public void setMetadataFields(Map<String, MetadataFieldConfig> metadataFields) {
this.metadataFields = metadataFields;
}
/** /**
* OpenAIRE Funding External Data Builder Class * OpenAIRE Funding External Data Builder Class
* *
* @author pgraca * @author pgraca
*
*/ */
public static class ExternalDataObjectBuilder { public class ExternalDataObjectBuilder {
ExternalDataObject externalDataObject;
private ExternalDataObject externalDataObject;
public ExternalDataObjectBuilder(Project project) { public ExternalDataObjectBuilder(Project project) {
String funderIdPrefix = "urn:openaire:"; String funderIdPrefix = "urn:openaire:";
@@ -283,46 +297,42 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
for (FundingTreeType fundingTree : projectHelper.getFundingTreeTypes()) { for (FundingTreeType fundingTree : projectHelper.getFundingTreeTypes()) {
FunderType funder = fundingTree.getFunder(); FunderType funder = fundingTree.getFunder();
// Funder name // Funder name
this.addFunderName(funder.getName()); this.addMetadata(metadataFields.get(FUNDER_NAME), funder.getName());
// Funder Id - convert it to an urn // Funder Id - convert it to an urn
this.addFunderID(funderIdPrefix + funder.getId()); this.addMetadata(metadataFields.get(FUNDER_ID), funderIdPrefix + funder.getId());
// Jurisdiction // Jurisdiction
this.addFunderJuristiction(funder.getJurisdiction()); this.addMetadata(metadataFields.get(SPATIAL), funder.getJurisdiction());
FundingHelper fundingHelper = new FundingHelper( FundingHelper fundingHelper = new FundingHelper(
fundingTree.getFundingLevel2OrFundingLevel1OrFundingLevel0()); fundingTree.getFundingLevel2OrFundingLevel1OrFundingLevel0());
// Funding description // Funding description
for (FundingType funding : fundingHelper.getFirstAvailableFunding()) { for (FundingType funding : fundingHelper.getFirstAvailableFunding()) {
this.addFundingStream(funding.getDescription()); this.addMetadata(metadataFields.get(FUNDING_STREAM), funding.getDescription());
} }
} }
// Title // Title
for (String title : projectHelper.getTitles()) { for (String title : projectHelper.getTitles()) {
this.addAwardTitle(title); this.addMetadata(metadataFields.get(TITLE), title);
this.setDisplayValue(title); this.setDisplayValue(title);
this.setValue(title); this.setValue(title);
} }
// Code // Code
for (String code : projectHelper.getCodes()) { for (String code : projectHelper.getCodes()) {
this.addAwardNumber(code); this.addMetadata(metadataFields.get(AWARD_NUMBER), code);
} }
// Website url // Website url
for (String url : projectHelper.getWebsiteUrls()) { for (String url : projectHelper.getWebsiteUrls()) {
this.addAwardURI(url); this.addMetadata(metadataFields.get(AWARD_URI), url);
} }
// Acronyms // Acronyms
for (String acronym : projectHelper.getAcronyms()) { for (String acronym : projectHelper.getAcronyms()) {
this.addFundingItemAcronym(acronym); this.addMetadata(metadataFields.get(TITLE_ALTERNATIVE), acronym);
} }
// Keywords // Keywords
for (String keyword : projectHelper.getKeywords()) { for (String keyword : projectHelper.getKeywords()) {
this.addSubject(keyword); this.addMetadata(metadataFields.get(SUBJECT), keyword);
} }
} }
@@ -366,7 +376,6 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
* @return ExternalDataObjectBuilder * @return ExternalDataObjectBuilder
*/ */
public ExternalDataObjectBuilder setId(String id) { public ExternalDataObjectBuilder setId(String id) {
// we use base64 encoding in order to use slashes / and other // we use base64 encoding in order to use slashes / and other
// characters that must be escaped for the <:entry-id> // characters that must be escaped for the <:entry-id>
String base64Id = Base64.getEncoder().encodeToString(id.getBytes()); String base64Id = Base64.getEncoder().encodeToString(id.getBytes());
@@ -374,128 +383,10 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
return this; return this;
} }
/** public ExternalDataObjectBuilder addMetadata(MetadataFieldConfig metadataField, String value) {
* Add metadata dc.identifier this.externalDataObject.addMetadata(new MetadataValueDTO(metadataField.getSchema(),
* metadataField.getElement(),
* @param metadata identifier metadataField.getQualifier(), null, value));
* @return ExternalDataObjectBuilder
*/
public ExternalDataObjectBuilder addIdentifier(String identifier) {
this.externalDataObject.addMetadata(new MetadataValueDTO("dc", "identifier", null, null, identifier));
return this;
}
/**
* Add metadata project.funder.name
*
* @param metadata funderName
* @return ExternalDataObjectBuilder
*/
public ExternalDataObjectBuilder addFunderName(String funderName) {
this.externalDataObject.addMetadata(new MetadataValueDTO("project", "funder", "name", null, funderName));
return this;
}
/**
* Add metadata project.funder.identifier
*
* @param metadata funderId
* @return ExternalDataObjectBuilder
*/
public ExternalDataObjectBuilder addFunderID(String funderID) {
this.externalDataObject
.addMetadata(new MetadataValueDTO("project", "funder", "identifier", null, funderID));
return this;
}
/**
* Add metadata dc.title
*
* @param metadata awardTitle
* @return ExternalDataObjectBuilder
*/
public ExternalDataObjectBuilder addAwardTitle(String awardTitle) {
this.externalDataObject.addMetadata(new MetadataValueDTO("dc", "title", null, null, awardTitle));
return this;
}
/**
* Add metadata oaire.fundingStream
*
* @param metadata fundingStream
* @return ExternalDataObjectBuilder
*/
public ExternalDataObjectBuilder addFundingStream(String fundingStream) {
this.externalDataObject
.addMetadata(new MetadataValueDTO("oaire", "fundingStream", null, null, fundingStream));
return this;
}
/**
* Add metadata oaire.awardNumber
*
* @param metadata awardNumber
* @return ExternalDataObjectBuilder
*/
public ExternalDataObjectBuilder addAwardNumber(String awardNumber) {
this.externalDataObject.addMetadata(new MetadataValueDTO("oaire", "awardNumber", null, null, awardNumber));
return this;
}
/**
* Add metadata oaire.awardURI
*
* @param metadata websiteUrl
* @return ExternalDataObjectBuilder
*/
public ExternalDataObjectBuilder addAwardURI(String websiteUrl) {
this.externalDataObject.addMetadata(new MetadataValueDTO("oaire", "awardURI", null, null, websiteUrl));
return this;
}
/**
* Add metadata dc.title.alternative
*
* @param metadata fundingItemAcronym
* @return ExternalDataObjectBuilder
*/
public ExternalDataObjectBuilder addFundingItemAcronym(String fundingItemAcronym) {
this.externalDataObject
.addMetadata(new MetadataValueDTO("dc", "title", "alternative", null, fundingItemAcronym));
return this;
}
/**
* Add metadata dc.coverage.spatial
*
* @param metadata funderJuristiction
* @return ExternalDataObjectBuilder
*/
public ExternalDataObjectBuilder addFunderJuristiction(String funderJuristiction) {
this.externalDataObject
.addMetadata(new MetadataValueDTO("dc", "coverage", "spatial", null, funderJuristiction));
return this;
}
/**
* Add metadata dc.description
*
* @param metadata description
* @return ExternalDataObjectBuilder
*/
public ExternalDataObjectBuilder addDescription(String description) {
this.externalDataObject.addMetadata(new MetadataValueDTO("dc", "description", null, null, description));
return this;
}
/**
* Add metadata dc.subject
*
* @param metadata subject
* @return ExternalDataObjectBuilder
*/
public ExternalDataObjectBuilder addSubject(String subject) {
this.externalDataObject.addMetadata(new MetadataValueDTO("dc", "subject", null, null, subject));
return this; return this;
} }
@@ -508,4 +399,5 @@ public class OpenAIREFundingDataProvider extends AbstractExternalDataProvider {
return this.externalDataObject; return this.externalDataObject;
} }
} }
} }

View File

@@ -126,7 +126,7 @@ public class UpdateHandlePrefix {
); );
} catch (SQLException sqle) { } catch (SQLException sqle) {
if ((context != null) && (context.isValid())) { if (context.isValid()) {
context.abort(); context.abort();
context = null; context = null;
} }

View File

@@ -19,10 +19,8 @@ import java.util.Calendar;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
import javax.xml.parsers.ParserConfigurationException; import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException; import javax.xml.xpath.XPathExpressionException;
import ORG.oclc.oai.harvester2.verb.Identify;
import ORG.oclc.oai.harvester2.verb.ListIdentifiers;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.harvest.dao.HarvestedCollectionDAO; import org.dspace.harvest.dao.HarvestedCollectionDAO;
@@ -33,6 +31,8 @@ import org.jdom2.Document;
import org.jdom2.Element; import org.jdom2.Element;
import org.jdom2.Namespace; import org.jdom2.Namespace;
import org.jdom2.input.DOMBuilder; import org.jdom2.input.DOMBuilder;
import org.oclc.oai.harvester2.verb.Identify;
import org.oclc.oai.harvester2.verb.ListIdentifiers;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.w3c.dom.DOMException; import org.w3c.dom.DOMException;
import org.xml.sax.SAXException; import org.xml.sax.SAXException;
@@ -198,7 +198,7 @@ public class HarvestedCollectionServiceImpl implements HarvestedCollectionServic
// First, see if we can contact the target server at all. // First, see if we can contact the target server at all.
try { try {
new Identify(oaiSource); new Identify(oaiSource);
} catch (IOException | ParserConfigurationException | TransformerException | SAXException ex) { } catch (IOException | ParserConfigurationException | XPathExpressionException | SAXException ex) {
errorSet.add(OAI_ADDRESS_ERROR + ": OAI server could not be reached."); errorSet.add(OAI_ADDRESS_ERROR + ": OAI server could not be reached.");
return errorSet; return errorSet;
} }
@@ -216,7 +216,7 @@ public class HarvestedCollectionServiceImpl implements HarvestedCollectionServic
try { try {
OREOAIPrefix = OAIHarvester.oaiResolveNamespaceToPrefix(oaiSource, OAIHarvester.getORENamespace().getURI()); OREOAIPrefix = OAIHarvester.oaiResolveNamespaceToPrefix(oaiSource, OAIHarvester.getORENamespace().getURI());
DMDOAIPrefix = OAIHarvester.oaiResolveNamespaceToPrefix(oaiSource, DMD_NS.getURI()); DMDOAIPrefix = OAIHarvester.oaiResolveNamespaceToPrefix(oaiSource, DMD_NS.getURI());
} catch (IOException | ParserConfigurationException | TransformerException | SAXException ex) { } catch (IOException | ParserConfigurationException | XPathExpressionException | SAXException ex) {
errorSet.add(OAI_ADDRESS_ERROR errorSet.add(OAI_ADDRESS_ERROR
+ ": OAI did not respond to ListMetadataFormats query (" + ": OAI did not respond to ListMetadataFormats query ("
+ ORE_NS.getPrefix() + ":" + OREOAIPrefix + " ; " + ORE_NS.getPrefix() + ":" + OREOAIPrefix + " ; "
@@ -260,7 +260,8 @@ public class HarvestedCollectionServiceImpl implements HarvestedCollectionServic
} }
} }
} }
} catch (IOException | ParserConfigurationException | TransformerException | DOMException | SAXException e) { } catch (IOException | ParserConfigurationException | XPathExpressionException | DOMException |
SAXException e) {
errorSet.add(OAI_ADDRESS_ERROR + ": OAI server could not be reached"); errorSet.add(OAI_ADDRESS_ERROR + ": OAI server could not be reached");
return errorSet; return errorSet;
} catch (RuntimeException re) { } catch (RuntimeException re) {

View File

@@ -28,13 +28,10 @@ import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.TimeZone; import java.util.TimeZone;
import javax.xml.parsers.ParserConfigurationException; import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException; import javax.xml.xpath.XPathExpressionException;
import ORG.oclc.oai.harvester2.verb.GetRecord;
import ORG.oclc.oai.harvester2.verb.Identify;
import ORG.oclc.oai.harvester2.verb.ListMetadataFormats;
import ORG.oclc.oai.harvester2.verb.ListRecords;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream; import org.dspace.content.Bitstream;
@@ -75,6 +72,10 @@ import org.jdom2.Element;
import org.jdom2.Namespace; import org.jdom2.Namespace;
import org.jdom2.input.DOMBuilder; import org.jdom2.input.DOMBuilder;
import org.jdom2.output.XMLOutputter; import org.jdom2.output.XMLOutputter;
import org.oclc.oai.harvester2.verb.GetRecord;
import org.oclc.oai.harvester2.verb.Identify;
import org.oclc.oai.harvester2.verb.ListMetadataFormats;
import org.oclc.oai.harvester2.verb.ListRecords;
import org.xml.sax.SAXException; import org.xml.sax.SAXException;
@@ -91,7 +92,7 @@ public class OAIHarvester {
/** /**
* log4j category * log4j category
*/ */
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(OAIHarvester.class); private static final Logger log = LogManager.getLogger();
private static final Namespace ATOM_NS = Namespace.getNamespace("http://www.w3.org/2005/Atom"); private static final Namespace ATOM_NS = Namespace.getNamespace("http://www.w3.org/2005/Atom");
private static final Namespace ORE_NS = Namespace.getNamespace("http://www.openarchives.org/ore/terms/"); private static final Namespace ORE_NS = Namespace.getNamespace("http://www.openarchives.org/ore/terms/");
@@ -133,7 +134,7 @@ public class OAIHarvester {
private String metadataKey; private String metadataKey;
// DOMbuilder class for the DOM -> JDOM conversions // DOMbuilder class for the DOM -> JDOM conversions
private static DOMBuilder db = new DOMBuilder(); private static final DOMBuilder db = new DOMBuilder();
// The point at which this thread should terminate itself // The point at which this thread should terminate itself
/* Initialize the harvester with a collection object */ /* Initialize the harvester with a collection object */
@@ -331,18 +332,16 @@ public class OAIHarvester {
// main loop to keep requesting more objects until we're done // main loop to keep requesting more objects until we're done
List<Element> records; List<Element> records;
Set<String> errorSet = new HashSet<String>(); Set<String> errorSet = new HashSet<>();
ListRecords listRecords = new ListRecords(oaiSource, fromDate, toDate, oaiSetId, descMDPrefix); ListRecords listRecords = new ListRecords(oaiSource, fromDate, toDate, oaiSetId, descMDPrefix);
log.debug( log.debug(
"Harvesting request parameters: listRecords " + oaiSource + " " + fromDate + " " + toDate + " " + "Harvesting request parameters: listRecords " + oaiSource + " " + fromDate + " " + toDate + " " +
oaiSetId + " " + descMDPrefix); oaiSetId + " " + descMDPrefix);
if (listRecords != null) {
log.info("HTTP Request: " + listRecords.getRequestURL()); log.info("HTTP Request: " + listRecords.getRequestURL());
}
while (listRecords != null) { while (listRecords != null) {
records = new ArrayList<Element>(); records = new ArrayList<>();
oaiResponse = db.build(listRecords.getDocument()); oaiResponse = db.build(listRecords.getDocument());
if (listRecords.getErrors() != null && listRecords.getErrors().getLength() > 0) { if (listRecords.getErrors() != null && listRecords.getErrors().getLength() > 0) {
@@ -376,8 +375,8 @@ public class OAIHarvester {
} }
// Process the obtained records // Process the obtained records
if (records != null && records.size() > 0) { if (!records.isEmpty()) {
log.info("Found " + records.size() + " records to process"); log.info("Found {} records to process", records::size);
for (Element record : records) { for (Element record : records) {
// check for STOP interrupt from the scheduler // check for STOP interrupt from the scheduler
if (HarvestScheduler.getInterrupt() == HarvestScheduler.HARVESTER_INTERRUPT_STOP) { if (HarvestScheduler.getInterrupt() == HarvestScheduler.HARVESTER_INTERRUPT_STOP) {
@@ -439,7 +438,8 @@ public class OAIHarvester {
harvestRow.setHarvestStatus(HarvestedCollection.STATUS_UNKNOWN_ERROR); harvestRow.setHarvestStatus(HarvestedCollection.STATUS_UNKNOWN_ERROR);
harvestedCollectionService.update(ourContext, harvestRow); harvestedCollectionService.update(ourContext, harvestRow);
alertAdmin(HarvestedCollection.STATUS_UNKNOWN_ERROR, ex); alertAdmin(HarvestedCollection.STATUS_UNKNOWN_ERROR, ex);
log.error("Error occurred while generating an OAI response: " + ex.getMessage() + " " + ex.getCause(), ex); log.error("Error occurred while generating an OAI response: {} {}",
ex.getMessage(), ex.getCause(), ex);
ourContext.complete(); ourContext.complete();
return; return;
} finally { } finally {
@@ -493,11 +493,11 @@ public class OAIHarvester {
* @throws HarvestingException if harvesting error * @throws HarvestingException if harvesting error
* @throws ParserConfigurationException XML parsing error * @throws ParserConfigurationException XML parsing error
* @throws SAXException if XML processing error * @throws SAXException if XML processing error
* @throws TransformerException if XML transformer error * @throws XPathExpressionException if XPath error
*/ */
protected void processRecord(Element record, String OREPrefix, final long currentRecord, long totalListSize) protected void processRecord(Element record, String OREPrefix, final long currentRecord, long totalListSize)
throws SQLException, AuthorizeException, IOException, CrosswalkException, HarvestingException, throws SQLException, AuthorizeException, IOException, CrosswalkException, HarvestingException,
ParserConfigurationException, SAXException, TransformerException { ParserConfigurationException, SAXException, XPathExpressionException {
WorkspaceItem wi = null; WorkspaceItem wi = null;
Date timeStart = new Date(); Date timeStart = new Date();
@@ -623,7 +623,7 @@ public class OAIHarvester {
List<Bundle> OREBundles = itemService.getBundles(item, "ORE"); List<Bundle> OREBundles = itemService.getBundles(item, "ORE");
Bitstream OREBitstream = null; Bitstream OREBitstream = null;
if (OREBundles.size() > 0) { if (!OREBundles.isEmpty()) {
OREBundle = OREBundles.get(0); OREBundle = OREBundles.get(0);
} else { } else {
OREBundle = bundleService.create(ourContext, item, "ORE"); OREBundle = bundleService.create(ourContext, item, "ORE");
@@ -698,7 +698,7 @@ public class OAIHarvester {
List<MetadataValue> values = itemService.getMetadata(item, "dc", "identifier", Item.ANY, Item.ANY); List<MetadataValue> values = itemService.getMetadata(item, "dc", "identifier", Item.ANY, Item.ANY);
if (values.size() > 0 && acceptedHandleServers != null) { if (!values.isEmpty() && acceptedHandleServers != null) {
for (MetadataValue value : values) { for (MetadataValue value : values) {
// 0 1 2 3 4 // 0 1 2 3 4
// https://hdl.handle.net/1234/12 // https://hdl.handle.net/1234/12
@@ -732,7 +732,7 @@ public class OAIHarvester {
* @return a string in the format 'yyyy-mm-ddThh:mm:ssZ' and converted to UTC timezone * @return a string in the format 'yyyy-mm-ddThh:mm:ssZ' and converted to UTC timezone
*/ */
private String processDate(Date date) { private String processDate(Date date) {
Integer timePad = configurationService.getIntProperty("oai.harvester.timePadding"); int timePad = configurationService.getIntProperty("oai.harvester.timePadding");
if (timePad == 0) { if (timePad == 0) {
timePad = 120; timePad = 120;
@@ -769,10 +769,10 @@ public class OAIHarvester {
* @throws IOException if IO error * @throws IOException if IO error
* @throws SAXException if XML processing error * @throws SAXException if XML processing error
* @throws ParserConfigurationException XML parsing error * @throws ParserConfigurationException XML parsing error
* @throws TransformerException if XML transformer error * @throws XPathExpressionException if XPath error
*/ */
private String oaiGetDateGranularity(String oaiSource) private String oaiGetDateGranularity(String oaiSource)
throws IOException, ParserConfigurationException, SAXException, TransformerException { throws IOException, ParserConfigurationException, SAXException, XPathExpressionException {
Identify iden = new Identify(oaiSource); Identify iden = new Identify(oaiSource);
return iden.getDocument().getElementsByTagNameNS(OAI_NS.getURI(), "granularity").item(0).getTextContent(); return iden.getDocument().getElementsByTagNameNS(OAI_NS.getURI(), "granularity").item(0).getTextContent();
} }
@@ -789,17 +789,16 @@ public class OAIHarvester {
* operations. * operations.
* @throws ParserConfigurationException XML parsing error * @throws ParserConfigurationException XML parsing error
* @throws SAXException if XML processing error * @throws SAXException if XML processing error
* @throws TransformerException if XML transformer error * @throws XPathExpressionException if XPath error
* @throws ConnectException if could not connect to OAI server * @throws ConnectException if could not connect to OAI server
*/ */
public static String oaiResolveNamespaceToPrefix(String oaiSource, String MDNamespace) public static String oaiResolveNamespaceToPrefix(String oaiSource, String MDNamespace)
throws IOException, ParserConfigurationException, SAXException, TransformerException, ConnectException { throws IOException, ParserConfigurationException, SAXException, XPathExpressionException, ConnectException {
String metaPrefix = null; String metaPrefix = null;
// Query the OAI server for the metadata // Query the OAI server for the metadata
ListMetadataFormats lmf = new ListMetadataFormats(oaiSource); ListMetadataFormats lmf = new ListMetadataFormats(oaiSource);
if (lmf != null) {
Document lmfResponse = db.build(lmf.getDocument()); Document lmfResponse = db.build(lmf.getDocument());
List<Element> mdFormats = lmfResponse.getRootElement().getChild("ListMetadataFormats", OAI_NS) List<Element> mdFormats = lmfResponse.getRootElement().getChild("ListMetadataFormats", OAI_NS)
.getChildren("metadataFormat", OAI_NS); .getChildren("metadataFormat", OAI_NS);
@@ -810,7 +809,6 @@ public class OAIHarvester {
break; break;
} }
} }
}
return metaPrefix; return metaPrefix;
} }
@@ -868,15 +866,15 @@ public class OAIHarvester {
* operations. * operations.
* @throws ParserConfigurationException XML parsing error * @throws ParserConfigurationException XML parsing error
* @throws SAXException if XML processing error * @throws SAXException if XML processing error
* @throws TransformerException if XML transformer error * @throws XPathExpressionException if XPath error
* @throws HarvestingException if harvesting error * @throws HarvestingException if harvesting error
*/ */
protected List<Element> getMDrecord(String oaiSource, String itemOaiId, String metadataPrefix) protected List<Element> getMDrecord(String oaiSource, String itemOaiId, String metadataPrefix)
throws IOException, ParserConfigurationException, SAXException, TransformerException, HarvestingException { throws IOException, ParserConfigurationException, SAXException, XPathExpressionException, HarvestingException {
GetRecord getRecord = new GetRecord(oaiSource, itemOaiId, metadataPrefix); GetRecord getRecord = new GetRecord(oaiSource, itemOaiId, metadataPrefix);
Set<String> errorSet = new HashSet<String>(); Set<String> errorSet = new HashSet<>();
// If the metadata is not available for this item, can the whole thing // If the metadata is not available for this item, can the whole thing
if (getRecord != null && getRecord.getErrors() != null && getRecord.getErrors().getLength() > 0) { if (getRecord.getErrors() != null && getRecord.getErrors().getLength() > 0) {
for (int i = 0; i < getRecord.getErrors().getLength(); i++) { for (int i = 0; i < getRecord.getErrors().getLength(); i++) {
String errorCode = getRecord.getErrors().item(i).getAttributes().getNamedItem("code").getTextContent(); String errorCode = getRecord.getErrors().item(i).getAttributes().getNamedItem("code").getTextContent();
errorSet.add(errorCode); errorSet.add(errorCode);

View File

@@ -26,9 +26,8 @@ public class EmbargoCheck extends Check {
@Override @Override
public String run(ReportInfo ri) { public String run(ReportInfo ri) {
String ret = ""; String ret = "";
Context context = null; Context context = new Context();
try { try {
context = new Context();
Iterator<Item> item_iter = null; Iterator<Item> item_iter = null;
try { try {
item_iter = embargoService.findItemsByLiftMetadata(context); item_iter = embargoService.findItemsByLiftMetadata(context);
@@ -56,9 +55,7 @@ public class EmbargoCheck extends Check {
} catch (SQLException e) { } catch (SQLException e) {
error(e); error(e);
try { try {
if (null != context) {
context.abort(); context.abort();
}
} catch (Exception e1) { } catch (Exception e1) {
error(e); error(e);
} }

View File

@@ -543,7 +543,7 @@ public class DOIIdentifierProvider extends FilteredIdentifierProvider {
if (DELETED.equals(doiRow.getStatus()) || TO_BE_DELETED.equals(doiRow.getStatus())) { if (DELETED.equals(doiRow.getStatus()) || TO_BE_DELETED.equals(doiRow.getStatus())) {
throw new DOIIdentifierException("You tried to update the metadata" throw new DOIIdentifierException("You tried to update the metadata"
+ "of a DOI that is marked as DELETED.", + " of a DOI that is marked as DELETED.",
DOIIdentifierException.DOI_IS_DELETED); DOIIdentifierException.DOI_IS_DELETED);
} }

View File

@@ -261,7 +261,6 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider {
doiService.update(context, doi); doiService.update(context, doi);
return doi.getDoi(); return doi.getDoi();
} }
assert (previousVersionDOI != null);
String identifier = getBareDOI(previousVersionDOI); String identifier = getBareDOI(previousVersionDOI);

View File

@@ -117,7 +117,7 @@ public class VersionedHandleIdentifierProviderWithCanonicalHandles extends Ident
// check if we have a previous item // check if we have a previous item
if (previous != null) { if (previous != null) {
try { try {
// If we have a reviewer he/she might not have the // If we have a reviewer they might not have the
// rights to edit the metadata of thes previous item. // rights to edit the metadata of thes previous item.
// Temporarly grant them: // Temporarly grant them:
context.turnOffAuthorisationSystem(); context.turnOffAuthorisationSystem();

View File

@@ -92,6 +92,9 @@ public interface IdentifierService {
throws AuthorizeException, SQLException, IdentifierException; throws AuthorizeException, SQLException, IdentifierException;
/** /**
* Used to register newly-minted identifiers. Each provider is responsible
* for creating the appropriate identifier. All providers are interrogated.
*
* @param context The relevant DSpace Context. * @param context The relevant DSpace Context.
* @param dso DSpace object to be registered * @param dso DSpace object to be registered
* @throws AuthorizeException if authorization error * @throws AuthorizeException if authorization error
@@ -101,7 +104,7 @@ public interface IdentifierService {
void register(Context context, DSpaceObject dso) throws AuthorizeException, SQLException, IdentifierException; void register(Context context, DSpaceObject dso) throws AuthorizeException, SQLException, IdentifierException;
/** /**
* Used to Register a specific Identifier (for example a Handle, hdl:1234.5/6) * Used to Register a specific Identifier (for example a Handle, hdl:1234.5/6).
* The provider is responsible for detecting and processing the appropriate * The provider is responsible for detecting and processing the appropriate
* identifier. All Providers are interrogated. Multiple providers * identifier. All Providers are interrogated. Multiple providers
* can process the same identifier. * can process the same identifier.

View File

@@ -70,11 +70,24 @@ public class BibtexImportMetadataSourceServiceImpl extends AbstractPlainMetadata
keyValueItem.setKey(entry.getValue().getType().getValue()); keyValueItem.setKey(entry.getValue().getType().getValue());
keyValueItem.setValue(entry.getKey().getValue()); keyValueItem.setValue(entry.getKey().getValue());
keyValues.add(keyValueItem); keyValues.add(keyValueItem);
PlainMetadataKeyValueItem typeItem = new PlainMetadataKeyValueItem();
typeItem.setKey("type");
typeItem.setValue(entry.getValue().getType().getValue());
keyValues.add(typeItem);
if (entry.getValue().getFields() != null) { if (entry.getValue().getFields() != null) {
for (Entry<Key,Value> subentry : entry.getValue().getFields().entrySet()) { for (Entry<Key,Value> subentry : entry.getValue().getFields().entrySet()) {
PlainMetadataKeyValueItem innerItem = new PlainMetadataKeyValueItem(); PlainMetadataKeyValueItem innerItem = new PlainMetadataKeyValueItem();
innerItem.setKey(subentry.getKey().getValue()); innerItem.setKey(subentry.getKey().getValue().toLowerCase());
innerItem.setValue(subentry.getValue().toUserString()); String latexString = subentry.getValue().toUserString();
try {
org.jbibtex.LaTeXParser laTeXParser = new org.jbibtex.LaTeXParser();
List<org.jbibtex.LaTeXObject> latexObjects = laTeXParser.parse(latexString);
org.jbibtex.LaTeXPrinter laTeXPrinter = new org.jbibtex.LaTeXPrinter();
String plainTextString = laTeXPrinter.print(latexObjects);
innerItem.setValue(plainTextString.replaceAll("\n", " "));
} catch (ParseException e) {
innerItem.setValue(latexString);
}
keyValues.add(innerItem); keyValues.add(innerItem);
} }
} }

View File

@@ -0,0 +1,65 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
/**
* Wrapper class used to split another MetadataContributor's output into distinct values.
* The split is performed by matching a regular expression against the wrapped MetadataContributor's output.
*
* @author Philipp Rumpf (philipp.rumpf@uni-bamberg.de)
*/
public class SplitMetadataContributor<T> implements MetadataContributor<T> {
private final MetadataContributor<T> innerContributor;
private final String regex;
/**
* @param innerContributor The MetadataContributor whose output is split
* @param regex A regular expression matching the separator between different values
*/
public SplitMetadataContributor(MetadataContributor<T> innerContributor, String regex) {
this.innerContributor = innerContributor;
this.regex = regex;
}
@Override
public void setMetadataFieldMapping(MetadataFieldMapping<T, MetadataContributor<T>> rt) {
}
/**
* Each metadatum returned by the wrapped MetadataContributor is split into one or more metadata values
* based on the provided regular expression.
*
* @param t The recordType object to retrieve metadata from
* @return The collection of processed metadata values
*/
@Override
public Collection<MetadatumDTO> contributeMetadata(T t) {
Collection<MetadatumDTO> metadata = innerContributor.contributeMetadata(t);
ArrayList<MetadatumDTO> splitMetadata = new ArrayList<>();
for (MetadatumDTO metadatumDTO : metadata) {
String[] split = metadatumDTO.getValue().split(regex);
for (String splitItem : split) {
MetadatumDTO splitMetadatumDTO = new MetadatumDTO();
splitMetadatumDTO.setSchema(metadatumDTO.getSchema());
splitMetadatumDTO.setElement(metadatumDTO.getElement());
splitMetadatumDTO.setQualifier(metadatumDTO.getQualifier());
splitMetadatumDTO.setValue(splitItem);
splitMetadata.add(splitMetadatumDTO);
}
}
return splitMetadata;
}
}

View File

@@ -305,6 +305,12 @@ public class ProcessServiceImpl implements ProcessService {
tempFile.delete(); tempFile.delete();
} }
@Override
public List<Process> findByStatusAndCreationTimeOlderThan(Context context, List<ProcessStatus> statuses,
Date date) throws SQLException {
return this.processDAO.findByStatusAndCreationTimeOlderThan(context, statuses, date);
}
private String formatLogLine(int processId, String scriptName, String output, ProcessLogLevel processLogLevel) { private String formatLogLine(int processId, String scriptName, String output, ProcessLogLevel processLogLevel) {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
StringBuilder sb = new StringBuilder(); StringBuilder sb = new StringBuilder();

View File

@@ -10,11 +10,13 @@ package org.dspace.scripts.service;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.Date;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream; import org.dspace.content.Bitstream;
import org.dspace.content.ProcessStatus;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group; import org.dspace.eperson.Group;
@@ -240,4 +242,17 @@ public interface ProcessService {
*/ */
void createLogBitstream(Context context, Process process) void createLogBitstream(Context context, Process process)
throws IOException, SQLException, AuthorizeException; throws IOException, SQLException, AuthorizeException;
/**
* Find all the processes with one of the given status and with a creation time
* older than the specified date.
*
* @param context The relevant DSpace context
* @param statuses the statuses of the processes to search for
* @param date the creation date to search for
* @return The list of all Processes which match requirements
* @throws AuthorizeException If something goes wrong
*/
List<Process> findByStatusAndCreationTimeOlderThan(Context context, List<ProcessStatus> statuses, Date date)
throws SQLException;
} }

View File

@@ -139,7 +139,7 @@ public class ClientInfoServiceImpl implements ClientInfoService {
// If our IPTable is not empty, log the trusted proxies and return it // If our IPTable is not empty, log the trusted proxies and return it
if (!ipTable.isEmpty()) { if (!ipTable.isEmpty()) {
log.info("Trusted proxies (configure via 'proxies.trusted.ipranges'): {}", ipTable.toSet().toString()); log.info("Trusted proxies (configure via 'proxies.trusted.ipranges'): {}", ipTable);
return ipTable; return ipTable;
} else { } else {
return null; return null;

View File

@@ -86,23 +86,23 @@ public class OrderFormat {
} }
// No delegates found, so apply defaults // No delegates found, so apply defaults
if (type.equalsIgnoreCase(OrderFormat.AUTHOR) && authorDelegate != null) { if (type.equalsIgnoreCase(OrderFormat.AUTHOR)) {
return authorDelegate.makeSortString(value, language); return authorDelegate.makeSortString(value, language);
} }
if (type.equalsIgnoreCase(OrderFormat.TITLE) && titleDelegate != null) { if (type.equalsIgnoreCase(OrderFormat.TITLE)) {
return titleDelegate.makeSortString(value, language); return titleDelegate.makeSortString(value, language);
} }
if (type.equalsIgnoreCase(OrderFormat.TEXT) && textDelegate != null) { if (type.equalsIgnoreCase(OrderFormat.TEXT)) {
return textDelegate.makeSortString(value, language); return textDelegate.makeSortString(value, language);
} }
if (type.equalsIgnoreCase(OrderFormat.DATE) && dateDelegate != null) { if (type.equalsIgnoreCase(OrderFormat.DATE)) {
return dateDelegate.makeSortString(value, language); return dateDelegate.makeSortString(value, language);
} }
if (type.equalsIgnoreCase(OrderFormat.AUTHORITY) && authorityDelegate != null) { if (type.equalsIgnoreCase(OrderFormat.AUTHORITY)) {
return authorityDelegate.makeSortString(value, language); return authorityDelegate.makeSortString(value, language);
} }
} }

View File

@@ -10,6 +10,7 @@ package org.dspace.sort;
import org.dspace.text.filter.DecomposeDiactritics; import org.dspace.text.filter.DecomposeDiactritics;
import org.dspace.text.filter.LowerCaseAndTrim; import org.dspace.text.filter.LowerCaseAndTrim;
import org.dspace.text.filter.StandardInitialArticleWord; import org.dspace.text.filter.StandardInitialArticleWord;
import org.dspace.text.filter.StripDiacritics;
import org.dspace.text.filter.TextFilter; import org.dspace.text.filter.TextFilter;
/** /**
@@ -21,6 +22,7 @@ public class OrderFormatTitle extends AbstractTextFilterOFD {
{ {
filters = new TextFilter[] {new StandardInitialArticleWord(), filters = new TextFilter[] {new StandardInitialArticleWord(),
new DecomposeDiactritics(), new DecomposeDiactritics(),
new StripDiacritics(),
new LowerCaseAndTrim()}; new LowerCaseAndTrim()};
} }
} }

View File

@@ -10,6 +10,7 @@ package org.dspace.sort;
import org.dspace.text.filter.DecomposeDiactritics; import org.dspace.text.filter.DecomposeDiactritics;
import org.dspace.text.filter.LowerCaseAndTrim; import org.dspace.text.filter.LowerCaseAndTrim;
import org.dspace.text.filter.MARC21InitialArticleWord; import org.dspace.text.filter.MARC21InitialArticleWord;
import org.dspace.text.filter.StripDiacritics;
import org.dspace.text.filter.StripLeadingNonAlphaNum; import org.dspace.text.filter.StripLeadingNonAlphaNum;
import org.dspace.text.filter.TextFilter; import org.dspace.text.filter.TextFilter;
@@ -22,6 +23,7 @@ public class OrderFormatTitleMarc21 extends AbstractTextFilterOFD {
{ {
filters = new TextFilter[] {new MARC21InitialArticleWord(), filters = new TextFilter[] {new MARC21InitialArticleWord(),
new DecomposeDiactritics(), new DecomposeDiactritics(),
new StripDiacritics(),
new StripLeadingNonAlphaNum(), new StripLeadingNonAlphaNum(),
new LowerCaseAndTrim()}; new LowerCaseAndTrim()};
} }

View File

@@ -187,7 +187,7 @@ public class DatasetTimeGenerator extends DatasetGenerator {
cal2.clear(Calendar.HOUR); cal2.clear(Calendar.HOUR);
cal1.clear(Calendar.HOUR_OF_DAY); cal1.clear(Calendar.HOUR_OF_DAY);
cal2.clear(Calendar.HOUR_OF_DAY); cal2.clear(Calendar.HOUR_OF_DAY);
//yet i know calendar just won't clear his hours //yet i know calendar just won't clear its hours
cal1.set(Calendar.HOUR_OF_DAY, 0); cal1.set(Calendar.HOUR_OF_DAY, 0);
cal2.set(Calendar.HOUR_OF_DAY, 0); cal2.set(Calendar.HOUR_OF_DAY, 0);
} }

View File

@@ -621,6 +621,10 @@ public class StatisticsDataVisits extends StatisticsData {
} }
if (dsoId != null && query.dsoType != -1) { if (dsoId != null && query.dsoType != -1) {
// Store the UUID of the DSO as an attribute. Needed in particular for Bitstream download usage reports,
// as the Bitstream itself won't be available when converting points to their REST representation
attrs.put("id", dsoId);
switch (query.dsoType) { switch (query.dsoType) {
case Constants.BITSTREAM: case Constants.BITSTREAM:
Bitstream bit = bitstreamService.findByIdOrLegacyId(context, dsoId); Bitstream bit = bitstreamService.findByIdOrLegacyId(context, dsoId);

View File

@@ -7,11 +7,13 @@
*/ */
package org.dspace.statistics.util; package org.dspace.statistics.util;
import java.util.HashMap; import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.HashSet; import java.util.HashSet;
import java.util.Map; import java.util.Iterator;
import java.util.Set; import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
@@ -25,8 +27,40 @@ public class IPTable {
private static final Logger log = LogManager.getLogger(IPTable.class); private static final Logger log = LogManager.getLogger(IPTable.class);
/* A lookup tree for IP addresses and SubnetRanges */ /* A lookup tree for IP addresses and SubnetRanges */
private final Map<String, Map<String, Map<String, Set<String>>>> map private final Set<IPRange> ipRanges = new HashSet<>();
= new HashMap<>();
/**
* Internal class representing an IP range
*/
static class IPRange {
/* Lowest address in the range */
private final long ipLo;
/* Highest address in the range */
private final long ipHi;
IPRange(long ipLo, long ipHi) {
this.ipLo = ipLo;
this.ipHi = ipHi;
}
/**
* Get the lowest address in the range
* @return the lowest address as a long integer
*/
public long getIpLo() {
return ipLo;
}
/**
* Get the highest address in the range
* @return the highest address as a long integer
*/
public long getIpHi() {
return ipHi;
}
}
/** /**
* Can be full v4 IP, subnet or range string. * Can be full v4 IP, subnet or range string.
@@ -45,79 +79,90 @@ public class IPTable {
*/ */
public void add(String ip) throws IPFormatException { public void add(String ip) throws IPFormatException {
String[] start; String start;
String[] end; String end;
String[] range = ip.split("-"); String[] range = ip.split("-");
if (range.length >= 2) { if (range.length == 2) {
start = range[0].trim().split("/")[0].split("\\."); start = range[0].trim();
end = range[1].trim().split("/")[0].split("\\."); end = range[1].trim();
if (start.length != 4 || end.length != 4) { try {
throw new IPFormatException(ip + " - Ranges need to be full IPv4 Addresses"); long ipLo = ipToLong(InetAddress.getByName(start));
} long ipHi = ipToLong(InetAddress.getByName(end));
ipRanges.add(new IPRange(ipLo, ipHi));
if (!(start[0].equals(end[0]) && start[1].equals(end[1]) && start[2].equals(end[2]))) { return;
throw new IPFormatException(ip + " - Ranges can only be across the last subnet x.y.z.0 - x.y.z.254"); } catch (UnknownHostException e) {
throw new IPFormatException(ip + " - Range format should be similar to 1.2.3.0-1.2.3.255");
} }
} else { } else {
//need to ignore CIDR notation for the moment. // Convert implicit ranges to netmask format
//ip = ip.split("\\/")[0]; // 192 -> 192.0.0.0/8
// 192.168 -> 192.168.0.0/16
String[] subnets = ip.split("\\."); // 192.168.1 -> 192.168.1.0/24
int periods = StringUtils.countMatches(ip, '.');
if (subnets.length < 3) { if (periods < 3) {
throw new IPFormatException(ip + " - require at least three subnet places (255.255.255.0"); ip = StringUtils.join(ip, StringUtils.repeat(".0", 4 - periods - 1), "/", (periods + 1) * 8);
} }
start = subnets; if (ip.contains("/")) {
end = subnets; String[] parts = ip.split("/");
} try {
long ipLong = ipToLong(InetAddress.getByName(parts[0]));
if (start.length >= 3) { long mask = (long) Math.pow(2, 32 - Integer.parseInt(parts[1]));
Map<String, Map<String, Set<String>>> first = map.get(start[0]); long ipLo = (ipLong / mask) * mask;
long ipHi = (( (ipLong / mask) + 1) * mask) - 1;
if (first == null) { ipRanges.add(new IPRange(ipLo, ipHi));
first = new HashMap<>();
map.put(start[0], first);
}
Map<String, Set<String>> second = first.get(start[1]);
if (second == null) {
second = new HashMap<>();
first.put(start[1], second);
}
Set<String> third = second.get(start[2]);
if (third == null) {
third = new HashSet<>();
second.put(start[2], third);
}
//now populate fourth place (* or value 0-254);
if (start.length == 3) {
third.add("*");
}
if (third.contains("*")) {
return; return;
} catch (Exception e) {
throw new IPFormatException(ip + " - Range format should be similar to 172.16.0.0/12");
}
} else {
try {
long ipLo = ipToLong(InetAddress.getByName(ip));
ipRanges.add(new IPRange(ipLo, ipLo));
return;
} catch (UnknownHostException e) {
throw new IPFormatException(ip + " - IP address format should be similar to 1.2.3.14");
}
}
}
}
/**
* Convert an IP address to a long integer
* @param ip the IP address
* @return
*/
public static long ipToLong(InetAddress ip) {
byte[] octets = ip.getAddress();
long result = 0;
for (byte octet : octets) {
result <<= 8;
result |= octet & 0xff;
}
return result;
} }
if (start.length >= 4) { /**
int s = Integer.valueOf(start[3]); * Convert a long integer into an IP address string
int e = Integer.valueOf(end[3]); * @param ip the IP address as a long integer
for (int i = s; i <= e; i++) { * @return
third.add(String.valueOf(i)); */
} public static String longToIp(long ip) {
} long part = ip;
String[] parts = new String[4];
for (int i = 0; i < 4; i++) {
long octet = part & 0xff;
parts[3 - i] = String.valueOf(octet);
part = part / 256;
} }
return parts[0] + "." + parts[1] + "." + parts[2] + "." + parts[3];
} }
/** /**
@@ -125,75 +170,35 @@ public class IPTable {
* *
* @param ip the address to be tested * @param ip the address to be tested
* @return true if {@code ip} is within this table's limits. Returns false * @return true if {@code ip} is within this table's limits. Returns false
* if {@link ip} looks like an IPv6 address. * if {@code ip} looks like an IPv6 address.
* @throws IPFormatException Exception Class to deal with IPFormat errors. * @throws IPFormatException Exception Class to deal with IPFormat errors.
*/ */
public boolean contains(String ip) throws IPFormatException { public boolean contains(String ip) throws IPFormatException {
String[] subnets = ip.split("\\."); try {
long ipToTest = ipToLong(InetAddress.getByName(ip));
// Does it look like IPv6? return ipRanges.stream()
if (subnets.length > 4 || ip.contains("::")) { .anyMatch(ipRange -> (ipToTest >= ipRange.getIpLo() && ipToTest <= ipRange.getIpHi()));
log.warn("Address {} assumed not to match. IPv6 is not implemented.", ip); } catch (UnknownHostException e) {
return false; throw new IPFormatException("ip not valid");
} }
// Does it look like a subnet?
if (subnets.length < 4) {
throw new IPFormatException("needs to be a single IP address");
}
Map<String, Map<String, Set<String>>> first = map.get(subnets[0]);
if (first == null) {
return false;
}
Map<String, Set<String>> second = first.get(subnets[1]);
if (second == null) {
return false;
}
Set<String> third = second.get(subnets[2]);
if (third == null) {
return false;
}
return third.contains(subnets[3]) || third.contains("*");
} }
/** /**
* Convert to a Set. * Convert to a Set. This set contains all IPs in the range
* *
* @return this table's content as a Set * @return this table's content as a Set
*/ */
public Set<String> toSet() { public Set<String> toSet() {
HashSet<String> set = new HashSet<>(); HashSet<String> set = new HashSet<>();
for (Map.Entry<String, Map<String, Map<String, Set<String>>>> first : map.entrySet()) { Iterator<IPRange> ipRangeIterator = ipRanges.iterator();
String firstString = first.getKey(); while (ipRangeIterator.hasNext()) {
Map<String, Map<String, Set<String>>> secondMap = first.getValue(); IPRange ipRange = ipRangeIterator.next();
long ipLo = ipRange.getIpLo();
for (Map.Entry<String, Map<String, Set<String>>> second : secondMap.entrySet()) { long ipHi = ipRange.getIpHi();
String secondString = second.getKey(); for (long ip = ipLo; ip <= ipHi; ip++) {
Map<String, Set<String>> thirdMap = second.getValue(); set.add(longToIp(ip));
for (Map.Entry<String, Set<String>> third : thirdMap.entrySet()) {
String thirdString = third.getKey();
Set<String> fourthSet = third.getValue();
if (fourthSet.contains("*")) {
set.add(firstString + "." + secondString + "." + thirdString);
} else {
for (String fourth : fourthSet) {
set.add(firstString + "." + secondString + "." + thirdString + "." + fourth);
}
}
}
} }
} }
@@ -205,7 +210,7 @@ public class IPTable {
* @return true if empty, false otherwise * @return true if empty, false otherwise
*/ */
public boolean isEmpty() { public boolean isEmpty() {
return map.isEmpty(); return ipRanges.isEmpty();
} }
/** /**
@@ -217,5 +222,23 @@ public class IPTable {
} }
} }
/**
* Represent this IP table as a string
* @return a string containing all IP ranges in this IP table
*/
@Override
public String toString() {
StringBuilder stringBuilder = new StringBuilder();
Iterator<IPRange> ipRangeIterator = ipRanges.iterator();
while (ipRangeIterator.hasNext()) {
IPRange ipRange = ipRangeIterator.next();
stringBuilder.append(longToIp(ipRange.getIpLo()))
.append("-")
.append(longToIp(ipRange.getIpHi()));
if (ipRangeIterator.hasNext()) {
stringBuilder.append(", ");
}
}
return stringBuilder.toString();
}
} }

View File

@@ -348,9 +348,9 @@ public class StatisticsImporter {
// Get the eperson details // Get the eperson details
EPerson eperson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, user); EPerson eperson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, user);
int epersonId = 0; UUID epersonId = null;
if (eperson != null) { if (eperson != null) {
eperson.getID(); epersonId = eperson.getID();
} }
// Save it in our server // Save it in our server
@@ -365,12 +365,10 @@ public class StatisticsImporter {
sid.addField("city", city); sid.addField("city", city);
sid.addField("latitude", latitude); sid.addField("latitude", latitude);
sid.addField("longitude", longitude); sid.addField("longitude", longitude);
if (epersonId > 0) { if (epersonId != null) {
sid.addField("epersonid", epersonId); sid.addField("epersonid", epersonId);
} }
if (dns != null) {
sid.addField("dns", dns.toLowerCase()); sid.addField("dns", dns.toLowerCase());
}
solrLoggerService.storeParents(sid, dso); solrLoggerService.storeParents(sid, dso);
solr.add(sid); solr.add(sid);

View File

@@ -18,6 +18,7 @@ import java.util.UUID;
import javax.annotation.Nullable; import javax.annotation.Nullable;
import org.apache.commons.collections4.MapUtils; import org.apache.commons.collections4.MapUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.checker.service.ChecksumHistoryService; import org.dspace.checker.service.ChecksumHistoryService;
@@ -57,13 +58,12 @@ import org.springframework.beans.factory.annotation.Autowired;
* be notified of BitstreamStorageManager actions.</p> * be notified of BitstreamStorageManager actions.</p>
* *
* @author Peter Breton, Robert Tansley, David Little, Nathan Sarr * @author Peter Breton, Robert Tansley, David Little, Nathan Sarr
* @version $Revision$
*/ */
public class BitstreamStorageServiceImpl implements BitstreamStorageService, InitializingBean { public class BitstreamStorageServiceImpl implements BitstreamStorageService, InitializingBean {
/** /**
* log4j log * log4j log
*/ */
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(BitstreamStorageServiceImpl.class); private static final Logger log = LogManager.getLogger();
@Autowired(required = true) @Autowired(required = true)
protected BitstreamService bitstreamService; protected BitstreamService bitstreamService;
@@ -73,7 +73,7 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini
/** /**
* asset stores * asset stores
*/ */
private Map<Integer, BitStoreService> stores = new HashMap<Integer, BitStoreService>(); private Map<Integer, BitStoreService> stores = new HashMap<>();
/** /**
* The index of the asset store to use for new bitstreams * The index of the asset store to use for new bitstreams
@@ -222,11 +222,10 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini
@Override @Override
public void cleanup(boolean deleteDbRecords, boolean verbose) throws SQLException, IOException, AuthorizeException { public void cleanup(boolean deleteDbRecords, boolean verbose) throws SQLException, IOException, AuthorizeException {
Context context = null; Context context = new Context(Context.Mode.BATCH_EDIT);
int commitCounter = 0; int commitCounter = 0;
try { try {
context = new Context(Context.Mode.BATCH_EDIT);
context.turnOffAuthorisationSystem(); context.turnOffAuthorisationSystem();
List<Bitstream> storage = bitstreamService.findDeletedBitstreams(context); List<Bitstream> storage = bitstreamService.findDeletedBitstreams(context);
@@ -321,11 +320,9 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini
context.abort(); context.abort();
throw sqle; throw sqle;
} finally { } finally {
if (context != null) {
context.restoreAuthSystemState(); context.restoreAuthSystemState();
} }
} }
}
@Nullable @Nullable
@Override @Override
@@ -386,11 +383,12 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini
* @throws AuthorizeException Exception indicating the current user of the context does not have permission * @throws AuthorizeException Exception indicating the current user of the context does not have permission
* to perform a particular action. * to perform a particular action.
*/ */
@Override
public void migrate(Context context, Integer assetstoreSource, Integer assetstoreDestination, boolean deleteOld, public void migrate(Context context, Integer assetstoreSource, Integer assetstoreDestination, boolean deleteOld,
Integer batchCommitSize) throws IOException, SQLException, AuthorizeException { Integer batchCommitSize) throws IOException, SQLException, AuthorizeException {
//Find all the bitstreams on the old source, copy it to new destination, update store_number, save, remove old //Find all the bitstreams on the old source, copy it to new destination, update store_number, save, remove old
Iterator<Bitstream> allBitstreamsInSource = bitstreamService.findByStoreNumber(context, assetstoreSource); Iterator<Bitstream> allBitstreamsInSource = bitstreamService.findByStoreNumber(context, assetstoreSource);
Integer processedCounter = 0; int processedCounter = 0;
while (allBitstreamsInSource.hasNext()) { while (allBitstreamsInSource.hasNext()) {
Bitstream bitstream = allBitstreamsInSource.next(); Bitstream bitstream = allBitstreamsInSource.next();
@@ -424,6 +422,7 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini
"] completed. " + processedCounter + " objects were transferred."); "] completed. " + processedCounter + " objects were transferred.");
} }
@Override
public void printStores(Context context) { public void printStores(Context context) {
try { try {

View File

@@ -25,6 +25,12 @@ import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest; import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.PutObjectResult; import com.amazonaws.services.s3.model.PutObjectResult;
import com.amazonaws.services.s3.model.S3Object; import com.amazonaws.services.s3.model.S3Object;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpStatus; import org.apache.http.HttpStatus;
@@ -324,27 +330,37 @@ public class S3BitStoreService implements BitStoreService {
* @throws Exception generic exception * @throws Exception generic exception
*/ */
public static void main(String[] args) throws Exception { public static void main(String[] args) throws Exception {
//TODO use proper CLI, or refactor to be a unit test. Can't mock this without keys though. //TODO Perhaps refactor to be a unit test. Can't mock this without keys though.
// parse command line // parse command line
String assetFile = null; Options options = new Options();
String accessKey = null; Option option;
String secretKey = null;
for (int i = 0; i < args.length; i += 2) { option = Option.builder("a").desc("access key").hasArg().required().build();
if (args[i].startsWith("-a")) { options.addOption(option);
accessKey = args[i + 1];
} else if (args[i].startsWith("-s")) {
secretKey = args[i + 1];
} else if (args[i].startsWith("-f")) {
assetFile = args[i + 1];
}
}
if (accessKey == null || secretKey == null || assetFile == null) { option = Option.builder("s").desc("secret key").hasArg().required().build();
System.out.println("Missing arguments - exiting"); options.addOption(option);
option = Option.builder("f").desc("asset file name").hasArg().required().build();
options.addOption(option);
DefaultParser parser = new DefaultParser();
CommandLine command;
try {
command = parser.parse(options, args);
} catch (ParseException e) {
System.err.println(e.getMessage());
new HelpFormatter().printHelp(
S3BitStoreService.class.getSimpleName() + "options", options);
return; return;
} }
String accessKey = command.getOptionValue("a");
String secretKey = command.getOptionValue("s");
String assetFile = command.getOptionValue("f");
S3BitStoreService store = new S3BitStoreService(); S3BitStoreService store = new S3BitStoreService();
AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey); AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey);

View File

@@ -8,15 +8,17 @@
package org.dspace.submit.model; package org.dspace.submit.model;
import java.util.List; import java.util.List;
import javax.inject.Inject;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
/** /**
* A collection of conditions to be met when uploading Bitstreams.
* @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.it) * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.it)
*/ */
public class UploadConfiguration { public class UploadConfiguration {
private ConfigurationService configurationService; private final ConfigurationService configurationService;
private String metadataDefinition; private String metadataDefinition;
private List<AccessConditionOption> options; private List<AccessConditionOption> options;
@@ -24,22 +26,52 @@ public class UploadConfiguration {
private Boolean required; private Boolean required;
private String name; private String name;
/**
* Construct a bitstream uploading configuration.
* @param configurationService DSpace configuration provided by the DI container.
*/
@Inject
public UploadConfiguration(ConfigurationService configurationService) {
this.configurationService = configurationService;
}
/**
* The list of access restriction types from which a submitter may choose.
* @return choices for restricting access to Bitstreams.
*/
public List<AccessConditionOption> getOptions() { public List<AccessConditionOption> getOptions() {
return options; return options;
} }
/**
* Set the list of access restriction types from which to choose.
* Required. May be empty.
* @param options choices for restricting access to Bitstreams.
*/
public void setOptions(List<AccessConditionOption> options) { public void setOptions(List<AccessConditionOption> options) {
this.options = options; this.options = options;
} }
/**
* Name of the submission form to which these conditions are attached.
* @return the form's name.
*/
public String getMetadata() { public String getMetadata() {
return metadataDefinition; return metadataDefinition;
} }
/**
* Name the submission form to which these conditions are attached.
* @param metadata the form's name.
*/
public void setMetadata(String metadata) { public void setMetadata(String metadata) {
this.metadataDefinition = metadata; this.metadataDefinition = metadata;
} }
/**
* Limit on the maximum size of an uploaded Bitstream.
* @return maximum upload size in bytes.
*/
public Long getMaxSize() { public Long getMaxSize() {
if (maxSize == null) { if (maxSize == null) {
maxSize = configurationService.getLongProperty("upload.max"); maxSize = configurationService.getLongProperty("upload.max");
@@ -47,10 +79,18 @@ public class UploadConfiguration {
return maxSize; return maxSize;
} }
/**
* Limit the maximum size of an uploaded Bitstream.
* @param maxSize maximum upload size in bytes.
*/
public void setMaxSize(Long maxSize) { public void setMaxSize(Long maxSize) {
this.maxSize = maxSize; this.maxSize = maxSize;
} }
/**
* Is at least one Bitstream required when submitting a new Item?
* @return true if a Bitstream is required.
*/
public Boolean isRequired() { public Boolean isRequired() {
if (required == null) { if (required == null) {
//defaults to true //defaults to true
@@ -60,25 +100,27 @@ public class UploadConfiguration {
return required; return required;
} }
/**
* Is at least one Bitstream required when submitting a new Item?
* @param required true if a Bitstream is required.
*/
public void setRequired(Boolean required) { public void setRequired(Boolean required) {
this.required = required; this.required = required;
} }
public ConfigurationService getConfigurationService() { /**
return configurationService; * The unique name of this configuration.
} * @return configuration's name.
*/
public void setConfigurationService(ConfigurationService configurationService) {
this.configurationService = configurationService;
}
public String getName() { public String getName() {
return name; return name;
} }
/**
* Give this configuration a unique name. Required.
* @param name configuration's name.
*/
public void setName(String name) { public void setName(String name) {
this.name = name; this.name = name;
} }
} }

View File

@@ -240,8 +240,8 @@ public class SolrUpgradePre6xStatistics {
/** /**
* Print a status message appended with the processing time for the operation * Print a status message appended with the processing time for the operation
* *
* @param header * @param numProcessed
* Message to display * count of records processed so far.
* @param fromStart * @param fromStart
* if true, report on processing time since the start of the program * if true, report on processing time since the start of the program
*/ */

View File

@@ -100,7 +100,7 @@ public class WorkflowRequirementsServiceImpl implements WorkflowRequirementsServ
//Then remove the current user from the inProgressUsers //Then remove the current user from the inProgressUsers
inProgressUserService.delete(context, inProgressUserService.findByWorkflowItemAndEPerson(context, wfi, user)); inProgressUserService.delete(context, inProgressUserService.findByWorkflowItemAndEPerson(context, wfi, user));
//Make sure the removed user has his custom rights removed //Make sure the removed user has their custom rights removed
xmlWorkflowService.removeUserItemPolicies(context, wfi.getItem(), user); xmlWorkflowService.removeUserItemPolicies(context, wfi.getItem(), user);
Workflow workflow = workflowFactory.getWorkflow(wfi.getCollection()); Workflow workflow = workflowFactory.getWorkflow(wfi.getCollection());

View File

@@ -447,7 +447,7 @@ public class XmlWorkflowServiceImpl implements XmlWorkflowService {
enteredNewStep); enteredNewStep);
} }
} else if (enteredNewStep) { } else if (enteredNewStep) {
// If the user finished his/her step, we keep processing until there is a UI step action or no // If the user finished their step, we keep processing until there is a UI step action or no
// step at all // step at all
nextStep = workflow.getNextStep(c, wfi, currentStep, currentOutcome.getResult()); nextStep = workflow.getNextStep(c, wfi, currentStep, currentOutcome.getResult());
c.turnOffAuthorisationSystem(); c.turnOffAuthorisationSystem();
@@ -938,7 +938,7 @@ public class XmlWorkflowServiceImpl implements XmlWorkflowService {
authorizeService.removeEPersonPolicies(context, bitstream, e); authorizeService.removeEPersonPolicies(context, bitstream, e);
} }
} }
// Ensure that the submitter always retains his resource policies // Ensure that the submitter always retains their resource policies
if (e.getID().equals(item.getSubmitter().getID())) { if (e.getID().equals(item.getSubmitter().getID())) {
grantSubmitterReadPolicies(context, item); grantSubmitterReadPolicies(context, item);
} }

View File

@@ -8,7 +8,7 @@
package org.dspace.xmlworkflow.state; package org.dspace.xmlworkflow.state;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.HashMap; import java.util.LinkedHashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@@ -118,7 +118,7 @@ public class Workflow implements BeanNameAware {
* @return a map containing the roles, the role name will the key, the role itself the value * @return a map containing the roles, the role name will the key, the role itself the value
*/ */
public Map<String, Role> getRoles() { public Map<String, Role> getRoles() {
Map<String, Role> roles = new HashMap<>(); Map<String, Role> roles = new LinkedHashMap<>();
for (Step step : steps) { for (Step step : steps) {
if (step.getRole() != null) { if (step.getRole() != null) {
roles.put(step.getRole().getId(), step.getRole()); roles.put(step.getRole().getId(), step.getRole());

View File

@@ -89,7 +89,7 @@ public class SelectReviewerAction extends ProcessingAction {
//Retrieve the identifier of the eperson which will do the reviewing //Retrieve the identifier of the eperson which will do the reviewing
UUID reviewerId = UUID.fromString(submitButton.substring(submitButton.lastIndexOf("_") + 1)); UUID reviewerId = UUID.fromString(submitButton.substring(submitButton.lastIndexOf("_") + 1));
EPerson reviewer = ePersonService.find(c, reviewerId); EPerson reviewer = ePersonService.find(c, reviewerId);
//We have a reviewer, assign him, the workflowitemrole will be translated into a task in the autoassign //Assign the reviewer. The workflowitemrole will be translated into a task in the autoassign
WorkflowItemRole workflowItemRole = workflowItemRoleService.create(c); WorkflowItemRole workflowItemRole = workflowItemRoleService.create(c);
workflowItemRole.setEPerson(reviewer); workflowItemRole.setEPerson(reviewer);
workflowItemRole.setRoleId(getRole().getId()); workflowItemRole.setRoleId(getRole().getId());

View File

@@ -25,7 +25,7 @@ import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
/** /**
* Processing class of an action where a single user has * Processing class of an action where a single user has
* been assigned and he can either accept/reject the workflow item * been assigned and they can either accept/reject the workflow item
* or reject the task * or reject the task
* *
* @author Bram De Schouwer (bram.deschouwer at dot com) * @author Bram De Schouwer (bram.deschouwer at dot com)
@@ -90,7 +90,7 @@ public class SingleUserReviewAction extends ProcessingAction {
} else { } else {
request.setAttribute("page", REJECT_PAGE); request.setAttribute("page", REJECT_PAGE);
} }
// We have pressed reject item, so take the user to a page where he can reject // We have pressed reject item, so take the user to a page where they can reject
return new ActionResult(ActionResult.TYPE.TYPE_PAGE); return new ActionResult(ActionResult.TYPE.TYPE_PAGE);
} else if (request.getParameter(SUBMIT_DECLINE_TASK) != null) { } else if (request.getParameter(SUBMIT_DECLINE_TASK) != null) {
return new ActionResult(ActionResult.TYPE.TYPE_OUTCOME, OUTCOME_REJECT); return new ActionResult(ActionResult.TYPE.TYPE_OUTCOME, OUTCOME_REJECT);

View File

@@ -136,7 +136,7 @@ public class AssignOriginalSubmitterAction extends UserSelectionAction {
protected void createTaskForEPerson(Context c, XmlWorkflowItem wfi, Step step, WorkflowActionConfig actionConfig, protected void createTaskForEPerson(Context c, XmlWorkflowItem wfi, Step step, WorkflowActionConfig actionConfig,
EPerson user) throws SQLException, AuthorizeException, IOException { EPerson user) throws SQLException, AuthorizeException, IOException {
if (claimedTaskService.find(c, wfi, step.getId(), actionConfig.getId()) != null) { if (claimedTaskService.find(c, wfi, step.getId(), actionConfig.getId()) != null) {
workflowRequirementsService.addClaimedUser(c, wfi, step, c.getCurrentUser()); workflowRequirementsService.addClaimedUser(c, wfi, step, user);
XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService() XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService()
.createOwnedTask(c, wfi, step, actionConfig, user); .createOwnedTask(c, wfi, step, actionConfig, user);
} }

View File

@@ -92,7 +92,7 @@ public class PoolTaskServiceImpl implements PoolTaskService {
return poolTask; return poolTask;
} else { } else {
//If the user has a is processing or has finished the step for a workflowitem, there is no need to look //If the user has a is processing or has finished the step for a workflowitem, there is no need to look
// for pooltasks for one of his // for pooltasks for one of their
//groups because the user already has the task claimed //groups because the user already has the task claimed
if (inProgressUserService.findByWorkflowItemAndEPerson(context, workflowItem, ePerson) != null) { if (inProgressUserService.findByWorkflowItemAndEPerson(context, workflowItem, ePerson) != null) {
return null; return null;

View File

@@ -5,8 +5,7 @@
http://www.springframework.org/schema/beans/spring-beans-2.5.xsd"> http://www.springframework.org/schema/beans/spring-beans-2.5.xsd">
<bean id="uploadConfigurationDefault" class="org.dspace.submit.model.UploadConfiguration"> <bean id="uploadConfigurationDefault" class="org.dspace.submit.model.UploadConfiguration">
<property name="name" value="upload"></property> <property name="name" value="upload"/>
<property name="configurationService" ref="org.dspace.services.ConfigurationService"/>
<property name="metadata" value="bitstream-metadata" /> <property name="metadata" value="bitstream-metadata" />
<property name="options"> <property name="options">
<list> <list>

View File

@@ -1,6 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" <beans xmlns="http://www.springframework.org/schema/beans"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:util="http://www.springframework.org/schema/util"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.springframework.org/schema/util
http://www.springframework.org/schema/util/spring-util.xsd"
default-lazy-init="true"> default-lazy-init="true">
<bean id="mockOpenAIRERestConnector" class="org.dspace.external.MockOpenAIRERestConnector"> <bean id="mockOpenAIRERestConnector" class="org.dspace.external.MockOpenAIRERestConnector">
@@ -15,6 +19,7 @@
init-method="init"> init-method="init">
<property name="sourceIdentifier" value="openAIREFunding" /> <property name="sourceIdentifier" value="openAIREFunding" />
<property name="connector" ref="mockOpenAIRERestConnector" /> <property name="connector" ref="mockOpenAIRERestConnector" />
<property name="metadataFields" ref="mapOfmetadata"/>
<property name="supportedEntityTypes"> <property name="supportedEntityTypes">
<list> <list>
<value>Project</value> <value>Project</value>
@@ -22,4 +27,63 @@
</property> </property>
</bean> </bean>
<util:map id="mapOfmetadata"
map-class="java.util.HashMap" key-type="java.lang.String" value-type="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
<entry key="awardNumber" value-ref="openaireAwardNumber" />
<entry key="fundingStream" value-ref="openaireFundingStream" />
<entry key="awardURI" value-ref="openaireAwardURI" />
<entry key="funderName" value-ref="openaireFunderName" />
<entry key="funderIdentifier" value-ref="openaireFunderIdentifier" />
<entry key="dcTitle" value-ref="openaireTitle" />
<entry key="titleAlternative" value-ref="openaireTitleAlternative" />
<entry key="dcIdentifier" value-ref="openaireIdentifier" />
<entry key="coverageSpatial" value-ref="openaireSpatial" />
<entry key="dcDescription" value-ref="openaireDescription" />
<entry key="dcSubject" value-ref="openaireSubject" />
</util:map>
<bean id="openaireAwardNumber" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
<constructor-arg value="oaire.awardNumber"/>
</bean>
<bean id="openaireFundingStream" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
<constructor-arg value="oaire.fundingStream"/>
</bean>
<bean id="openaireAwardURI" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
<constructor-arg value="oaire.awardURI"/>
</bean>
<bean id="openaireFunderName" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
<constructor-arg value="project.funder.name"/>
</bean>
<bean id="openaireFunderIdentifier" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
<constructor-arg value="project.funder.identifier"/>
</bean>
<bean id="openaireTitle" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
<constructor-arg value="dc.title"/>
</bean>
<bean id="openaireTitleAlternative" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
<constructor-arg value="dc.title.alternative"/>
</bean>
<bean id="openaireIdentifier" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
<constructor-arg value="dc.identifier"/>
</bean>
<bean id="openaireSpatial" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
<constructor-arg value="dc.coverage.spatial"/>
</bean>
<bean id="openaireDescription" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
<constructor-arg value="dc.description"/>
</bean>
<bean id="openaireSubject" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
<constructor-arg value="dc.subject"/>
</bean>
</beans> </beans>

View File

@@ -65,6 +65,11 @@
<property name="dspaceRunnableClass" value="org.dspace.orcid.script.OrcidBulkPush"/> <property name="dspaceRunnableClass" value="org.dspace.orcid.script.OrcidBulkPush"/>
</bean> </bean>
<bean id="process-cleaner" class="org.dspace.administer.ProcessCleanerCliConfiguration">
<property name="description" value="Cleanup all the old processes in the specified state"/>
<property name="dspaceRunnableClass" value="org.dspace.administer.ProcessCleanerCli"/>
</bean>
<!-- Keep as last script; for test ScriptRestRepository#findOneScriptByNameTest --> <!-- Keep as last script; for test ScriptRestRepository#findOneScriptByNameTest -->
<bean id="mock-script" class="org.dspace.scripts.MockDSpaceRunnableScriptConfiguration" scope="prototype"> <bean id="mock-script" class="org.dspace.scripts.MockDSpaceRunnableScriptConfiguration" scope="prototype">
<property name="description" value="Mocking a script for testing purposes" /> <property name="description" value="Mocking a script for testing purposes" />

View File

@@ -0,0 +1,380 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
import static org.apache.commons.lang.time.DateUtils.addDays;
import static org.dspace.content.ProcessStatus.COMPLETED;
import static org.dspace.content.ProcessStatus.FAILED;
import static org.dspace.content.ProcessStatus.RUNNING;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import java.sql.SQLException;
import java.util.Date;
import java.util.List;
import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.app.launcher.ScriptLauncher;
import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
import org.dspace.builder.ProcessBuilder;
import org.dspace.content.ProcessStatus;
import org.dspace.scripts.Process;
import org.dspace.scripts.factory.ScriptServiceFactory;
import org.dspace.scripts.service.ProcessService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.junit.Test;
/**
* Integration tests for {@link ProcessCleaner}.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class ProcessCleanerIT extends AbstractIntegrationTestWithDatabase {
private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
private ProcessService processService = ScriptServiceFactory.getInstance().getProcessService();
@Test
public void testWithoutProcessToDelete() throws Exception {
Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2));
Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1));
Process process_3 = buildProcess(FAILED, addDays(new Date(), -3));
configurationService.setProperty("process-cleaner.days", 5);
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
String[] args = new String[] { "process-cleaner" };
ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);
assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty());
assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty());
List<String> messages = testDSpaceRunnableHandler.getInfoMessages();
assertThat(messages, hasSize(3));
assertThat(messages, hasItem("Searching for processes with status: [COMPLETED]"));
assertThat(messages, hasItem("Found 0 processes to be deleted"));
assertThat(messages, hasItem("Process cleanup completed"));
assertThat(processService.find(context, process_1.getID()), notNullValue());
assertThat(processService.find(context, process_2.getID()), notNullValue());
assertThat(processService.find(context, process_3.getID()), notNullValue());
}
@Test
public void testWithoutSpecifiedStatus() throws Exception {
Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2));
Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1));
Process process_3 = buildProcess(FAILED, addDays(new Date(), -3));
Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6));
Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8));
Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7));
Process process_7 = buildProcess(FAILED, addDays(new Date(), -8));
configurationService.setProperty("process-cleaner.days", 5);
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
String[] args = new String[] { "process-cleaner" };
ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);
assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty());
assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty());
List<String> messages = testDSpaceRunnableHandler.getInfoMessages();
assertThat(messages, hasSize(3));
assertThat(messages, hasItem("Searching for processes with status: [COMPLETED]"));
assertThat(messages, hasItem("Found 2 processes to be deleted"));
assertThat(messages, hasItem("Process cleanup completed"));
assertThat(processService.find(context, process_1.getID()), notNullValue());
assertThat(processService.find(context, process_2.getID()), notNullValue());
assertThat(processService.find(context, process_3.getID()), notNullValue());
assertThat(processService.find(context, process_4.getID()), nullValue());
assertThat(processService.find(context, process_5.getID()), nullValue());
assertThat(processService.find(context, process_6.getID()), notNullValue());
assertThat(processService.find(context, process_7.getID()), notNullValue());
}
@Test
public void testWithCompletedStatus() throws Exception {
Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2));
Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1));
Process process_3 = buildProcess(FAILED, addDays(new Date(), -3));
Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6));
Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8));
Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7));
Process process_7 = buildProcess(FAILED, addDays(new Date(), -8));
configurationService.setProperty("process-cleaner.days", 5);
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
String[] args = new String[] { "process-cleaner", "-c" };
ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);
assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty());
assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty());
List<String> messages = testDSpaceRunnableHandler.getInfoMessages();
assertThat(messages, hasSize(3));
assertThat(messages, hasItem("Searching for processes with status: [COMPLETED]"));
assertThat(messages, hasItem("Found 2 processes to be deleted"));
assertThat(messages, hasItem("Process cleanup completed"));
assertThat(processService.find(context, process_1.getID()), notNullValue());
assertThat(processService.find(context, process_2.getID()), notNullValue());
assertThat(processService.find(context, process_3.getID()), notNullValue());
assertThat(processService.find(context, process_4.getID()), nullValue());
assertThat(processService.find(context, process_5.getID()), nullValue());
assertThat(processService.find(context, process_6.getID()), notNullValue());
assertThat(processService.find(context, process_7.getID()), notNullValue());
}
@Test
public void testWithRunningStatus() throws Exception {
Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2));
Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1));
Process process_3 = buildProcess(FAILED, addDays(new Date(), -3));
Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6));
Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8));
Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7));
Process process_7 = buildProcess(FAILED, addDays(new Date(), -8));
Process process_8 = buildProcess(RUNNING, addDays(new Date(), -9));
configurationService.setProperty("process-cleaner.days", 5);
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
String[] args = new String[] { "process-cleaner", "-r" };
ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);
assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty());
assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty());
List<String> messages = testDSpaceRunnableHandler.getInfoMessages();
assertThat(messages, hasSize(3));
assertThat(messages, hasItem("Searching for processes with status: [RUNNING]"));
assertThat(messages, hasItem("Found 2 processes to be deleted"));
assertThat(messages, hasItem("Process cleanup completed"));
assertThat(processService.find(context, process_1.getID()), notNullValue());
assertThat(processService.find(context, process_2.getID()), notNullValue());
assertThat(processService.find(context, process_3.getID()), notNullValue());
assertThat(processService.find(context, process_4.getID()), notNullValue());
assertThat(processService.find(context, process_5.getID()), notNullValue());
assertThat(processService.find(context, process_6.getID()), nullValue());
assertThat(processService.find(context, process_7.getID()), notNullValue());
assertThat(processService.find(context, process_8.getID()), nullValue());
}
@Test
public void testWithFailedStatus() throws Exception {
Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2));
Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1));
Process process_3 = buildProcess(FAILED, addDays(new Date(), -3));
Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6));
Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8));
Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7));
Process process_7 = buildProcess(FAILED, addDays(new Date(), -8));
Process process_8 = buildProcess(FAILED, addDays(new Date(), -9));
configurationService.setProperty("process-cleaner.days", 5);
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
String[] args = new String[] { "process-cleaner", "-f" };
ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);
assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty());
assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty());
List<String> messages = testDSpaceRunnableHandler.getInfoMessages();
assertThat(messages, hasSize(3));
assertThat(messages, hasItem("Searching for processes with status: [FAILED]"));
assertThat(messages, hasItem("Found 2 processes to be deleted"));
assertThat(messages, hasItem("Process cleanup completed"));
assertThat(processService.find(context, process_1.getID()), notNullValue());
assertThat(processService.find(context, process_2.getID()), notNullValue());
assertThat(processService.find(context, process_3.getID()), notNullValue());
assertThat(processService.find(context, process_4.getID()), notNullValue());
assertThat(processService.find(context, process_5.getID()), notNullValue());
assertThat(processService.find(context, process_6.getID()), notNullValue());
assertThat(processService.find(context, process_7.getID()), nullValue());
assertThat(processService.find(context, process_8.getID()), nullValue());
}
@Test
public void testWithCompletedAndFailedStatus() throws Exception {
Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2));
Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1));
Process process_3 = buildProcess(FAILED, addDays(new Date(), -3));
Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6));
Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8));
Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7));
Process process_7 = buildProcess(FAILED, addDays(new Date(), -8));
Process process_8 = buildProcess(FAILED, addDays(new Date(), -9));
configurationService.setProperty("process-cleaner.days", 5);
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
String[] args = new String[] { "process-cleaner", "-c", "-f" };
ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);
List<String> messages = testDSpaceRunnableHandler.getInfoMessages();
assertThat(messages, hasSize(3));
assertThat(messages, hasItem("Searching for processes with status: [COMPLETED, FAILED]"));
assertThat(messages, hasItem("Found 4 processes to be deleted"));
assertThat(messages, hasItem("Process cleanup completed"));
assertThat(processService.find(context, process_1.getID()), notNullValue());
assertThat(processService.find(context, process_2.getID()), notNullValue());
assertThat(processService.find(context, process_3.getID()), notNullValue());
assertThat(processService.find(context, process_4.getID()), nullValue());
assertThat(processService.find(context, process_5.getID()), nullValue());
assertThat(processService.find(context, process_6.getID()), notNullValue());
assertThat(processService.find(context, process_7.getID()), nullValue());
assertThat(processService.find(context, process_8.getID()), nullValue());
}
@Test
public void testWithCompletedAndRunningStatus() throws Exception {
Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2));
Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1));
Process process_3 = buildProcess(FAILED, addDays(new Date(), -3));
Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6));
Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8));
Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7));
Process process_7 = buildProcess(FAILED, addDays(new Date(), -8));
Process process_8 = buildProcess(RUNNING, addDays(new Date(), -9));
configurationService.setProperty("process-cleaner.days", 5);
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
String[] args = new String[] { "process-cleaner", "-c", "-r" };
ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);
List<String> messages = testDSpaceRunnableHandler.getInfoMessages();
assertThat(messages, hasSize(3));
assertThat(messages, hasItem("Searching for processes with status: [COMPLETED, RUNNING]"));
assertThat(messages, hasItem("Found 4 processes to be deleted"));
assertThat(messages, hasItem("Process cleanup completed"));
assertThat(processService.find(context, process_1.getID()), notNullValue());
assertThat(processService.find(context, process_2.getID()), notNullValue());
assertThat(processService.find(context, process_3.getID()), notNullValue());
assertThat(processService.find(context, process_4.getID()), nullValue());
assertThat(processService.find(context, process_5.getID()), nullValue());
assertThat(processService.find(context, process_6.getID()), nullValue());
assertThat(processService.find(context, process_7.getID()), notNullValue());
assertThat(processService.find(context, process_8.getID()), nullValue());
}
@Test
public void testWithFailedAndRunningStatus() throws Exception {
Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2));
Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1));
Process process_3 = buildProcess(FAILED, addDays(new Date(), -3));
Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6));
Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8));
Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7));
Process process_7 = buildProcess(FAILED, addDays(new Date(), -8));
Process process_8 = buildProcess(RUNNING, addDays(new Date(), -9));
configurationService.setProperty("process-cleaner.days", 5);
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
String[] args = new String[] { "process-cleaner", "-f", "-r" };
ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);
List<String> messages = testDSpaceRunnableHandler.getInfoMessages();
assertThat(messages, hasSize(3));
assertThat(messages, hasItem("Searching for processes with status: [FAILED, RUNNING]"));
assertThat(messages, hasItem("Found 3 processes to be deleted"));
assertThat(messages, hasItem("Process cleanup completed"));
assertThat(processService.find(context, process_1.getID()), notNullValue());
assertThat(processService.find(context, process_2.getID()), notNullValue());
assertThat(processService.find(context, process_3.getID()), notNullValue());
assertThat(processService.find(context, process_4.getID()), notNullValue());
assertThat(processService.find(context, process_5.getID()), notNullValue());
assertThat(processService.find(context, process_6.getID()), nullValue());
assertThat(processService.find(context, process_7.getID()), nullValue());
assertThat(processService.find(context, process_8.getID()), nullValue());
}
/**
 * Verify that running the process-cleaner script with -f, -r and -c deletes
 * every process (COMPLETED, FAILED and RUNNING alike) that is older than the
 * configured retention period.
 */
@Test
public void testWithCompletedFailedAndRunningStatus() throws Exception {
    // Processes inside the 5-day retention window — all must survive the cleanup.
    Process recentCompleted = buildProcess(COMPLETED, addDays(new Date(), -2));
    Process recentRunning = buildProcess(RUNNING, addDays(new Date(), -1));
    Process recentFailed = buildProcess(FAILED, addDays(new Date(), -3));
    // Processes older than the window — every status is eligible for deletion.
    Process oldCompleted1 = buildProcess(COMPLETED, addDays(new Date(), -6));
    Process oldCompleted2 = buildProcess(COMPLETED, addDays(new Date(), -8));
    Process oldRunning1 = buildProcess(RUNNING, addDays(new Date(), -7));
    Process oldFailed = buildProcess(FAILED, addDays(new Date(), -8));
    Process oldRunning2 = buildProcess(RUNNING, addDays(new Date(), -9));

    configurationService.setProperty("process-cleaner.days", 5);

    TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler();
    ScriptLauncher.handleScript(new String[] { "process-cleaner", "-f", "-r", "-c" },
        ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl);

    // The script reports its search criteria, the match count, and completion.
    List<String> infoMessages = handler.getInfoMessages();
    assertThat(infoMessages, hasSize(3));
    assertThat(infoMessages, hasItem("Searching for processes with status: [COMPLETED, FAILED, RUNNING]"));
    assertThat(infoMessages, hasItem("Found 5 processes to be deleted"));
    assertThat(infoMessages, hasItem("Process cleanup completed"));

    // Only the recent processes remain; everything older than the window is gone.
    assertThat(processService.find(context, recentCompleted.getID()), notNullValue());
    assertThat(processService.find(context, recentRunning.getID()), notNullValue());
    assertThat(processService.find(context, recentFailed.getID()), notNullValue());
    assertThat(processService.find(context, oldCompleted1.getID()), nullValue());
    assertThat(processService.find(context, oldCompleted2.getID()), nullValue());
    assertThat(processService.find(context, oldRunning1.getID()), nullValue());
    assertThat(processService.find(context, oldFailed.getID()), nullValue());
    assertThat(processService.find(context, oldRunning2.getID()), nullValue());
}
/**
 * Create and persist a test Process with the given status and creation time.
 *
 * @param processStatus status to assign to the new process
 * @param creationTime  creation timestamp to assign to the new process
 * @return the persisted Process
 * @throws SQLException passed through from the builder
 */
private Process buildProcess(ProcessStatus processStatus, Date creationTime) throws SQLException {
    ProcessBuilder builder = ProcessBuilder.createProcess(context, admin, "test", List.of());
    builder.withProcessStatus(processStatus);
    builder.withCreationTime(creationTime);
    return builder.build();
}
}

View File

@@ -8,6 +8,7 @@
package org.dspace.administer; package org.dspace.administer;
import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayInputStream; import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
@@ -18,9 +19,10 @@ import java.sql.SQLException;
import java.util.Iterator; import java.util.Iterator;
import javax.xml.parsers.ParserConfigurationException; import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Source; import javax.xml.transform.Source;
import javax.xml.transform.TransformerException;
import javax.xml.transform.stream.StreamSource; import javax.xml.transform.stream.StreamSource;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.AbstractIntegrationTest; import org.dspace.AbstractIntegrationTest;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection; import org.dspace.content.Collection;
@@ -29,13 +31,11 @@ import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService; import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService; import org.dspace.content.service.CommunityService;
import org.junit.After; import org.dspace.handle.Handle;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.Before; import org.junit.Before;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import org.junit.Test; import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Attr; import org.w3c.dom.Attr;
import org.w3c.dom.Node; import org.w3c.dom.Node;
import org.xml.sax.SAXException; import org.xml.sax.SAXException;
@@ -53,7 +53,7 @@ import org.xmlunit.diff.Difference;
*/ */
public class StructBuilderIT public class StructBuilderIT
extends AbstractIntegrationTest { extends AbstractIntegrationTest {
private static final Logger log = LoggerFactory.getLogger(StructBuilderIT.class); private static final Logger log = LogManager.getLogger();
private static final CommunityService communityService private static final CommunityService communityService
= ContentServiceFactory.getInstance().getCommunityService(); = ContentServiceFactory.getInstance().getCommunityService();
@@ -89,27 +89,28 @@ public class StructBuilderIT
context.restoreAuthSystemState(); context.restoreAuthSystemState();
} }
@After private static final String COMMUNITY_0_HANDLE = "https://hdl.handle.net/1/1";
public void tearDown() { private static final String COMMUNITY_0_0_HANDLE = "https://hdl.handle.net/1/1.1";
} private static final String COLLECTION_0_0_0_HANDLE = "https://hdl.handle.net/1/1.1.1";
private static final String COLLECTION_0_1_HANDLE = "https://hdl.handle.net/1/1.2";
/** Test structure document. */ /** Test structure document. */
private static final String IMPORT_DOCUMENT = private static final String IMPORT_DOCUMENT =
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
"<import_structure>\n" + "<import_structure>\n" +
" <community>\n" + " <community identifier='" + COMMUNITY_0_HANDLE + "'>\n" +
" <name>Top Community 0</name>\n" + " <name>Top Community 0</name>\n" +
" <description>A top level community</description>\n" + " <description>A top level community</description>\n" +
" <intro>Testing 1 2 3</intro>\n" + " <intro>Testing 1 2 3</intro>\n" +
" <copyright>1969</copyright>\n" + " <copyright>1969</copyright>\n" +
" <sidebar>A sidebar</sidebar>\n" + " <sidebar>A sidebar</sidebar>\n" +
" <community>\n" + " <community identifier='" + COMMUNITY_0_0_HANDLE + "'>\n" +
" <name>Sub Community 0.0</name>\n" + " <name>Sub Community 0.0</name>\n" +
" <description>A sub community</description>\n" + " <description>A sub community</description>\n" +
" <intro>Live from New York....</intro>\n" + " <intro>Live from New York....</intro>\n" +
" <copyright>1957</copyright>\n" + " <copyright>1957</copyright>\n" +
" <sidebar>Another sidebar</sidebar>\n" + " <sidebar>Another sidebar</sidebar>\n" +
" <collection>\n" + " <collection identifier='" + COLLECTION_0_0_0_HANDLE + "'>\n" +
" <name>Collection 0.0.0</name>\n" + " <name>Collection 0.0.0</name>\n" +
" <description>A collection</description>\n" + " <description>A collection</description>\n" +
" <intro>Our next guest needs no introduction</intro>\n" + " <intro>Our next guest needs no introduction</intro>\n" +
@@ -119,7 +120,14 @@ public class StructBuilderIT
" <provenance>Testing</provenance>\n" + " <provenance>Testing</provenance>\n" +
" </collection>\n" + " </collection>\n" +
" </community>\n" + " </community>\n" +
" <collection>\n" + " <community>\n" +
" <name>Sub Community 0.1</name>\n" +
" <description>A sub community with no handle</description>\n" +
" <intro>Stop me if you've heard this one</intro>\n" +
" <copyright>2525</copyright>\n" +
" <sidebar>One more sidebar</sidebar>\n" +
" </community>\n" +
" <collection identifier='" + COLLECTION_0_1_HANDLE + "'>\n" +
" <name>Collection 0.1</name>\n" + " <name>Collection 0.1</name>\n" +
" <description>Another collection</description>\n" + " <description>Another collection</description>\n" +
" <intro>Fourscore and seven years ago</intro>\n" + " <intro>Fourscore and seven years ago</intro>\n" +
@@ -150,7 +158,7 @@ public class StructBuilderIT
* @throws java.lang.Exception passed through. * @throws java.lang.Exception passed through.
*/ */
@Test @Test
public void testImportStructure() public void testImportStructureWithoutHandles()
throws Exception { throws Exception {
System.out.println("importStructure"); System.out.println("importStructure");
@@ -160,11 +168,7 @@ public class StructBuilderIT
byte[] inputBytes = IMPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8); byte[] inputBytes = IMPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8);
context.turnOffAuthorisationSystem(); context.turnOffAuthorisationSystem();
try (InputStream input = new ByteArrayInputStream(inputBytes);) { try (InputStream input = new ByteArrayInputStream(inputBytes);) {
StructBuilder.importStructure(context, input, outputDocument); StructBuilder.importStructure(context, input, outputDocument, false);
} catch (IOException | SQLException
| ParserConfigurationException | TransformerException ex) {
System.err.println(ex.getMessage());
System.exit(1);
} finally { } finally {
context.restoreAuthSystemState(); context.restoreAuthSystemState();
} }
@@ -180,7 +184,81 @@ public class StructBuilderIT
IMPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8))); IMPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8)));
Diff myDiff = DiffBuilder.compare(reference).withTest(output) Diff myDiff = DiffBuilder.compare(reference).withTest(output)
.normalizeWhitespace() .normalizeWhitespace()
// .withNodeFilter(new MyNodeFilter()) .withAttributeFilter((Attr attr) ->
!attr.getName().equals("identifier"))
.checkForIdentical()
.build();
// Was there a difference?
// Always output differences -- one is expected.
ComparisonFormatter formatter = new DefaultComparisonFormatter();
for (Difference difference : myDiff.getDifferences()) {
System.err.println(difference.toString(formatter));
}
// Test for *significant* differences.
assertFalse("Output does not match input.", isDifferent(myDiff));
// TODO spot-check some objects.
}
/**
* Test of importStructure method, with given Handles.
*
* @throws java.lang.Exception passed through.
*/
@Test
public void testImportStructureWithHandles()
throws Exception {
System.out.println("importStructure");
// Run the method under test and collect its output.
ByteArrayOutputStream outputDocument
= new ByteArrayOutputStream(IMPORT_DOCUMENT.length() * 2 * 2);
byte[] inputBytes = IMPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8);
context.turnOffAuthorisationSystem();
try (InputStream input = new ByteArrayInputStream(inputBytes);) {
StructBuilder.importStructure(context, input, outputDocument, true);
} finally {
context.restoreAuthSystemState();
}
boolean found;
// Check a chosen Community for the right Handle.
found = false;
for (Community community : communityService.findAllTop(context)) {
for (Handle handle : community.getHandles()) {
if (handle.getHandle().equals(COMMUNITY_0_HANDLE)) {
found = true;
break;
}
}
}
assertTrue("A community should have its specified handle", found);
// Check a chosen Collection for the right Handle.
found = false;
for (Collection collection : collectionService.findAll(context)) {
for (Handle handle : collection.getHandles()) {
if (handle.getHandle().equals(COLLECTION_0_1_HANDLE)) {
found = true;
break;
}
}
}
assertTrue("A collection should have its specified handle", found);
// Compare import's output with its input.
// N.B. here we rely on StructBuilder to emit communities and
// collections in the same order as the input document. If that changes,
// we will need a smarter NodeMatcher, probably based on <name> children.
Source output = new StreamSource(
new ByteArrayInputStream(outputDocument.toByteArray()));
Source reference = new StreamSource(
new ByteArrayInputStream(
IMPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8)));
Diff myDiff = DiffBuilder.compare(reference).withTest(output)
.normalizeWhitespace()
.withAttributeFilter((Attr attr) -> .withAttributeFilter((Attr attr) ->
!attr.getName().equals("identifier")) !attr.getName().equals("identifier"))
.checkForIdentical() .checkForIdentical()
@@ -236,7 +314,6 @@ public class StructBuilderIT
EXPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8))); EXPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8)));
Diff myDiff = DiffBuilder.compare(reference).withTest(output) Diff myDiff = DiffBuilder.compare(reference).withTest(output)
.normalizeWhitespace() .normalizeWhitespace()
// .withNodeFilter(new MyNodeFilter())
.withAttributeFilter((Attr attr) -> .withAttributeFilter((Attr attr) ->
!attr.getName().equals("identifier")) !attr.getName().equals("identifier"))
.checkForIdentical() .checkForIdentical()
@@ -310,23 +387,4 @@ public class StructBuilderIT
// There must be at most one difference. // There must be at most one difference.
return diffIterator.hasNext(); return diffIterator.hasNext();
} }
/**
* Reject uninteresting nodes. (currently commented out of tests above)
*/
/*private static class MyNodeFilter implements Predicate<Node> {
private static final List<String> dontCare = Arrays.asList(
"description",
"intro",
"copyright",
"sidebar",
"license",
"provenance");
@Override
public boolean test(Node node) {
String type = node.getLocalName();
return ! dontCare.contains(type);
}
}*/
} }

View File

@@ -0,0 +1,62 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.requestitem;
import static org.junit.Assert.assertEquals;
import java.util.List;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.junit.Test;
import org.mockito.Mockito;
/**
*
* @author Mark H. Wood <mwood@iupui.edu>
*/
public class CollectionAdministratorsRequestItemStrategyTest {
    /** Full name of the mocked administrator/submitter. */
    private static final String NAME = "John Q. Public";
    /** Email address of the mocked administrator/submitter. */
    private static final String EMAIL = "jqpublic@example.com";

    /**
     * Test of getRequestItemAuthor method, of class CollectionAdministratorsRequestItemStrategy.
     * The item's owning collection has a single administrator who is also the
     * submitter, so exactly one author with that name/email should be returned.
     *
     * @throws java.lang.Exception passed through.
     */
    @Test
    public void testGetRequestItemAuthor()
            throws Exception {
        System.out.println("getRequestItemAuthor");
        Context context = Mockito.mock(Context.class);

        // A single EPerson acting as both collection administrator and submitter.
        EPerson adminPerson = Mockito.mock(EPerson.class);
        Mockito.when(adminPerson.getEmail()).thenReturn(EMAIL);
        Mockito.when(adminPerson.getFullName()).thenReturn(NAME);

        // The administrators group contains just that one person.
        Group adminGroup = Mockito.mock(Group.class);
        Mockito.when(adminGroup.getMembers()).thenReturn(List.of(adminPerson));

        Collection owningCollection = Mockito.mock(Collection.class);
        Mockito.when(owningCollection.getAdministrators()).thenReturn(adminGroup);

        Item item = Mockito.mock(Item.class);
        Mockito.when(item.getOwningCollection()).thenReturn(owningCollection);
        Mockito.when(item.getSubmitter()).thenReturn(adminPerson);

        CollectionAdministratorsRequestItemStrategy instance = new CollectionAdministratorsRequestItemStrategy();
        List<RequestItemAuthor> result = instance.getRequestItemAuthor(context,
                item);
        assertEquals("Should be one author", 1, result.size());
        assertEquals("Name should match " + NAME, NAME, result.get(0).getFullName());
        assertEquals("Email should match " + EMAIL, EMAIL, result.get(0).getEmail());
    }
}

View File

@@ -0,0 +1,53 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.requestitem;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder;
import java.util.List;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.junit.Test;
import org.mockito.Mockito;
/**
*
* @author Mark H. Wood <mwood@iupui.edu>
*/
public class CombiningRequestItemStrategyTest {
    /**
     * Test of getRequestItemAuthor method, of class CombiningRequestItemStrategy.
     * Two mocked delegate strategies return one and two authors respectively;
     * the combining strategy should yield the union of all three.
     *
     * @throws java.lang.Exception passed through.
     */
    @Test
    public void testGetRequestItemAuthor()
            throws Exception {
        System.out.println("getRequestItemAuthor");
        Context context = null;
        Item item = Mockito.mock(Item.class);

        // Authors to be returned by the two delegate strategies.
        RequestItemAuthor author1 = new RequestItemAuthor("Pat Paulsen", "ppaulsen@example.com");
        RequestItemAuthor author2 = new RequestItemAuthor("Alfred E. Neuman", "aeneuman@example.com");
        RequestItemAuthor author3 = new RequestItemAuthor("Alias Undercover", "aundercover@example.com");

        // First delegate yields one author; second yields the other two.
        RequestItemAuthorExtractor helpdeskStrategy = Mockito.mock(RequestItemHelpdeskStrategy.class);
        Mockito.when(helpdeskStrategy.getRequestItemAuthor(context, item)).thenReturn(List.of(author1));
        RequestItemAuthorExtractor metadataStrategy = Mockito.mock(RequestItemMetadataStrategy.class);
        Mockito.when(metadataStrategy.getRequestItemAuthor(context, item)).thenReturn(List.of(author2, author3));

        CombiningRequestItemStrategy instance
                = new CombiningRequestItemStrategy(List.of(helpdeskStrategy, metadataStrategy));
        List<RequestItemAuthor> result = instance.getRequestItemAuthor(context,
                item);
        assertThat(result, containsInAnyOrder(author1, author2, author3));
    }
}

View File

@@ -27,7 +27,7 @@ import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
/** /**
* Created by pbecker as he wanted to write a test against DS-3572. * Created by pbecker to write a test against DS-3572.
* This definitely needs to be extended, but it's at least a start. * This definitely needs to be extended, but it's at least a start.
*/ */
public class AuthorizeServiceTest extends AbstractUnitTest { public class AuthorizeServiceTest extends AbstractUnitTest {
@@ -80,7 +80,7 @@ public class AuthorizeServiceTest extends AbstractUnitTest {
} }
try { try {
// eperson1 should be able to write as he is member of a group that has write permissions // eperson1 should be able to write as it is a member of a group that has write permissions
Assert.assertTrue(authorizeService.authorizeActionBoolean(context, eperson1, dso, Constants.WRITE, true)); Assert.assertTrue(authorizeService.authorizeActionBoolean(context, eperson1, dso, Constants.WRITE, true));
// person2 shouldn't have write access // person2 shouldn't have write access
Assert.assertFalse(authorizeService.authorizeActionBoolean(context, eperson2, dso, Constants.WRITE, true)); Assert.assertFalse(authorizeService.authorizeActionBoolean(context, eperson2, dso, Constants.WRITE, true));

View File

@@ -11,6 +11,7 @@ import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import java.text.ParseException; import java.text.ParseException;
import java.text.SimpleDateFormat; import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
@@ -60,6 +61,11 @@ public class ProcessBuilder extends AbstractBuilder<Process, ProcessService> {
return this; return this;
} }
public ProcessBuilder withCreationTime(Date creationTime) {
process.setCreationTime(creationTime);
return this;
}
public ProcessBuilder withStartAndEndTime(String startTime, String endTime) throws ParseException { public ProcessBuilder withStartAndEndTime(String startTime, String endTime) throws ParseException {
SimpleDateFormat simpleDateFormat = new SimpleDateFormat("dd/MM/yyyy"); SimpleDateFormat simpleDateFormat = new SimpleDateFormat("dd/MM/yyyy");
process.setStartTime(simpleDateFormat.parse(startTime)); process.setStartTime(simpleDateFormat.parse(startTime));

View File

@@ -408,6 +408,7 @@ public class LogicalFilterTest extends AbstractUnitTest {
// Create condition to match pattern on dc.title metadata // Create condition to match pattern on dc.title metadata
Condition condition = new MetadataValuesMatchCondition(); Condition condition = new MetadataValuesMatchCondition();
condition.setItemService(ContentServiceFactory.getInstance().getItemService());
Map<String, Object> parameters = new HashMap<>(); Map<String, Object> parameters = new HashMap<>();
// Match on the dc.title field // Match on the dc.title field
parameters.put("field", "dc.title"); parameters.put("field", "dc.title");
@@ -461,6 +462,7 @@ public class LogicalFilterTest extends AbstractUnitTest {
// Instantiate new filter for testing this condition // Instantiate new filter for testing this condition
DefaultFilter filter = new DefaultFilter(); DefaultFilter filter = new DefaultFilter();
Condition condition = new InCollectionCondition(); Condition condition = new InCollectionCondition();
condition.setItemService(ContentServiceFactory.getInstance().getItemService());
Map<String, Object> parameters = new HashMap<>(); Map<String, Object> parameters = new HashMap<>();
// Add collectionOne handle to the collections parameter - ie. we are testing to see if the item is // Add collectionOne handle to the collections parameter - ie. we are testing to see if the item is

View File

@@ -194,7 +194,7 @@ public class ITDSpaceAIP extends AbstractIntegrationTest {
ePersonService.update(context, submitter); ePersonService.update(context, submitter);
context.setCurrentUser(submitter); context.setCurrentUser(submitter);
//Make our test ePerson an admin so he can perform deletes and restores //Make our test ePerson an admin so it can perform deletes and restores
GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); GroupService groupService = EPersonServiceFactory.getInstance().getGroupService();
Group adminGroup = groupService.findByName(context, Group.ADMIN); Group adminGroup = groupService.findByName(context, Group.ADMIN);
groupService.addMember(context, adminGroup, submitter); groupService.addMember(context, adminGroup, submitter);

View File

@@ -0,0 +1,79 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.ctask.general;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.curate.Curator;
import org.dspace.identifier.VersionedHandleIdentifierProviderWithCanonicalHandles;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.junit.Test;
/**
* Rudimentary test of the curation task.
*
* @author mwood
*/
public class CreateMissingIdentifiersIT
        extends AbstractIntegrationTestWithDatabase {
    /** Configuration property under which named curation tasks are registered. */
    private static final String P_TASK_DEF
            = "plugin.named.org.dspace.curate.CurationTask";
    /** Name used to register and invoke the task under test. */
    private static final String TASK_NAME = "test";

    /**
     * Run the CreateMissingIdentifiers task once with the normal configuration
     * (expected to succeed) and once after registering an incompatible
     * identifier provider (expected to fail).
     *
     * @throws IOException passed through.
     */
    @Test
    public void testPerform()
            throws IOException {
        // Register the task under TASK_NAME, replacing any existing definitions.
        ConfigurationService configurationService
                = DSpaceServicesFactory.getInstance().getConfigurationService();
        configurationService.setProperty(P_TASK_DEF, null);
        configurationService.addPropertyValue(P_TASK_DEF,
                CreateMissingIdentifiers.class.getCanonicalName() + " = " + TASK_NAME);

        Curator curator = new Curator();
        curator.addTask(TASK_NAME);

        // Build a minimal community/collection/item to curate.
        context.setCurrentUser(admin);
        parentCommunity = CommunityBuilder.createCommunity(context)
                .build();
        Collection collection = CollectionBuilder.createCollection(context, parentCommunity)
                .build();
        Item item = ItemBuilder.createItem(context, collection)
                .build();

        // Curate with regular test configuration -- should succeed.
        curator.curate(context, item);
        assertEquals("Curation should succeed",
                Curator.CURATE_SUCCESS, curator.getStatus(TASK_NAME));

        // Now install an incompatible provider to make the task fail.
        DSpaceServicesFactory.getInstance()
                .getServiceManager()
                .registerServiceClass(
                        VersionedHandleIdentifierProviderWithCanonicalHandles.class.getCanonicalName(),
                        VersionedHandleIdentifierProviderWithCanonicalHandles.class);
        curator.curate(context, item);
        System.out.format("With incompatible provider, result is '%s'.\n",
                curator.getResult(TASK_NAME));
        assertEquals("Curation should fail", Curator.CURATE_ERROR,
                curator.getStatus(TASK_NAME));
    }
}

View File

@@ -56,14 +56,15 @@ public class IPTableTest {
IPTable instance = new IPTable(); IPTable instance = new IPTable();
// Add IP address // Add IP address
instance.add(LOCALHOST); instance.add(LOCALHOST);
// Add IP range // Add IP range (contains 256 addresses)
instance.add("192.168.1"); instance.add("192.168.1");
// Make sure both exist // Make sure it returns the addresses for all ranges
Set<String> ipSet = instance.toSet(); Set<String> ipSet = instance.toSet();
assertEquals(2, ipSet.size()); assertEquals(257, ipSet.size());
assertTrue(ipSet.contains(LOCALHOST)); assertTrue(ipSet.contains(LOCALHOST));
assertTrue(ipSet.contains("192.168.1")); assertTrue(ipSet.contains("192.168.1.0"));
assertTrue(ipSet.contains("192.168.1.255"));
} }
@Test @Test
@@ -76,13 +77,13 @@ public class IPTableTest {
assertEquals(1, instance.toSet().size()); assertEquals(1, instance.toSet().size());
instance = new IPTable(); instance = new IPTable();
// Add IP range & then add an IP from within that range // Add IP range w/ 256 addresses & then add an IP from within that range
instance.add("192.168.1"); instance.add("192.168.1");
instance.add("192.168.1.1"); instance.add("192.168.1.1");
// Verify only the range exists // Verify only the range exists
Set<String> ipSet = instance.toSet(); Set<String> ipSet = instance.toSet();
assertEquals(1, ipSet.size()); assertEquals(256, ipSet.size());
assertTrue(ipSet.contains("192.168.1")); assertTrue(ipSet.contains("192.168.1.1"));
instance = new IPTable(); instance = new IPTable();
// Now, switch order. Add IP address, then add a range encompassing that IP // Now, switch order. Add IP address, then add a range encompassing that IP
@@ -90,8 +91,8 @@ public class IPTableTest {
instance.add("192.168.1"); instance.add("192.168.1");
// Verify only the range exists // Verify only the range exists
ipSet = instance.toSet(); ipSet = instance.toSet();
assertEquals(1, ipSet.size()); assertEquals(256, ipSet.size());
assertTrue(ipSet.contains("192.168.1")); assertTrue(ipSet.contains("192.168.1.1"));
} }
/** /**
@@ -120,6 +121,48 @@ public class IPTableTest {
assertTrue("IP within an add()ed range should match", contains); assertTrue("IP within an add()ed range should match", contains);
} }
/**
 * A dashed range "lo - hi" must contain both endpoints and every address
 * between them, and exclude addresses immediately outside either end.
 */
@Test
public void testDashRangeContains() throws Exception {
    IPTable ipTable = new IPTable();
    ipTable.add("192.168.0.0 - 192.168.0.245");

    // Endpoints and interior addresses are members.
    assertTrue("Range should contain lower limit", ipTable.contains("192.168.0.0"));
    assertTrue("Range should contain upper limit", ipTable.contains("192.168.0.245"));
    assertTrue("Range should contain value in between limits", ipTable.contains("192.168.0.123"));
    assertTrue("Range should contain value in between limits", ipTable.contains("192.168.0.234"));
    // Addresses just outside the range are not.
    assertFalse("Range should not contain value below lower limit", ipTable.contains("192.167.255.255"));
    assertFalse("Range should not contain value above upper limit", ipTable.contains("192.168.0.246"));
}
/**
 * A CIDR range must contain the network and broadcast addresses and every
 * address between them, and exclude addresses immediately outside.
 */
@Test
public void testSubnetRangeContains() throws Exception {
    IPTable ipTable = new IPTable();
    ipTable.add("192.168.0.0/30"); // translates to 192.168.0.0 - 192.168.0.3

    // All four addresses of the /30 are members.
    assertTrue("Range should contain lower limit", ipTable.contains("192.168.0.0"));
    assertTrue("Range should contain upper limit", ipTable.contains("192.168.0.3"));
    assertTrue("Range should contain values in between limits", ipTable.contains("192.168.0.1"));
    assertTrue("Range should contain values in between limits", ipTable.contains("192.168.0.2"));
    // Addresses just outside the subnet are not.
    assertFalse("Range should not contain value below lower limit", ipTable.contains("192.167.255.255"));
    assertFalse("Range should not contain value above upper limit", ipTable.contains("192.168.0.4"));
}
/**
 * A three-octet entry like "192.168.1" is an implicit /24: it must contain
 * all 256 addresses 192.168.1.0 - 192.168.1.255 and nothing outside.
 */
@Test
public void testImplicitRangeContains() throws Exception {
    IPTable ipTable = new IPTable();
    ipTable.add("192.168.1");

    // Endpoints and interior addresses of the implied /24 are members.
    assertTrue("Range should contain lower limit", ipTable.contains("192.168.1.0"));
    assertTrue("Range should contain upper limit", ipTable.contains("192.168.1.255"));
    assertTrue("Range should contain values in between limits", ipTable.contains("192.168.1.123"));
    assertTrue("Range should contain values in between limits", ipTable.contains("192.168.1.234"));
    // Neighboring /24s are excluded.
    assertFalse("Range should not contain value below lower limit", ipTable.contains("192.168.0.0"));
    assertFalse("Range should not contain value above upper limit", ipTable.contains("192.168.2.0"));
}
/** /**
* Test of isEmpty method, of class IPTable. * Test of isEmpty method, of class IPTable.
* @throws java.lang.Exception passed through. * @throws java.lang.Exception passed through.

View File

@@ -8,6 +8,10 @@
package org.dspace.xoai.app; package org.dspace.xoai.app;
import static com.lyncode.xoai.dataprovider.core.Granularity.Second; import static com.lyncode.xoai.dataprovider.core.Granularity.Second;
import static java.util.Objects.nonNull;
import static org.apache.commons.lang.StringUtils.EMPTY;
import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_PARAM;
import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_START;
import static org.dspace.xoai.util.ItemUtils.retrieveMetadata; import static org.dspace.xoai.util.ItemUtils.retrieveMetadata;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
@@ -38,6 +42,8 @@ import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrQuery.ORDER; import org.apache.solr.client.solrj.SolrQuery.ORDER;
import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputDocument;
import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.ResourcePolicy;
@@ -77,6 +83,7 @@ import org.springframework.context.annotation.AnnotationConfigApplicationContext
public class XOAI { public class XOAI {
private static Logger log = LogManager.getLogger(XOAI.class); private static Logger log = LogManager.getLogger(XOAI.class);
// needed because the solr query only returns 10 rows by default
private final Context context; private final Context context;
private boolean optimize; private boolean optimize;
private final boolean verbose; private final boolean verbose;
@@ -94,8 +101,8 @@ public class XOAI {
private final AuthorizeService authorizeService; private final AuthorizeService authorizeService;
private final ItemService itemService; private final ItemService itemService;
private final static ConfigurationService configurationService = DSpaceServicesFactory private final static ConfigurationService configurationService = DSpaceServicesFactory.getInstance()
.getInstance().getConfigurationService(); .getConfigurationService();
private List<XOAIExtensionItemCompilePlugin> extensionPlugins; private List<XOAIExtensionItemCompilePlugin> extensionPlugins;
@@ -152,8 +159,7 @@ public class XOAI {
System.out.println("Using full import."); System.out.println("Using full import.");
result = this.indexAll(); result = this.indexAll();
} else { } else {
SolrQuery solrParams = new SolrQuery("*:*") SolrQuery solrParams = new SolrQuery("*:*").addField("item.lastmodified")
.addField("item.lastmodified")
.addSort("item.lastmodified", ORDER.desc).setRows(1); .addSort("item.lastmodified", ORDER.desc).setRows(1);
SolrDocumentList results = DSpaceSolrSearch.query(solrServerResolver.getServer(), solrParams); SolrDocumentList results = DSpaceSolrSearch.query(solrServerResolver.getServer(), solrParams);
@@ -167,7 +173,6 @@ public class XOAI {
} }
solrServerResolver.getServer().commit(); solrServerResolver.getServer().commit();
if (optimize) { if (optimize) {
println("Optimizing Index"); println("Optimizing Index");
solrServerResolver.getServer().optimize(); solrServerResolver.getServer().optimize();
@@ -183,12 +188,10 @@ public class XOAI {
} }
private int index(Date last) throws DSpaceSolrIndexerException, IOException { private int index(Date last) throws DSpaceSolrIndexerException, IOException {
System.out System.out.println("Incremental import. Searching for documents modified after: " + last.toString());
.println("Incremental import. Searching for documents modified after: "
+ last.toString());
/* /*
* Index all changed or new items or items whose visibility is viable to * Index all changed or new items or items whose visibility is viable to change
* change due to an embargo. * due to an embargo.
*/ */
try { try {
Iterator<Item> discoverableChangedItems = itemService Iterator<Item> discoverableChangedItems = itemService
@@ -204,31 +207,55 @@ public class XOAI {
} }
/** /**
* Get all items already in the index which are viable to change visibility * Get all items already in the index which are viable to change visibility due
* due to an embargo. Only consider those which haven't been modified * to an embargo. Only consider those which haven't been modified anyways since
* anyways since the last update, so they aren't updated twice in one import * the last update, so they aren't updated twice in one import run.
* run.
* *
* @param last * @param last maximum date for an item to be considered for an update
* maximum date for an item to be considered for an update * @return Iterator over list of items which might have changed their visibility
* @return Iterator over list of items which might have changed their * since the last update.
* visibility since the last update.
* @throws DSpaceSolrIndexerException * @throws DSpaceSolrIndexerException
*/ */
private Iterator<Item> getItemsWithPossibleChangesBefore(Date last) throws DSpaceSolrIndexerException, IOException { private Iterator<Item> getItemsWithPossibleChangesBefore(Date last) throws DSpaceSolrIndexerException, IOException {
try { try {
SolrQuery params = new SolrQuery("item.willChangeStatus:true").addField("item.id"); SolrQuery params = new SolrQuery("item.willChangeStatus:true").addField("item.id").setRows(100)
SolrDocumentList documents = DSpaceSolrSearch.query(solrServerResolver.getServer(), params); .addSort("item.handle", SolrQuery.ORDER.asc);
SolrClient solrClient = solrServerResolver.getServer();
List<Item> items = new LinkedList<>(); List<Item> items = new LinkedList<>();
for (int i = 0; i < documents.getNumFound(); i++) { boolean done = false;
Item item = itemService.find(context, /*
UUID.fromString((String) documents.get(i).getFieldValue("item.id"))); * Using solr cursors to paginate and prevent the query from returning 10
* SolrDocument objects only.
*/
String cursorMark = CURSOR_MARK_START;
String nextCursorMark = EMPTY;
while (!done) {
params.set(CURSOR_MARK_PARAM, cursorMark);
QueryResponse response = solrClient.query(params);
nextCursorMark = response.getNextCursorMark();
for (SolrDocument document : response.getResults()) {
Item item = itemService.find(context, UUID.fromString((String) document.getFieldValue("item.id")));
if (nonNull(item)) {
if (nonNull(item.getLastModified())) {
if (item.getLastModified().before(last)) { if (item.getLastModified().before(last)) {
items.add(item); items.add(item);
} }
} else {
log.warn("Skipping item with id " + item.getID());
}
}
}
if (cursorMark.equals(nextCursorMark)) {
done = true;
}
cursorMark = nextCursorMark;
} }
return items.iterator(); return items.iterator();
} catch (SolrServerException | SQLException | DSpaceSolrException ex) { } catch (SolrServerException | SQLException ex) {
throw new DSpaceSolrIndexerException(ex.getMessage(), ex); throw new DSpaceSolrIndexerException(ex.getMessage(), ex);
} }
} }
@@ -250,11 +277,10 @@ public class XOAI {
} }
/** /**
* Check if an item is already indexed. Using this, it is possible to check * Check if an item is already indexed. Using this, it is possible to check if
* if withdrawn or nondiscoverable items have to be indexed at all. * withdrawn or nondiscoverable items have to be indexed at all.
* *
* @param item * @param item Item that should be checked for its presence in the index.
* Item that should be checked for its presence in the index.
* @return has it been indexed? * @return has it been indexed?
*/ */
private boolean checkIfIndexed(Item item) throws IOException { private boolean checkIfIndexed(Item item) throws IOException {
@@ -266,11 +292,11 @@ public class XOAI {
return false; return false;
} }
} }
/** /**
* Check if an item is flagged visible in the index. * Check if an item is flagged visible in the index.
* *
* @param item * @param item Item that should be checked for its presence in the index.
* Item that should be checked for its presence in the index.
* @return has it been indexed? * @return has it been indexed?
*/ */
private boolean checkIfVisibleInOAI(Item item) throws IOException { private boolean checkIfVisibleInOAI(Item item) throws IOException {
@@ -287,8 +313,7 @@ public class XOAI {
} }
} }
private int index(Iterator<Item> iterator) private int index(Iterator<Item> iterator) throws DSpaceSolrIndexerException {
throws DSpaceSolrIndexerException {
try { try {
int i = 0; int i = 0;
int batchSize = configurationService.getIntProperty("oai.import.batch.size", 1000); int batchSize = configurationService.getIntProperty("oai.import.batch.size", 1000);
@@ -302,7 +327,7 @@ public class XOAI {
} else { } else {
list.add(this.index(item)); list.add(this.index(item));
} }
//Uncache the item to keep memory consumption low // Uncache the item to keep memory consumption low
context.uncacheEntity(item); context.uncacheEntity(item);
} catch (SQLException | IOException | XMLStreamException | WritingXmlException ex) { } catch (SQLException | IOException | XMLStreamException | WritingXmlException ex) {
@@ -334,12 +359,11 @@ public class XOAI {
} }
/** /**
* Method to get the most recent date on which the item changed concerning * Method to get the most recent date on which the item changed concerning the
* the OAI deleted status (policy start and end dates for all anonymous READ * OAI deleted status (policy start and end dates for all anonymous READ
* policies and the standard last modification date) * policies and the standard last modification date)
* *
* @param item * @param item Item
* Item
* @return date * @return date
* @throws SQLException * @throws SQLException
*/ */
@@ -382,17 +406,16 @@ public class XOAI {
boolean isIndexed = this.checkIfIndexed(item); boolean isIndexed = this.checkIfIndexed(item);
/* /*
* If the item is not under embargo, it should be visible. If it is, * If the item is not under embargo, it should be visible. If it is, make it
* make it invisible if this is the first time it is indexed. For * invisible if this is the first time it is indexed. For subsequent index runs,
* subsequent index runs, keep the current status, so that if the item * keep the current status, so that if the item is embargoed again, it is
* is embargoed again, it is flagged as deleted instead and does not * flagged as deleted instead and does not just disappear, or if it is still
* just disappear, or if it is still under embargo, it won't become * under embargo, it won't become visible and be known to harvesters as deleted
* visible and be known to harvesters as deleted before it gets * before it gets disseminated for the first time. The item has to be indexed
* disseminated for the first time. The item has to be indexed directly * directly after publication even if it is still embargoed, because its
* after publication even if it is still embargoed, because its * lastModified date will not change when the embargo end date (or start date)
* lastModified date will not change when the embargo end date (or start * is reached. To circumvent this, an item which will change its status in the
* date) is reached. To circumvent this, an item which will change its * future will be marked as such.
* status in the future will be marked as such.
*/ */
boolean isPublic = isEmbargoed ? (isIndexed ? isCurrentlyVisible : false) : true; boolean isPublic = isEmbargoed ? (isIndexed ? isCurrentlyVisible : false) : true;
@@ -404,33 +427,31 @@ public class XOAI {
doc.addField("item.willChangeStatus", willChangeStatus(item)); doc.addField("item.willChangeStatus", willChangeStatus(item));
/* /*
* Mark an item as deleted not only if it is withdrawn, but also if it * Mark an item as deleted not only if it is withdrawn, but also if it is made
* is made private, because items should not simply disappear from OAI * private, because items should not simply disappear from OAI with a transient
* with a transient deletion policy. Do not set the flag for still * deletion policy. Do not set the flag for still invisible embargoed items,
* invisible embargoed items, because this will override the item.public * because this will override the item.public flag.
* flag.
*/ */
doc.addField("item.deleted", doc.addField("item.deleted",
(item.isWithdrawn() || !item.isDiscoverable() || (isEmbargoed ? isPublic : false))); (item.isWithdrawn() || !item.isDiscoverable() || (isEmbargoed ? isPublic : false)));
/* /*
* An item that is embargoed will potentially not be harvested by * An item that is embargoed will potentially not be harvested by incremental
* incremental harvesters if the from and until params do not encompass * harvesters if the from and until params do not encompass both the standard
* both the standard lastModified date and the anonymous-READ resource * lastModified date and the anonymous-READ resource policy start date. The same
* policy start date. The same is true for the end date, where * is true for the end date, where harvesters might not get a tombstone record.
* harvesters might not get a tombstone record. Therefore, consider all * Therefore, consider all relevant policy dates and the standard lastModified
* relevant policy dates and the standard lastModified date and take the * date and take the most recent of those which have already passed.
* most recent of those which have already passed.
*/ */
doc.addField("item.lastmodified", SolrUtils.getDateFormatter() doc.addField("item.lastmodified",
.format(this.getMostRecentModificationDate(item))); SolrUtils.getDateFormatter().format(this.getMostRecentModificationDate(item)));
if (item.getSubmitter() != null) { if (item.getSubmitter() != null) {
doc.addField("item.submitter", item.getSubmitter().getEmail()); doc.addField("item.submitter", item.getSubmitter().getEmail());
} }
for (Collection col: item.getCollections()) { for (Collection col : item.getCollections()) {
doc.addField("item.collections", "col_" + col.getHandle().replace("/", "_")); doc.addField("item.collections", "col_" + col.getHandle().replace("/", "_"));
} }
for (Community com : collectionsService.flatParentCommunities(context, item)) { for (Community com : collectionsService.flatParentCommunities(context, item)) {
@@ -457,8 +478,7 @@ public class XOAI {
// Message output before processing - for debugging purposes // Message output before processing - for debugging purposes
if (verbose) { if (verbose) {
println(String.format("Item %s with handle %s is about to be indexed", println(String.format("Item %s with handle %s is about to be indexed", item.getID().toString(), handle));
item.getID().toString(), handle));
} }
ByteArrayOutputStream out = new ByteArrayOutputStream(); ByteArrayOutputStream out = new ByteArrayOutputStream();
@@ -476,8 +496,7 @@ public class XOAI {
doc.addField("item.compile", out.toString()); doc.addField("item.compile", out.toString());
if (verbose) { if (verbose) {
println(String.format("Item %s with handle %s indexed", println(String.format("Item %s with handle %s indexed", item.getID().toString(), handle));
item.getID().toString(), handle));
} }
return doc; return doc;
@@ -510,12 +529,10 @@ public class XOAI {
return pub; return pub;
} }
private static boolean getKnownExplanation(Throwable t) { private static boolean getKnownExplanation(Throwable t) {
if (t instanceof ConnectException) { if (t instanceof ConnectException) {
System.err.println("Solr server (" System.err.println(
+ configurationService.getProperty("oai.solr.url", "") "Solr server (" + configurationService.getProperty("oai.solr.url", "") + ") is down, turn it on.");
+ ") is down, turn it on.");
return true; return true;
} }
@@ -557,10 +574,8 @@ public class XOAI {
public static void main(String[] argv) throws IOException, ConfigurationException { public static void main(String[] argv) throws IOException, ConfigurationException {
AnnotationConfigApplicationContext applicationContext = new AnnotationConfigApplicationContext(
AnnotationConfigApplicationContext applicationContext = new AnnotationConfigApplicationContext(new Class[] { new Class[] { BasicConfiguration.class });
BasicConfiguration.class
});
XOAICacheService cacheService = applicationContext.getBean(XOAICacheService.class); XOAICacheService cacheService = applicationContext.getBean(XOAICacheService.class);
XOAIItemCacheService itemCacheService = applicationContext.getBean(XOAIItemCacheService.class); XOAIItemCacheService itemCacheService = applicationContext.getBean(XOAIItemCacheService.class);
@@ -571,21 +586,19 @@ public class XOAI {
CommandLineParser parser = new DefaultParser(); CommandLineParser parser = new DefaultParser();
Options options = new Options(); Options options = new Options();
options.addOption("c", "clear", false, "Clear index before indexing"); options.addOption("c", "clear", false, "Clear index before indexing");
options.addOption("o", "optimize", false, options.addOption("o", "optimize", false, "Optimize index at the end");
"Optimize index at the end");
options.addOption("v", "verbose", false, "Verbose output"); options.addOption("v", "verbose", false, "Verbose output");
options.addOption("h", "help", false, "Shows some help"); options.addOption("h", "help", false, "Shows some help");
options.addOption("n", "number", true, "FOR DEVELOPMENT MUST DELETE"); options.addOption("n", "number", true, "FOR DEVELOPMENT MUST DELETE");
CommandLine line = parser.parse(options, argv); CommandLine line = parser.parse(options, argv);
String[] validSolrCommands = {COMMAND_IMPORT, COMMAND_CLEAN_CACHE}; String[] validSolrCommands = { COMMAND_IMPORT, COMMAND_CLEAN_CACHE };
String[] validDatabaseCommands = {COMMAND_CLEAN_CACHE, COMMAND_COMPILE_ITEMS, COMMAND_ERASE_COMPILED_ITEMS}; String[] validDatabaseCommands = { COMMAND_CLEAN_CACHE, COMMAND_COMPILE_ITEMS,
COMMAND_ERASE_COMPILED_ITEMS };
boolean solr = true; // Assuming solr by default boolean solr = true; // Assuming solr by default
solr = !("database").equals(configurationService.getProperty("oai.storage", "solr")); solr = !("database").equals(configurationService.getProperty("oai.storage", "solr"));
boolean run = false; boolean run = false;
if (line.getArgs().length > 0) { if (line.getArgs().length > 0) {
if (solr) { if (solr) {
@@ -607,10 +620,7 @@ public class XOAI {
if (COMMAND_IMPORT.equals(command)) { if (COMMAND_IMPORT.equals(command)) {
ctx = new Context(Context.Mode.READ_ONLY); ctx = new Context(Context.Mode.READ_ONLY);
XOAI indexer = new XOAI(ctx, XOAI indexer = new XOAI(ctx, line.hasOption('o'), line.hasOption('c'), line.hasOption('v'));
line.hasOption('o'),
line.hasOption('c'),
line.hasOption('v'));
applicationContext.getAutowireCapableBeanFactory().autowireBean(indexer); applicationContext.getAutowireCapableBeanFactory().autowireBean(indexer);
@@ -635,8 +645,7 @@ public class XOAI {
} }
System.out.println("OAI 2.0 manager action ended. It took " System.out.println("OAI 2.0 manager action ended. It took "
+ ((System.currentTimeMillis() - start) / 1000) + ((System.currentTimeMillis() - start) / 1000) + " seconds.");
+ " seconds.");
} else { } else {
usage(); usage();
} }
@@ -688,7 +697,7 @@ public class XOAI {
private static void usage() { private static void usage() {
boolean solr = true; // Assuming solr by default boolean solr = true; // Assuming solr by default
solr = !("database").equals(configurationService.getProperty("oai.storage","solr")); solr = !("database").equals(configurationService.getProperty("oai.storage", "solr"));
if (solr) { if (solr) {
System.out.println("OAI Manager Script"); System.out.println("OAI Manager Script");

View File

@@ -11,7 +11,7 @@ var CollReport = function() {
//this.hasSorttable = function(){return true;} //this.hasSorttable = function(){return true;}
this.getLangSuffix = function(){ this.getLangSuffix = function(){
return "[en]"; return "[en]";
} };
//Indicate if Password Authentication is supported //Indicate if Password Authentication is supported
//this.makeAuthLink = function(){return true;}; //this.makeAuthLink = function(){return true;};
@@ -38,7 +38,7 @@ var CollReport = function() {
icollection : "", icollection : "",
ifilter : "", ifilter : "",
}; };
} };
this.getCurrentParameters = function(){ this.getCurrentParameters = function(){
return { return {
"show_fields[]" : this.myMetadataFields.getShowFields(), "show_fields[]" : this.myMetadataFields.getShowFields(),
@@ -49,7 +49,7 @@ var CollReport = function() {
icollection : $("#icollection").val(), icollection : $("#icollection").val(),
ifilter : $("#ifilter").val(), ifilter : $("#ifilter").val(),
}; };
} };
var self = this; var self = this;
this.init = function() { this.init = function() {
@@ -61,7 +61,7 @@ var CollReport = function() {
collapsible: true, collapsible: true,
active: 2 active: 2
}); });
} };
this.myAuth.callback = function(data) { this.myAuth.callback = function(data) {
self.createCollectionTable(); self.createCollectionTable();
@@ -71,7 +71,7 @@ var CollReport = function() {
$("#refresh-fields,#refresh-fields-bits").bind("click", function(){ $("#refresh-fields,#refresh-fields-bits").bind("click", function(){
self.drawItemTable($("#icollection").val(), $("#ifilter").val(), 0); self.drawItemTable($("#icollection").val(), $("#ifilter").val(), 0);
}); });
} };
this.createCollectionTable = function() { this.createCollectionTable = function() {
var self = this; var self = this;
@@ -93,7 +93,7 @@ var CollReport = function() {
self.myHtmlUtil.makeTotalCol(thn); self.myHtmlUtil.makeTotalCol(thn);
self.addCollections(); self.addCollections();
} };
this.addCollections = function() { this.addCollections = function() {
var self = this; var self = this;
@@ -144,8 +144,6 @@ var CollReport = function() {
self.myHtmlUtil.addTd(tr, parval).addClass("title comm"); self.myHtmlUtil.addTd(tr, parval).addClass("title comm");
self.myHtmlUtil.addTdAnchor(tr, coll.name, self.ROOTPATH + coll.handle).addClass("title"); self.myHtmlUtil.addTdAnchor(tr, coll.name, self.ROOTPATH + coll.handle).addClass("title");
var td = self.myHtmlUtil.addTd(tr, "").addClass("num").addClass("link").addClass("numCount");
td = self.myHtmlUtil.addTd(tr, "").addClass("num").addClass("numFiltered");
}; };
@@ -197,7 +195,7 @@ var CollReport = function() {
$(".showCollections").attr("disabled", false); $(".showCollections").attr("disabled", false);
} }
}); });
} };
this.loadData = function() { this.loadData = function() {
self.spinner.spin($("h1")[0]); self.spinner.spin($("h1")[0]);
@@ -208,7 +206,7 @@ var CollReport = function() {
$("#table tr.data").addClass("processing"); $("#table tr.data").addClass("processing");
self.myFilters.filterString = self.myFilters.getFilterList(); self.myFilters.filterString = self.myFilters.getFilterList();
self.doRow(0, self.THREADS, self.loadId); self.doRow(0, self.THREADS, self.loadId);
} };
this.doRow = function(row, threads, curLoadId) { this.doRow = function(row, threads, curLoadId) {
if (self.loadId != curLoadId) return; if (self.loadId != curLoadId) return;
@@ -285,14 +283,14 @@ var CollReport = function() {
$("#table").addClass("sortable"); $("#table").addClass("sortable");
sorttable.makeSortable($("#table")[0]); sorttable.makeSortable($("#table")[0]);
} }
} };
this.totalFilters = function() { this.totalFilters = function() {
var colcount = $("#table tr th").length; var colcount = $("#table tr th").length;
for(var i=4; i<colcount; i++) { for(var i=4; i<colcount; i++) {
self.myHtmlUtil.totalCol(i); self.myHtmlUtil.totalCol(i);
} }
} };
this.updateRow = function(cid, offset) { this.updateRow = function(cid, offset) {
var tr = $("tr[cid="+cid+"]"); var tr = $("tr[cid="+cid+"]");
@@ -351,7 +349,7 @@ var CollReport = function() {
var title = "Collection partially processed, item counts are incomplete. "; var title = "Collection partially processed, item counts are incomplete. ";
if (numItems >= self.TOOBIG) { if (numItems >= self.TOOBIG) {
td.addClass("toobig"); td.addClass("toobig");
title+= "\nIt will take significant time to apply this filter to the entire collection." title+= "\nIt will take significant time to apply this filter to the entire collection.";
} }
td.attr("title", title); td.attr("title", title);
return false; return false;
@@ -359,7 +357,7 @@ var CollReport = function() {
self.totalFilters(); self.totalFilters();
} }
return true; return true;
} };
this.setCellCount = function(tr, cid, offset, isPartial, itemFilter) { this.setCellCount = function(tr, cid, offset, isPartial, itemFilter) {
var filterName = itemFilter["filter-name"]; var filterName = itemFilter["filter-name"];
@@ -391,7 +389,7 @@ var CollReport = function() {
$("#ifilter").val(filterName); $("#ifilter").val(filterName);
}); });
} }
} };
this.drawItemTable = function(cid, filter, offset) { this.drawItemTable = function(cid, filter, offset) {
@@ -433,7 +431,7 @@ var CollReport = function() {
offset: offset, offset: offset,
"show_fields[]" : fields, "show_fields[]" : fields,
"show_fields_bits[]" : bitfields, "show_fields_bits[]" : bitfields,
} };
$.ajax({ $.ajax({
url: "/rest/filtered-collections/"+cid, url: "/rest/filtered-collections/"+cid,
@@ -452,7 +450,6 @@ var CollReport = function() {
self.myHtmlUtil.addTd(tr, item.name).addClass("ititle"); self.myHtmlUtil.addTd(tr, item.name).addClass("ititle");
if (fields != null) { if (fields != null) {
$.each(fields, function(index, field){ $.each(fields, function(index, field){
var text = "";
var td = self.myHtmlUtil.addTd(tr, ""); var td = self.myHtmlUtil.addTd(tr, "");
$.each(item.metadata, function(mindex,mv){ $.each(item.metadata, function(mindex,mv){
if (mv.key == field) { if (mv.key == field) {
@@ -493,7 +490,7 @@ var CollReport = function() {
$("#itemResults").accordion("option", "active", self.IACCIDX_ITEM); $("#itemResults").accordion("option", "active", self.IACCIDX_ITEM);
} }
}); });
} };
//Ignore the first column containing a row number and the item handle //Ignore the first column containing a row number and the item handle
this.exportCol = function(colnum, col) { this.exportCol = function(colnum, col) {
@@ -503,8 +500,8 @@ var CollReport = function() {
data += (colnum == 1) ? "" : ","; data += (colnum == 1) ? "" : ",";
data += self.exportCell(col); data += self.exportCell(col);
return data; return data;
} };
} };
CollReport.prototype = Object.create(Report.prototype); CollReport.prototype = Object.create(Report.prototype);
$(document).ready(function(){ $(document).ready(function(){

Some files were not shown because too many files have changed in this diff Show More