Mirror of https://github.com/DSpace/DSpace.git (synced 2025-10-18 15:33:09 +00:00)
Merge branch 'main' into CST-4503-AddingNewRelationshipsInEditItem
@@ -743,7 +743,7 @@
 <dependency>
 <groupId>org.flywaydb</groupId>
 <artifactId>flyway-core</artifactId>
-<version>6.5.5</version>
+<version>6.5.7</version>
 </dependency>

 <!-- Google Analytics -->
@@ -81,7 +81,7 @@ public class MetadataImporter {
 * @throws SQLException if database error
 * @throws IOException if IO error
 * @throws TransformerException if transformer error
-* @throws ParserConfigurationException if config error
+* @throws ParserConfigurationException if configuration error
 * @throws AuthorizeException if authorization error
 * @throws SAXException if parser error
 * @throws NonUniqueMetadataException if duplicate metadata
@@ -91,7 +91,6 @@ public class MetadataImporter {
 throws ParseException, SQLException, IOException, TransformerException,
 ParserConfigurationException, AuthorizeException, SAXException,
 NonUniqueMetadataException, RegistryImportException {
-boolean forceUpdate = false;

 // create an options object and populate it
 CommandLineParser parser = new DefaultParser();
@@ -100,16 +99,14 @@ public class MetadataImporter {
 options.addOption("u", "update", false, "update an existing schema");
 CommandLine line = parser.parse(options, args);

-String file = null;
 if (line.hasOption('f')) {
-file = line.getOptionValue('f');
+String file = line.getOptionValue('f');
+boolean forceUpdate = line.hasOption('u');
+loadRegistry(file, forceUpdate);
 } else {
 usage();
-System.exit(0);
+System.exit(1);
 }

-forceUpdate = line.hasOption('u');
-loadRegistry(file, forceUpdate);
 }

 /**
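For readability, the reworked argument handling in MetadataImporter.main() boils down to the following standalone sketch of the post-change flow shown above; the class name, the -f option description and the usage text are illustrative stand-ins, not taken from the file.

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.CommandLineParser;
    import org.apache.commons.cli.DefaultParser;
    import org.apache.commons.cli.Options;

    public class MetadataImporterFlowSketch {
        public static void main(String[] args) throws Exception {
            Options options = new Options();
            options.addOption("f", "file", true, "source registry file");           // illustrative description
            options.addOption("u", "update", false, "update an existing schema");
            CommandLineParser parser = new DefaultParser();
            CommandLine line = parser.parse(options, args);

            if (line.hasOption('f')) {
                // 'file' and 'forceUpdate' are now declared inside the branch that uses them
                String file = line.getOptionValue('f');
                boolean forceUpdate = line.hasOption('u');
                System.out.println("would call loadRegistry(" + file + ", " + forceUpdate + ")");
            } else {
                System.out.println("usage: MetadataImporter -f <file> [-u]");        // stand-in for usage()
                System.exit(1);  // a missing -f now exits with status 1 instead of 0
            }
        }
    }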
@@ -120,7 +117,7 @@ public class MetadataImporter {
 * @throws SQLException if database error
 * @throws IOException if IO error
 * @throws TransformerException if transformer error
-* @throws ParserConfigurationException if config error
+* @throws ParserConfigurationException if configuration error
 * @throws AuthorizeException if authorization error
 * @throws SAXException if parser error
 * @throws NonUniqueMetadataException if duplicate metadata
@@ -227,7 +224,7 @@ public class MetadataImporter {
 /**
 * Process a node in the metadata registry XML file. The node must
 * be a "dc-type" node. If the type already exists, then it
-* will not be reimported
+* will not be re-imported.
 *
 * @param context DSpace context object
 * @param node the node in the DOM tree
@@ -24,7 +24,7 @@ import org.dspace.content.BitstreamFormat;
 import org.dspace.content.factory.ContentServiceFactory;
 import org.dspace.content.service.BitstreamFormatService;
 import org.dspace.core.Context;
-import org.dspace.core.LogManager;
+import org.dspace.core.LogHelper;
 import org.w3c.dom.Document;
 import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
@@ -95,7 +95,7 @@ public class RegistryLoader {

 System.exit(1);
 } catch (Exception e) {
-log.fatal(LogManager.getHeader(context, "error_loading_registries",
+log.fatal(LogHelper.getHeader(context, "error_loading_registries",
 ""), e);

 System.err.println("Error: \n - " + e.getMessage());
@@ -135,7 +135,7 @@
 loadFormat(context, n);
 }

-log.info(LogManager.getHeader(context, "load_bitstream_formats",
+log.info(LogHelper.getHeader(context, "load_bitstream_formats",
 "number_loaded=" + typeNodes.getLength()));
 }

@@ -159,7 +159,7 @@ public class DSpaceCSV implements Serializable {
 columnCounter++;

 // Remove surrounding quotes if there are any
-if ((element.startsWith("\"")) && (element.endsWith("\""))) {
+if (element.startsWith("\"") && element.endsWith("\"")) {
 element = element.substring(1, element.length() - 1);
 }

@@ -337,15 +337,15 @@ public class DSpaceCSV implements Serializable {
 /**
 * Set the value separator for multiple values stored in one csv value.
 *
-* Is set in bulkedit.cfg as valueseparator
+* Is set in {@code bulkedit.cfg} as {@code valueseparator}.
 *
-* If not set, defaults to double pipe '||'
+* If not set, defaults to double pipe '||'.
 */
 private void setValueSeparator() {
 // Get the value separator
 valueSeparator = DSpaceServicesFactory.getInstance().getConfigurationService()
 .getProperty("bulkedit.valueseparator");
-if ((valueSeparator != null) && (!"".equals(valueSeparator.trim()))) {
+if ((valueSeparator != null) && !valueSeparator.trim().isEmpty()) {
 valueSeparator = valueSeparator.trim();
 } else {
 valueSeparator = "||";
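The rewrite from (!"".equals(x.trim())) to !x.trim().isEmpty() seen here recurs in the next few hunks; the two forms are equivalent whenever the value is non-null, which each call site already guards. A minimal self-contained check (hypothetical class name):

    public class SeparatorCheckSketch {
        public static void main(String[] args) {
            String valueSeparator = "  ||  ";
            // Both forms agree for any non-null string; the surrounding
            // (valueSeparator != null) check already rules out null.
            boolean oldForm = (valueSeparator != null) && (!"".equals(valueSeparator.trim()));
            boolean newForm = (valueSeparator != null) && !valueSeparator.trim().isEmpty();
            System.out.println(oldForm == newForm); // prints: true
        }
    }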
@@ -360,7 +360,7 @@ public class DSpaceCSV implements Serializable {
 /**
 * Set the field separator use to separate fields in the csv.
 *
-* Is set in bulkedit.cfg as fieldseparator
+* Is set in {@code bulkedit.cfg} as {@code fieldseparator}.
 *
 * If not set, defaults to comma ','.
 *
@@ -371,7 +371,7 @@ public class DSpaceCSV implements Serializable {
 // Get the value separator
 fieldSeparator = DSpaceServicesFactory.getInstance().getConfigurationService()
 .getProperty("bulkedit.fieldseparator");
-if ((fieldSeparator != null) && (!"".equals(fieldSeparator.trim()))) {
+if ((fieldSeparator != null) && !fieldSeparator.trim().isEmpty()) {
 fieldSeparator = fieldSeparator.trim();
 if ("tab".equals(fieldSeparator)) {
 fieldSeparator = "\t";
@@ -395,15 +395,15 @@ public class DSpaceCSV implements Serializable {
 /**
 * Set the authority separator for value with authority data.
 *
-* Is set in dspace.cfg as bulkedit.authorityseparator
+* Is set in {@code dspace.cfg} as {@code bulkedit.authorityseparator}.
 *
-* If not set, defaults to double colon '::'
+* If not set, defaults to double colon '::'.
 */
 private void setAuthoritySeparator() {
 // Get the value separator
 authoritySeparator = DSpaceServicesFactory.getInstance().getConfigurationService()
 .getProperty("bulkedit.authorityseparator");
-if ((authoritySeparator != null) && (!"".equals(authoritySeparator.trim()))) {
+if ((authoritySeparator != null) && !authoritySeparator.trim().isEmpty()) {
 authoritySeparator = authoritySeparator.trim();
 } else {
 authoritySeparator = "::";
@@ -508,7 +508,7 @@ public class DSpaceCSV implements Serializable {
 int i = 0;
 for (String part : bits) {
 int bitcounter = part.length() - part.replaceAll("\"", "").length();
-if ((part.startsWith("\"")) && ((!part.endsWith("\"")) || ((bitcounter & 1) == 1))) {
+if (part.startsWith("\"") && (!part.endsWith("\"") || ((bitcounter & 1) == 1))) {
 found = true;
 String add = bits.get(i) + fieldSeparator + bits.get(i + 1);
 bits.remove(i);
@@ -524,7 +524,7 @@ public class DSpaceCSV implements Serializable {
 // Deal with quotes around the elements
 int i = 0;
 for (String part : bits) {
-if ((part.startsWith("\"")) && (part.endsWith("\""))) {
+if (part.startsWith("\"") && part.endsWith("\"")) {
 part = part.substring(1, part.length() - 1);
 bits.set(i, part);
 }
@@ -564,7 +564,7 @@ public class DSpaceCSV implements Serializable {
 for (String part : bits) {
 if (i > 0) {
 // Is this a last empty item?
-if ((last) && (i == headings.size())) {
+if (last && (i == headings.size())) {
 part = "";
 }

@@ -577,7 +577,7 @@ public class DSpaceCSV implements Serializable {
 csvLine.add(headings.get(i - 1), null);
 String[] elements = part.split(escapedValueSeparator);
 for (String element : elements) {
-if ((element != null) && (!"".equals(element))) {
+if ((element != null) && !element.isEmpty()) {
 csvLine.add(headings.get(i - 1), element);
 }
 }
@@ -629,18 +629,18 @@ public class DSpaceCSV implements Serializable {
 public InputStream getInputStream() {
 StringBuilder stringBuilder = new StringBuilder();
 for (String csvLine : getCSVLinesAsStringArray()) {
-stringBuilder.append(csvLine + "\n");
+stringBuilder.append(csvLine).append("\n");
 }
 return IOUtils.toInputStream(stringBuilder.toString(), StandardCharsets.UTF_8);
 }

 /**
-* Is it Ok to export this value? When exportAll is set to false, we don't export
+* Is it okay to export this value? When exportAll is set to false, we don't export
 * some of the metadata elements.
 *
-* The list can be configured via the key ignore-on-export in bulkedit.cfg
+* The list can be configured via the key ignore-on-export in {@code bulkedit.cfg}.
 *
-* @param md The Metadatum to examine
+* @param md The MetadataField to examine
 * @return Whether or not it is OK to export this element
 */
 protected boolean okToExport(MetadataField md) {
@@ -649,12 +649,8 @@ public class DSpaceCSV implements Serializable {
 if (md.getQualifier() != null) {
 key += "." + md.getQualifier();
 }
-if (ignore.get(key) != null) {
-return false;
-}

 // Must be OK, so don't ignore
-return true;
+return ignore.get(key) == null;
 }

 /**
@@ -25,6 +25,7 @@ import javax.annotation.Nullable;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.logging.log4j.Logger;
+import org.dspace.app.util.RelationshipUtils;
 import org.dspace.authority.AuthorityValue;
 import org.dspace.authority.factory.AuthorityServiceFactory;
 import org.dspace.authority.service.AuthorityValueService;
@@ -53,7 +54,7 @@ import org.dspace.content.service.RelationshipTypeService;
 import org.dspace.content.service.WorkspaceItemService;
 import org.dspace.core.Constants;
 import org.dspace.core.Context;
-import org.dspace.core.LogManager;
+import org.dspace.core.LogHelper;
 import org.dspace.eperson.EPerson;
 import org.dspace.eperson.factory.EPersonServiceFactory;
 import org.dspace.handle.factory.HandleServiceFactory;
@@ -640,7 +641,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
 all += part + ",";
 }
 all = all.substring(0, all.length());
-log.debug(LogManager.getHeader(c, "metadata_import",
+log.debug(LogHelper.getHeader(c, "metadata_import",
 "item_id=" + item.getID() + ",fromCSV=" + all));

 // Don't compare collections or actions or rowNames
@@ -677,7 +678,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
 qualifier = qualifier.substring(0, qualifier.indexOf('['));
 }
 }
-log.debug(LogManager.getHeader(c, "metadata_import",
+log.debug(LogHelper.getHeader(c, "metadata_import",
 "item_id=" + item.getID() + ",fromCSV=" + all +
 ",looking_for_schema=" + schema +
 ",looking_for_element=" + element +
@@ -697,7 +698,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
 .getConfidence() : Choices.CF_ACCEPTED);
 }
 i++;
-log.debug(LogManager.getHeader(c, "metadata_import",
+log.debug(LogHelper.getHeader(c, "metadata_import",
 "item_id=" + item.getID() + ",fromCSV=" + all +
 ",found=" + dcv.getValue()));
 }
@@ -748,7 +749,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
 // column "dc.contributor.author" so don't remove it
 if ((value != null) && (!"".equals(value)) && (!contains(value, fromCSV)) && fromAuthority == null) {
 // Remove it
-log.debug(LogManager.getHeader(c, "metadata_import",
+log.debug(LogHelper.getHeader(c, "metadata_import",
 "item_id=" + item.getID() + ",fromCSV=" + all +
 ",removing_schema=" + schema +
 ",removing_element=" + element +
@@ -1793,36 +1794,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
|
|||||||
*/
|
*/
|
||||||
private RelationshipType matchRelationshipType(List<RelationshipType> relTypes,
|
private RelationshipType matchRelationshipType(List<RelationshipType> relTypes,
|
||||||
String targetType, String originType, String originTypeName) {
|
String targetType, String originType, String originTypeName) {
|
||||||
RelationshipType foundRelationshipType = null;
|
return RelationshipUtils.matchRelationshipType(relTypes, targetType, originType, originTypeName);
|
||||||
if (originTypeName.split("\\.").length > 1) {
|
|
||||||
originTypeName = originTypeName.split("\\.")[1];
|
|
||||||
}
|
|
||||||
for (RelationshipType relationshipType : relTypes) {
|
|
||||||
// Is origin type leftward or righward
|
|
||||||
boolean isLeft = false;
|
|
||||||
if (relationshipType.getLeftType().getLabel().equalsIgnoreCase(originType)) {
|
|
||||||
isLeft = true;
|
|
||||||
}
|
|
||||||
if (isLeft) {
|
|
||||||
// Validate typeName reference
|
|
||||||
if (!relationshipType.getLeftwardType().equalsIgnoreCase(originTypeName)) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if (relationshipType.getLeftType().getLabel().equalsIgnoreCase(originType) &&
|
|
||||||
relationshipType.getRightType().getLabel().equalsIgnoreCase(targetType)) {
|
|
||||||
foundRelationshipType = relationshipType;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
if (!relationshipType.getRightwardType().equalsIgnoreCase(originTypeName)) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if (relationshipType.getLeftType().getLabel().equalsIgnoreCase(targetType) &&
|
|
||||||
relationshipType.getRightType().getLabel().equalsIgnoreCase(originType)) {
|
|
||||||
foundRelationshipType = relationshipType;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return foundRelationshipType;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
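The matching logic deleted above is delegated to org.dspace.app.util.RelationshipUtils, which this diff does not show. Reconstructed from the removed lines, a static utility with the same behaviour would look roughly like the sketch below; the real class added on this branch may differ in naming, javadoc and formatting.

    import java.util.List;

    import org.dspace.content.RelationshipType;

    public class RelationshipUtils {

        private RelationshipUtils() {
        }

        public static RelationshipType matchRelationshipType(List<RelationshipType> relTypes,
                String targetType, String originType, String originTypeName) {
            RelationshipType foundRelationshipType = null;
            // A qualified type name such as "a.b" is reduced to the part after the first dot
            if (originTypeName.split("\\.").length > 1) {
                originTypeName = originTypeName.split("\\.")[1];
            }
            for (RelationshipType relationshipType : relTypes) {
                // Is the origin type on the left or the right side of this relationship type?
                boolean isLeft = relationshipType.getLeftType().getLabel().equalsIgnoreCase(originType);
                if (isLeft) {
                    // Validate the typeName reference against the leftward type
                    if (!relationshipType.getLeftwardType().equalsIgnoreCase(originTypeName)) {
                        continue;
                    }
                    if (relationshipType.getLeftType().getLabel().equalsIgnoreCase(originType)
                            && relationshipType.getRightType().getLabel().equalsIgnoreCase(targetType)) {
                        foundRelationshipType = relationshipType;
                    }
                } else {
                    if (!relationshipType.getRightwardType().equalsIgnoreCase(originTypeName)) {
                        continue;
                    }
                    if (relationshipType.getLeftType().getLabel().equalsIgnoreCase(targetType)
                            && relationshipType.getRightType().getLabel().equalsIgnoreCase(originType)) {
                        foundRelationshipType = relationshipType;
                    }
                }
            }
            return foundRelationshipType;
        }
    }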
@@ -13,11 +13,8 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.UUID;

-import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.cli.DefaultParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Options;
 import org.dspace.authorize.AuthorizeException;
 import org.dspace.content.Collection;
 import org.dspace.content.DSpaceObject;
@@ -36,221 +33,223 @@ import org.dspace.harvest.HarvestingException;
 import org.dspace.harvest.OAIHarvester;
 import org.dspace.harvest.factory.HarvestServiceFactory;
 import org.dspace.harvest.service.HarvestedCollectionService;
+import org.dspace.scripts.DSpaceRunnable;
+import org.dspace.utils.DSpace;

 /**
 * Test class for harvested collections.
 *
 * @author Alexey Maslov
 */
-public class Harvest {
+public class Harvest extends DSpaceRunnable<HarvestScriptConfiguration> {
-private static Context context;

-private static final HarvestedCollectionService harvestedCollectionService =
+private HarvestedCollectionService harvestedCollectionService;
+protected EPersonService ePersonService;
+private CollectionService collectionService;

+private boolean help;
+private String command = null;
+private String collection = null;
+private String oaiSource = null;
+private String oaiSetID = null;
+private String metadataKey = null;
+private int harvestType = 0;

+protected Context context;


+public HarvestScriptConfiguration getScriptConfiguration() {
+return new DSpace().getServiceManager()
+.getServiceByName("harvest", HarvestScriptConfiguration.class);
+}

+public void setup() throws ParseException {
+harvestedCollectionService =
 HarvestServiceFactory.getInstance().getHarvestedCollectionService();
-private static final EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
+ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
-private static final CollectionService collectionService =
+collectionService =
 ContentServiceFactory.getInstance().getCollectionService();

-public static void main(String[] argv) throws Exception {
+assignCurrentUserInContext();
-// create an options object and populate it
-CommandLineParser parser = new DefaultParser();

-Options options = new Options();
+help = commandLine.hasOption('h');

-options.addOption("p", "purge", false, "delete all items in the collection");
-options.addOption("r", "run", false, "run the standard harvest procedure");
-options.addOption("g", "ping", false, "test the OAI server and set");
-options.addOption("s", "setup", false, "Set the collection up for harvesting");
-options.addOption("S", "start", false, "start the harvest loop");
-options.addOption("R", "reset", false, "reset harvest status on all collections");
-options.addOption("P", "purge", false, "purge all harvestable collections");


-options.addOption("e", "eperson", true,
+if (commandLine.hasOption('s')) {
-"eperson");
-options.addOption("c", "collection", true,
-"harvesting collection (handle or id)");
-options.addOption("t", "type", true,
-"type of harvesting (0 for none)");
-options.addOption("a", "address", true,
-"address of the OAI-PMH server");
-options.addOption("i", "oai_set_id", true,
-"id of the PMH set representing the harvested collection");
-options.addOption("m", "metadata_format", true,
-"the name of the desired metadata format for harvesting, resolved to namespace and " +
-"crosswalk in dspace.cfg");

-options.addOption("h", "help", false, "help");

-CommandLine line = parser.parse(options, argv);

-String command = null;
-String eperson = null;
-String collection = null;
-String oaiSource = null;
-String oaiSetID = null;
-String metadataKey = null;
-int harvestType = 0;

-if (line.hasOption('h')) {
-HelpFormatter myhelp = new HelpFormatter();
-myhelp.printHelp("Harvest\n", options);
-System.out.println("\nPING OAI server: Harvest -g -a oai_source -i oai_set_id");
-System.out.println(
-"SETUP a collection for harvesting: Harvest -s -c collection -t harvest_type -a oai_source -i " +
-"oai_set_id -m metadata_format");
-System.out.println("RUN harvest once: Harvest -r -e eperson -c collection");
-System.out.println("START harvest scheduler: Harvest -S");
-System.out.println("RESET all harvest status: Harvest -R");
-System.out.println("PURGE a collection of items and settings: Harvest -p -e eperson -c collection");
-System.out.println("PURGE all harvestable collections: Harvest -P -e eperson");


-System.exit(0);
-}

-if (line.hasOption('s')) {
 command = "config";
 }
-if (line.hasOption('p')) {
+if (commandLine.hasOption('p')) {
 command = "purge";
 }
-if (line.hasOption('r')) {
+if (commandLine.hasOption('r')) {
 command = "run";
 }
-if (line.hasOption('g')) {
+if (commandLine.hasOption('g')) {
 command = "ping";
 }
-if (line.hasOption('S')) {
+if (commandLine.hasOption('S')) {
 command = "start";
 }
-if (line.hasOption('R')) {
+if (commandLine.hasOption('R')) {
 command = "reset";
 }
-if (line.hasOption('P')) {
+if (commandLine.hasOption('P')) {
 command = "purgeAll";
 }
+if (commandLine.hasOption('o')) {
+command = "reimport";
-if (line.hasOption('e')) {
-eperson = line.getOptionValue('e');
 }
-if (line.hasOption('c')) {
+if (commandLine.hasOption('c')) {
-collection = line.getOptionValue('c');
+collection = commandLine.getOptionValue('c');
 }
-if (line.hasOption('t')) {
+if (commandLine.hasOption('t')) {
-harvestType = Integer.parseInt(line.getOptionValue('t'));
+harvestType = Integer.parseInt(commandLine.getOptionValue('t'));
 } else {
 harvestType = 0;
 }
-if (line.hasOption('a')) {
+if (commandLine.hasOption('a')) {
-oaiSource = line.getOptionValue('a');
+oaiSource = commandLine.getOptionValue('a');
 }
-if (line.hasOption('i')) {
+if (commandLine.hasOption('i')) {
-oaiSetID = line.getOptionValue('i');
+oaiSetID = commandLine.getOptionValue('i');
+}
+if (commandLine.hasOption('m')) {
+metadataKey = commandLine.getOptionValue('m');
 }
-if (line.hasOption('m')) {
-metadataKey = line.getOptionValue('m');
 }

+/**
+* This method will assign the currentUser to the {@link Context} variable which is also created in this method.
+* The instance of the method in this class will fetch the EPersonIdentifier from this class, this identifier
+* was given to this class upon instantiation, it'll then be used to find the {@link EPerson} associated with it
+* and this {@link EPerson} will be set as the currentUser of the created {@link Context}
+* @throws ParseException If something went wrong with the retrieval of the EPerson Identifier
+*/
+protected void assignCurrentUserInContext() throws ParseException {
+UUID currentUserUuid = this.getEpersonIdentifier();
+try {
+this.context = new Context(Context.Mode.BATCH_EDIT);
+EPerson eperson = ePersonService.find(context, currentUserUuid);
+if (eperson == null) {
+super.handler.logError("EPerson not found: " + currentUserUuid);
+throw new IllegalArgumentException("Unable to find a user with uuid: " + currentUserUuid);
+}
+this.context.setCurrentUser(eperson);
+} catch (SQLException e) {
+handler.handleException("Something went wrong trying to fetch eperson for uuid: " + currentUserUuid, e);
+}
+}

-// Instantiate our class
+public void internalRun() throws Exception {
-Harvest harvester = new Harvest();
+if (help) {
-harvester.context = new Context(Context.Mode.BATCH_EDIT);
+printHelp();
+handler.logInfo("PING OAI server: Harvest -g -a oai_source -i oai_set_id");
+handler.logInfo(
+"SETUP a collection for harvesting: Harvest -s -c collection -t harvest_type -a oai_source -i " +
+"oai_set_id -m metadata_format");
+handler.logInfo("RUN harvest once: Harvest -r -e eperson -c collection");
+handler.logInfo("START harvest scheduler: Harvest -S");
+handler.logInfo("RESET all harvest status: Harvest -R");
+handler.logInfo("PURGE a collection of items and settings: Harvest -p -e eperson -c collection");
+handler.logInfo("PURGE all harvestable collections: Harvest -P -e eperson");

+return;
+}

-// Check our options
+if (StringUtils.isBlank(command)) {
-if (command == null) {
+handler.logError("No parameters specified (run with -h flag for details)");
-System.out
+throw new UnsupportedOperationException("No command specified");
-.println("Error - no parameters specified (run with -h flag for details)");
-System.exit(1);
 } else if ("run".equals(command)) {
 // Run a single harvest cycle on a collection using saved settings.
-if (collection == null || eperson == null) {
+if (collection == null || context.getCurrentUser() == null) {
-System.out
+handler.logError("A target collection and eperson must be provided (run with -h flag for details)");
-.println("Error - a target collection and eperson must be provided");
+throw new UnsupportedOperationException("A target collection and eperson must be provided");
-System.out.println(" (run with -h flag for details)");
-System.exit(1);
 }
+runHarvest(context, collection);
-harvester.runHarvest(collection, eperson);
 } else if ("start".equals(command)) {
 // start the harvest loop
 startHarvester();
 } else if ("reset".equals(command)) {
 // reset harvesting status
-resetHarvesting();
+resetHarvesting(context);
 } else if ("purgeAll".equals(command)) {
 // purge all collections that are set up for harvesting (obviously for testing purposes only)
-if (eperson == null) {
+if (context.getCurrentUser() == null) {
-System.out
+handler.logError("An eperson must be provided (run with -h flag for details)");
-.println("Error - an eperson must be provided");
+throw new UnsupportedOperationException("An eperson must be provided");
-System.out.println(" (run with -h flag for details)");
-System.exit(1);
 }

 List<HarvestedCollection> harvestedCollections = harvestedCollectionService.findAll(context);
 for (HarvestedCollection harvestedCollection : harvestedCollections) {
-System.out.println(
+handler.logInfo(
 "Purging the following collections (deleting items and resetting harvest status): " +
 harvestedCollection
 .getCollection().getID().toString());
-harvester.purgeCollection(harvestedCollection.getCollection().getID().toString(), eperson);
+purgeCollection(context, harvestedCollection.getCollection().getID().toString());
 }
 context.complete();
 } else if ("purge".equals(command)) {
 // Delete all items in a collection. Useful for testing fresh harvests.
-if (collection == null || eperson == null) {
+if (collection == null || context.getCurrentUser() == null) {
-System.out
+handler.logError("A target collection and eperson must be provided (run with -h flag for details)");
-.println("Error - a target collection and eperson must be provided");
+throw new UnsupportedOperationException("A target collection and eperson must be provided");
-System.out.println(" (run with -h flag for details)");
-System.exit(1);
 }

-harvester.purgeCollection(collection, eperson);
+purgeCollection(context, collection);
+context.complete();

+} else if ("reimport".equals(command)) {
+// Delete all items in a collection. Useful for testing fresh harvests.
+if (collection == null || context.getCurrentUser() == null) {
+handler.logError("A target collection and eperson must be provided (run with -h flag for details)");
+throw new UnsupportedOperationException("A target collection and eperson must be provided");
+}
+purgeCollection(context, collection);
+runHarvest(context, collection);
 context.complete();

-//TODO: implement this... remove all items and remember to unset "last-harvested" settings
 } else if ("config".equals(command)) {
 // Configure a collection with the three main settings
 if (collection == null) {
-System.out.println("Error - a target collection must be provided");
+handler.logError("A target collection must be provided (run with -h flag for details)");
-System.out.println(" (run with -h flag for details)");
+throw new UnsupportedOperationException("A target collection must be provided");
-System.exit(1);
 }
 if (oaiSource == null || oaiSetID == null) {
-System.out.println("Error - both the OAI server address and OAI set id must be specified");
+handler.logError(
-System.out.println(" (run with -h flag for details)");
+"Both the OAI server address and OAI set id must be specified (run with -h flag for details)");
-System.exit(1);
+throw new UnsupportedOperationException("Both the OAI server address and OAI set id must be specified");
 }
 if (metadataKey == null) {
-System.out
+handler.logError(
-.println("Error - a metadata key (commonly the prefix) must be specified for this collection");
+"A metadata key (commonly the prefix) must be specified for this collection (run with -h flag" +
-System.out.println(" (run with -h flag for details)");
+" for details)");
-System.exit(1);
+throw new UnsupportedOperationException(
+"A metadata key (commonly the prefix) must be specified for this collection");
 }

-harvester.configureCollection(collection, harvestType, oaiSource, oaiSetID, metadataKey);
+configureCollection(context, collection, harvestType, oaiSource, oaiSetID, metadataKey);
 } else if ("ping".equals(command)) {
 if (oaiSource == null || oaiSetID == null) {
-System.out.println("Error - both the OAI server address and OAI set id must be specified");
+handler.logError(
-System.out.println(" (run with -h flag for details)");
+"Both the OAI server address and OAI set id must be specified (run with -h flag for details)");
-System.exit(1);
+throw new UnsupportedOperationException("Both the OAI server address and OAI set id must be specified");
 }

 pingResponder(oaiSource, oaiSetID, metadataKey);
 } else {
-System.out.println("Error - your command '" + command + "' was not recoginzed properly");
+handler.logError(
-System.out.println(" (run with -h flag for details)");
+"Your command '" + command + "' was not recognized properly (run with -h flag for details)");
-System.exit(1);
+throw new UnsupportedOperationException("Your command '" + command + "' was not recognized properly");
 }


 }

 /*
 * Resolve the ID into a collection and check to see if its harvesting options are set. If so, return
 * the collection, if not, bail out.
 */
-private Collection resolveCollection(String collectionID) {
+private Collection resolveCollection(Context context, String collectionID) {

 DSpaceObject dso;
 Collection targetCollection = null;
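Because the split rendering fragments this rewrite badly, the overall shape of the converted class is easier to see as a skeleton assembled from the added lines above (a sketch only; most branches and all field declarations are elided, so this is not a compilable excerpt of the file):

    public class Harvest extends DSpaceRunnable<HarvestScriptConfiguration> {

        public void setup() throws ParseException {
            // Services are obtained here instead of in static initializers,
            // and options are read from the inherited 'commandLine'.
            help = commandLine.hasOption('h');
            if (commandLine.hasOption('s')) {
                command = "config";
            }
            // ... 'p', 'r', 'g', 'S', 'R', 'P', 'o', 'c', 't', 'a', 'i', 'm' map onto fields the same way
        }

        public void internalRun() throws Exception {
            if (help) {
                printHelp();
                return;
            }
            if (StringUtils.isBlank(command)) {
                handler.logError("No parameters specified (run with -h flag for details)");
                throw new UnsupportedOperationException("No command specified");
            } else if ("run".equals(command)) {
                runHarvest(context, collection);
            } else if ("start".equals(command)) {
                startHarvester();
            }
            // ... "reset", "purgeAll", "purge", "reimport", "config" and "ping" follow the same pattern,
            // reporting through 'handler' and throwing instead of calling System.exit()
        }
    }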
@@ -270,14 +269,14 @@ public class Harvest {
 }
 } else {
 // not a handle, try and treat it as an collection database UUID
-System.out.println("Looking up by UUID: " + collectionID + ", " + "in context: " + context);
+handler.logInfo("Looking up by UUID: " + collectionID + ", " + "in context: " + context);
 targetCollection = collectionService.find(context, UUID.fromString(collectionID));
 }
 }
 // was the collection valid?
 if (targetCollection == null) {
-System.out.println("Cannot resolve " + collectionID + " to collection");
+handler.logError("Cannot resolve " + collectionID + " to collection");
-System.exit(1);
+throw new UnsupportedOperationException("Cannot resolve " + collectionID + " to collection");
 }
 } catch (SQLException se) {
 se.printStackTrace();
@@ -287,12 +286,12 @@ public class Harvest {
 }


-private void configureCollection(String collectionID, int type, String oaiSource, String oaiSetId,
+private void configureCollection(Context context, String collectionID, int type, String oaiSource, String oaiSetId,
 String mdConfigId) {
-System.out.println("Running: configure collection");
+handler.logInfo("Running: configure collection");

-Collection collection = resolveCollection(collectionID);
+Collection collection = resolveCollection(context, collectionID);
-System.out.println(collection.getID());
+handler.logInfo(String.valueOf(collection.getID()));

 try {
 HarvestedCollection hc = harvestedCollectionService.find(context, collection);
@@ -307,9 +306,8 @@ public class Harvest {
 context.restoreAuthSystemState();
 context.complete();
 } catch (Exception e) {
-System.out.println("Changes could not be committed");
+handler.logError("Changes could not be committed");
-e.printStackTrace();
+handler.handleException(e);
-System.exit(1);
 } finally {
 if (context != null) {
 context.restoreAuthSystemState();
@@ -320,18 +318,15 @@ public class Harvest {

 /**
 * Purges a collection of all harvest-related data and settings. All items in the collection will be deleted.
-*
 * @param collectionID
-* @param email
+*
 */
-private void purgeCollection(String collectionID, String email) {
+private void purgeCollection(Context context, String collectionID) {
-System.out.println(
+handler.logInfo(
 "Purging collection of all items and resetting last_harvested and harvest_message: " + collectionID);
-Collection collection = resolveCollection(collectionID);
+Collection collection = resolveCollection(context, collectionID);

 try {
-EPerson eperson = ePersonService.findByEmail(context, email);
-context.setCurrentUser(eperson);
 context.turnOffAuthorisationSystem();

 ItemService itemService = ContentServiceFactory.getInstance().getItemService();
@@ -340,7 +335,7 @@ public class Harvest {
 while (it.hasNext()) {
 i++;
 Item item = it.next();
-System.out.println("Deleting: " + item.getHandle());
+handler.logInfo("Deleting: " + item.getHandle());
 collectionService.removeItem(context, collection, item);
 context.uncacheEntity(item);// Dispatch events every 50 items
 if (i % 50 == 0) {
@@ -360,9 +355,8 @@ public class Harvest {
 context.restoreAuthSystemState();
 context.dispatchEvents();
 } catch (Exception e) {
-System.out.println("Changes could not be committed");
+handler.logError("Changes could not be committed");
-e.printStackTrace();
+handler.handleException(e);
-System.exit(1);
 } finally {
 context.restoreAuthSystemState();
 }
@@ -372,46 +366,42 @@ public class Harvest {
 /**
 * Run a single harvest cycle on the specified collection under the authorization of the supplied EPerson
 */
-private void runHarvest(String collectionID, String email) {
+private void runHarvest(Context context, String collectionID) {
-System.out.println("Running: a harvest cycle on " + collectionID);
+handler.logInfo("Running: a harvest cycle on " + collectionID);

-System.out.print("Initializing the harvester... ");
+handler.logInfo("Initializing the harvester... ");
 OAIHarvester harvester = null;
 try {
-Collection collection = resolveCollection(collectionID);
+Collection collection = resolveCollection(context, collectionID);
 HarvestedCollection hc = harvestedCollectionService.find(context, collection);
 harvester = new OAIHarvester(context, collection, hc);
-System.out.println("success. ");
+handler.logInfo("Initialized the harvester successfully");
 } catch (HarvestingException hex) {
-System.out.print("failed. ");
+handler.logError("Initializing the harvester failed.");
-System.out.println(hex.getMessage());
 throw new IllegalStateException("Unable to harvest", hex);
 } catch (SQLException se) {
-System.out.print("failed. ");
+handler.logError("Initializing the harvester failed.");
-System.out.println(se.getMessage());
 throw new IllegalStateException("Unable to access database", se);
 }

 try {
 // Harvest will not work for an anonymous user
-EPerson eperson = ePersonService.findByEmail(context, email);
+handler.logInfo("Harvest started... ");
-System.out.println("Harvest started... ");
-context.setCurrentUser(eperson);
 harvester.runHarvest();
 context.complete();
 } catch (SQLException | AuthorizeException | IOException e) {
 throw new IllegalStateException("Failed to run harvester", e);
 }

-System.out.println("Harvest complete. ");
+handler.logInfo("Harvest complete. ");
 }

 /**
 * Resets harvest_status and harvest_start_time flags for all collections that have a row in the
 * harvested_collections table
 */
-private static void resetHarvesting() {
+private void resetHarvesting(Context context) {
-System.out.print("Resetting harvest status flag on all collections... ");
+handler.logInfo("Resetting harvest status flag on all collections... ");

 try {
 List<HarvestedCollection> harvestedCollections = harvestedCollectionService.findAll(context);
@@ -421,21 +411,21 @@ public class Harvest {
 harvestedCollection.setHarvestStatus(HarvestedCollection.STATUS_READY);
 harvestedCollectionService.update(context, harvestedCollection);
 }
-System.out.println("success. ");
+handler.logInfo("Reset harvest status flag successfully");
 } catch (Exception ex) {
-System.out.println("failed. ");
+handler.logError("Resetting harvest status flag failed");
-ex.printStackTrace();
+handler.handleException(ex);
 }
 }

 /**
 * Starts up the harvest scheduler. Terminating this process will stop the scheduler.
 */
-private static void startHarvester() {
+private void startHarvester() {
 try {
-System.out.print("Starting harvest loop... ");
+handler.logInfo("Starting harvest loop... ");
 HarvestServiceFactory.getInstance().getHarvestSchedulingService().startNewScheduler();
-System.out.println("running. ");
+handler.logInfo("running. ");
 } catch (Exception ex) {
 ex.printStackTrace();
 }
@@ -448,29 +438,31 @@ public class Harvest {
 * @param set name of an item set.
 * @param metadataFormat local prefix name, or null for "dc".
 */
-private static void pingResponder(String server, String set, String metadataFormat) {
+private void pingResponder(String server, String set, String metadataFormat) {
 List<String> errors;

-System.out.print("Testing basic PMH access: ");
+handler.logInfo("Testing basic PMH access: ");
 errors = harvestedCollectionService.verifyOAIharvester(server, set,
 (null != metadataFormat) ? metadataFormat : "dc", false);
 if (errors.isEmpty()) {
-System.out.println("OK");
+handler.logInfo("OK");
 } else {
 for (String error : errors) {
-System.err.println(error);
+handler.logError(error);
 }
 }

-System.out.print("Testing ORE support: ");
+handler.logInfo("Testing ORE support: ");
 errors = harvestedCollectionService.verifyOAIharvester(server, set,
 (null != metadataFormat) ? metadataFormat : "dc", true);
 if (errors.isEmpty()) {
-System.out.println("OK");
+handler.logInfo("OK");
 } else {
 for (String error : errors) {
-System.err.println(error);
+handler.logError(error);
 }
 }
 }


 }
@@ -0,0 +1,45 @@
+/**
+* The contents of this file are subject to the license and copyright
+* detailed in the LICENSE and NOTICE files at the root of the source
+* tree and available online at
+*
+* http://www.dspace.org/license/
+*/
+package org.dspace.app.harvest;
+
+import java.sql.SQLException;
+
+import org.apache.commons.cli.ParseException;
+import org.dspace.core.Context;
+import org.dspace.eperson.EPerson;
+
+public class HarvestCli extends Harvest {
+
+/**
+* This is the overridden instance of the {@link Harvest#assignCurrentUserInContext()} method in the parent class
+* {@link Harvest}.
+* This is done so that the CLI version of the Script is able to retrieve its currentUser from the -e flag given
+* with the parameters of the Script.
+*
+* @throws ParseException If the e flag was not given to the parameters when calling the script
+*/
+@Override
+protected void assignCurrentUserInContext() throws ParseException {
+if (this.commandLine.hasOption('e')) {
+String ePersonEmail = this.commandLine.getOptionValue('e');
+this.context = new Context(Context.Mode.BATCH_EDIT);
+try {
+EPerson ePerson = ePersonService.findByEmail(this.context, ePersonEmail);
+if (ePerson == null) {
+super.handler.logError("EPerson not found: " + ePersonEmail);
+throw new IllegalArgumentException("Unable to find a user with email: " + ePersonEmail);
+}
+this.context.setCurrentUser(ePerson);
+} catch (SQLException e) {
+throw new IllegalArgumentException("SQLException trying to find user with email: " + ePersonEmail);
+}
+}
+}
+
+
+}
@@ -0,0 +1,22 @@
+/**
+* The contents of this file are subject to the license and copyright
+* detailed in the LICENSE and NOTICE files at the root of the source
+* tree and available online at
+*
+* http://www.dspace.org/license/
+*/
+package org.dspace.app.harvest;
+
+import org.apache.commons.cli.Options;
+
+
+public class HarvestCliScriptConfiguration extends HarvestScriptConfiguration {
+
+public Options getOptions() {
+Options options = super.getOptions();
+options.addOption("e", "eperson", true,
+"eperson");
+
+return options;
+}
+}
@@ -0,0 +1,79 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.app.harvest;
+
+import java.sql.SQLException;
+
+import org.apache.commons.cli.Options;
+import org.dspace.authorize.service.AuthorizeService;
+import org.dspace.core.Context;
+import org.dspace.scripts.configuration.ScriptConfiguration;
+import org.springframework.beans.factory.annotation.Autowired;
+
+
+public class HarvestScriptConfiguration<T extends Harvest> extends ScriptConfiguration<T> {
+
+    @Autowired
+    private AuthorizeService authorizeService;
+
+    private Class<T> dspaceRunnableClass;
+
+    @Override
+    public Class<T> getDspaceRunnableClass() {
+        return dspaceRunnableClass;
+    }
+
+    @Override
+    public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
+        this.dspaceRunnableClass = dspaceRunnableClass;
+    }
+
+    public boolean isAllowedToExecute(final Context context) {
+        try {
+            return authorizeService.isAdmin(context);
+        } catch (SQLException e) {
+            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
+        }
+    }
+
+    public Options getOptions() {
+        Options options = new Options();
+        options.addOption("p", "purge", false, "delete all items in the collection");
+        options.getOption("p").setType(boolean.class);
+        options.addOption("r", "run", false, "run the standard harvest procedure");
+        options.getOption("r").setType(boolean.class);
+        options.addOption("g", "ping", false, "test the OAI server and set");
+        options.getOption("g").setType(boolean.class);
+        options.addOption("s", "setup", false, "Set the collection up for harvesting");
+        options.getOption("s").setType(boolean.class);
+        options.addOption("S", "start", false, "start the harvest loop");
+        options.getOption("S").setType(boolean.class);
+        options.addOption("R", "reset", false, "reset harvest status on all collections");
+        options.getOption("R").setType(boolean.class);
+        options.addOption("P", "purgeCollections", false, "purge all harvestable collections");
+        options.getOption("P").setType(boolean.class);
+        options.addOption("o", "reimport", false, "reimport all items in the collection, " +
+            "this is equivalent to -p -r, purging all items in a collection and reimporting them");
+        options.getOption("o").setType(boolean.class);
+        options.addOption("c", "collection", true,
+            "harvesting collection (handle or id)");
+        options.addOption("t", "type", true,
+            "type of harvesting (0 for none)");
+        options.addOption("a", "address", true,
+            "address of the OAI-PMH server");
+        options.addOption("i", "oai_set_id", true,
+            "id of the PMH set representing the harvested collection");
+        options.addOption("m", "metadata_format", true,
+            "the name of the desired metadata format for harvesting, resolved to namespace and " +
+            "crosswalk in dspace.cfg");
+
+        options.addOption("h", "help", false, "help");
+        options.getOption("h").setType(boolean.class);
+
+        return options;
+    }
+}
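
Note: taken together, HarvestCli and the two script configurations above let the command line pass the acting user explicitly. A hypothetical invocation wiring these options together (the launcher path, collection handle, OAI address, set id and e-mail below are illustrative only, not taken from this changeset) could look like:

    [dspace]/bin/dspace harvest -r -c 123456789/2 -e admin@myrepo.example \
        -a https://demo.example/oai/request -i col_set -m dc -t 1

Here -e supplies the EPerson e-mail that HarvestCli#assignCurrentUserInContext() resolves via ePersonService.findByEmail() before the run, and the remaining flags come from HarvestScriptConfiguration#getOptions().
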
@@ -16,6 +16,7 @@ import java.io.FileWriter;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
 import java.sql.SQLException;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
@@ -51,7 +52,7 @@ import org.dspace.core.Constants;
 import org.dspace.core.Context;
 import org.dspace.core.Email;
 import org.dspace.core.I18nUtil;
-import org.dspace.core.LogManager;
+import org.dspace.core.LogHelper;
 import org.dspace.core.Utils;
 import org.dspace.eperson.EPerson;
 import org.dspace.eperson.service.EPersonService;
@@ -129,7 +130,7 @@ public class ItemExportServiceImpl implements ItemExportService {

         while (i.hasNext()) {
             if (SUBDIR_LIMIT > 0 && ++counter == SUBDIR_LIMIT) {
-                subdir = Integer.valueOf(subDirSuffix++).toString();
+                subdir = Integer.toString(subDirSuffix++);
                 fullPath = destDirName + File.separatorChar + subdir;
                 counter = 0;

@@ -191,7 +192,7 @@ public class ItemExportServiceImpl implements ItemExportService {
      */
     protected void writeMetadata(Context c, Item i, File destDir, boolean migrate)
         throws Exception {
-        Set<String> schemas = new HashSet<String>();
+        Set<String> schemas = new HashSet<>();
         List<MetadataValue> dcValues = itemService.getMetadata(i, Item.ANY, Item.ANY, Item.ANY, Item.ANY);
         for (MetadataValue metadataValue : dcValues) {
             schemas.add(metadataValue.getMetadataField().getMetadataSchema().getName());
@@ -267,7 +268,7 @@ public class ItemExportServiceImpl implements ItemExportService {
                 + Utils.addEntities(dcv.getValue()) + "</dcvalue>\n")
                 .getBytes("UTF-8");

-            if ((!migrate) ||
+            if (!migrate ||
                 (migrate && !(
                     ("date".equals(metadataField.getElement()) && "issued".equals(qualifier)) ||
                     ("date".equals(metadataField.getElement()) && "accessioned".equals(qualifier)) ||
@@ -292,10 +293,10 @@ public class ItemExportServiceImpl implements ItemExportService {
         }

         // When migrating, only keep date.issued if it is different to date.accessioned
-        if ((migrate) &&
+        if (migrate &&
             (dateIssued != null) &&
             (dateAccessioned != null) &&
-            (!dateIssued.equals(dateAccessioned))) {
+            !dateIssued.equals(dateAccessioned)) {
             utf8 = (" <dcvalue element=\"date\" "
                 + "qualifier=\"issued\">"
                 + Utils.addEntities(dateIssued) + "</dcvalue>\n")
@@ -330,7 +331,7 @@ public class ItemExportServiceImpl implements ItemExportService {
         File outFile = new File(destDir, filename);

         if (outFile.createNewFile()) {
-            PrintWriter out = new PrintWriter(new FileWriter(outFile));
+            PrintWriter out = new PrintWriter(new FileWriter(outFile, StandardCharsets.UTF_8));

             out.println(i.getHandle());

@@ -360,7 +361,7 @@ public class ItemExportServiceImpl implements ItemExportService {
         File outFile = new File(destDir, "contents");

         if (outFile.createNewFile()) {
-            PrintWriter out = new PrintWriter(new FileWriter(outFile));
+            PrintWriter out = new PrintWriter(new FileWriter(outFile, StandardCharsets.UTF_8));

             List<Bundle> bundles = i.getBundles();

@@ -474,7 +475,7 @@ public class ItemExportServiceImpl implements ItemExportService {
     public void createDownloadableExport(DSpaceObject dso,
                                          Context context, boolean migrate) throws Exception {
         EPerson eperson = context.getCurrentUser();
-        ArrayList<DSpaceObject> list = new ArrayList<DSpaceObject>(1);
+        ArrayList<DSpaceObject> list = new ArrayList<>(1);
         list.add(dso);
         processDownloadableExport(list, context, eperson == null ? null
             : eperson.getEmail(), migrate);
@@ -491,7 +492,7 @@ public class ItemExportServiceImpl implements ItemExportService {
     @Override
     public void createDownloadableExport(DSpaceObject dso,
                                          Context context, String additionalEmail, boolean migrate) throws Exception {
-        ArrayList<DSpaceObject> list = new ArrayList<DSpaceObject>(1);
+        ArrayList<DSpaceObject> list = new ArrayList<>(1);
         list.add(dso);
         processDownloadableExport(list, context, additionalEmail, migrate);
     }
@@ -652,7 +653,7 @@ public class ItemExportServiceImpl implements ItemExportService {
         while (iter.hasNext()) {
             String keyName = iter.next();
             List<UUID> uuids = itemsMap.get(keyName);
-            List<Item> items = new ArrayList<Item>();
+            List<Item> items = new ArrayList<>();
             for (UUID uuid : uuids) {
                 items.add(itemService.find(context, uuid));
             }
@@ -876,7 +877,7 @@ public class ItemExportServiceImpl implements ItemExportService {
             .getIntProperty("org.dspace.app.itemexport.life.span.hours");
         Calendar now = Calendar.getInstance();
         now.setTime(new Date());
-        now.add(Calendar.HOUR, (-hours));
+        now.add(Calendar.HOUR, -hours);
         File downloadDir = new File(getExportDownloadDirectory(eperson));
         if (downloadDir.exists()) {
             File[] files = downloadDir.listFiles();
@@ -896,7 +897,7 @@ public class ItemExportServiceImpl implements ItemExportService {
         int hours = configurationService.getIntProperty("org.dspace.app.itemexport.life.span.hours");
         Calendar now = Calendar.getInstance();
         now.setTime(new Date());
-        now.add(Calendar.HOUR, (-hours));
+        now.add(Calendar.HOUR, -hours);
         File downloadDir = new File(configurationService.getProperty("org.dspace.app.itemexport.download.dir"));
         if (downloadDir.exists()) {
             // Get a list of all the sub-directories, potentially one for each ePerson.
@@ -936,7 +937,7 @@ public class ItemExportServiceImpl implements ItemExportService {

             email.send();
         } catch (Exception e) {
-            log.warn(LogManager.getHeader(context, "emailSuccessMessage", "cannot notify user of export"), e);
+            log.warn(LogHelper.getHeader(context, "emailSuccessMessage", "cannot notify user of export"), e);
         }
     }
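
The recurring change in this file (and in ItemArchive, HTMLFilter and PoiWordFilter further down) is to stop relying on the platform default charset. A minimal sketch of the pattern, assuming Java 11+ for the FileWriter(File, Charset) constructor; the file name and strings are illustrative:

    import java.io.File;
    import java.io.FileWriter;
    import java.io.PrintWriter;
    import java.nio.charset.StandardCharsets;

    // write and encode with an explicit charset instead of the platform default
    try (PrintWriter out = new PrintWriter(new FileWriter(new File("contents"), StandardCharsets.UTF_8))) {
        out.println("example line");
    }
    byte[] bytes = "extracted text".getBytes(StandardCharsets.UTF_8);
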
@@ -296,29 +296,36 @@ public class ItemImportCLITool {

             // validate each collection arg to see if it's a real collection
             for (int i = 0; i < collections.length; i++) {
+
+                Collection resolved = null;
+
+                if (collections[i] != null) {
+
                     // is the ID a handle?
                     if (collections[i].indexOf('/') != -1) {
                         // string has a / so it must be a handle - try and resolve
                         // it
-                        mycollections.add((Collection) handleService
-                            .resolveToObject(c, collections[i]));
-
-                        // resolved, now make sure it's a collection
-                        if ((mycollections.get(i) == null)
-                            || (mycollections.get(i).getType() != Constants.COLLECTION)) {
-                            mycollections.set(i, null);
-                        }
-                    } else if (collections[i] != null) {
+                        resolved = ((Collection) handleService
+                            .resolveToObject(c, collections[i]));
+                    } else {
                         // not a handle, try and treat it as an integer collection database ID
-                        mycollections.set(i, collectionService.find(c, UUID.fromString(collections[i])));
+                        resolved = collectionService.find(c, UUID.fromString(collections[i]));
+                    }
                 }

                 // was the collection valid?
-                if (mycollections.get(i) == null) {
+                if ((resolved == null)
+                    || (resolved.getType() != Constants.COLLECTION)) {
                     throw new IllegalArgumentException("Cannot resolve "
                         + collections[i] + " to collection");
                 }

+                // add resolved collection to list
+                mycollections.add(resolved);
+
                 // print progress info
                 String owningPrefix = "";

@@ -327,7 +334,7 @@ public class ItemImportCLITool {
                 }

                 System.out.println(owningPrefix + " Collection: "
-                    + mycollections.get(i).getName());
+                    + resolved.getName());
             }
         } // end of validating collections

@@ -55,6 +55,7 @@ import org.apache.logging.log4j.Logger;
 import org.apache.xpath.XPathAPI;
 import org.dspace.app.itemimport.service.ItemImportService;
 import org.dspace.app.util.LocalSchemaFilenameFilter;
+import org.dspace.app.util.RelationshipUtils;
 import org.dspace.authorize.AuthorizeException;
 import org.dspace.authorize.ResourcePolicy;
 import org.dspace.authorize.service.AuthorizeService;
@@ -68,6 +69,9 @@ import org.dspace.content.Item;
 import org.dspace.content.MetadataField;
 import org.dspace.content.MetadataSchema;
 import org.dspace.content.MetadataSchemaEnum;
+import org.dspace.content.MetadataValue;
+import org.dspace.content.Relationship;
+import org.dspace.content.RelationshipType;
 import org.dspace.content.WorkspaceItem;
 import org.dspace.content.service.BitstreamFormatService;
 import org.dspace.content.service.BitstreamService;
@@ -77,12 +81,15 @@ import org.dspace.content.service.InstallItemService;
 import org.dspace.content.service.ItemService;
 import org.dspace.content.service.MetadataFieldService;
 import org.dspace.content.service.MetadataSchemaService;
+import org.dspace.content.service.MetadataValueService;
+import org.dspace.content.service.RelationshipService;
+import org.dspace.content.service.RelationshipTypeService;
 import org.dspace.content.service.WorkspaceItemService;
 import org.dspace.core.Constants;
 import org.dspace.core.Context;
 import org.dspace.core.Email;
 import org.dspace.core.I18nUtil;
-import org.dspace.core.LogManager;
+import org.dspace.core.LogHelper;
 import org.dspace.eperson.EPerson;
 import org.dspace.eperson.Group;
 import org.dspace.eperson.service.EPersonService;
@@ -151,6 +158,12 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
     protected WorkflowService workflowService;
     @Autowired(required = true)
     protected ConfigurationService configurationService;
+    @Autowired(required = true)
+    protected RelationshipService relationshipService;
+    @Autowired(required = true)
+    protected RelationshipTypeService relationshipTypeService;
+    @Autowired(required = true)
+    protected MetadataValueService metadataValueService;
+
     protected String tempWorkDir;

@@ -160,6 +173,9 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
     protected boolean useWorkflowSendEmail = false;
     protected boolean isQuiet = false;

+    //remember which folder item was imported from
+    Map<String, Item> itemFolderMap = null;
+
     @Override
     public void afterPropertiesSet() throws Exception {
         tempWorkDir = configurationService.getProperty("org.dspace.app.batchitemimport.work.dir");
@@ -211,10 +227,13 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
         // create the mapfile
         File outFile = null;
         PrintWriter mapOut = null;

         try {
             Map<String, String> skipItems = new HashMap<>(); // set of items to skip if in 'resume'
             // mode
+
+            itemFolderMap = new HashMap<>();
+
             System.out.println("Adding items from directory: " + sourceDir);
             log.debug("Adding items from directory: " + sourceDir);
             System.out.println("Generating mapfile: " + mapFile);
@@ -255,6 +274,12 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
             for (int i = 0; i < dircontents.length; i++) {
                 if (skipItems.containsKey(dircontents[i])) {
                     System.out.println("Skipping import of " + dircontents[i]);
+
+                    //we still need the item in the map for relationship linking
+                    String skippedHandle = skipItems.get(dircontents[i]);
+                    Item skippedItem = (Item) handleService.resolveToObject(c, skippedHandle);
+                    itemFolderMap.put(dircontents[i], skippedItem);
+
                 } else {
                     List<Collection> clist;
                     if (directoryFileCollections) {
@@ -274,12 +299,19 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
                     } else {
                         clist = mycollections;
                     }

                     Item item = addItem(c, clist, sourceDir, dircontents[i], mapOut, template);
+
+                    itemFolderMap.put(dircontents[i], item);
+
                     c.uncacheEntity(item);
                     System.out.println(i + " " + dircontents[i]);
                 }
             }
+
+            //now that all items are imported, iterate again to link relationships
+            addRelationships(c, sourceDir);
+
         } finally {
             if (mapOut != null) {
                 mapOut.flush();
@@ -288,6 +320,276 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
             }
         }
     }

+    /**
+     * Add relationships from a 'relationships' manifest file.
+     *
+     * @param c Context
+     * @param sourceDir The parent import source directory
+     * @throws Exception
+     */
+    protected void addRelationships(Context c, String sourceDir) throws Exception {
+
+        for (Map.Entry<String, Item> itemEntry : itemFolderMap.entrySet()) {
+
+            String folderName = itemEntry.getKey();
+            String path = sourceDir + File.separatorChar + folderName;
+            Item item = itemEntry.getValue();
+
+            //look for a 'relationship' manifest
+            Map<String, List<String>> relationships = processRelationshipFile(path, "relationships");
+            if (!relationships.isEmpty()) {
+
+                for (Map.Entry<String, List<String>> relEntry : relationships.entrySet()) {
+
+                    String relationshipType = relEntry.getKey();
+                    List<String> identifierList = relEntry.getValue();
+
+                    for (String itemIdentifier : identifierList) {
+
+                        if (isTest) {
+                            System.out.println("\tAdding relationship (type: " + relationshipType +
+                                ") from " + folderName + " to " + itemIdentifier);
+                            continue;
+                        }
+
+                        //find referenced item
+                        Item relationItem = resolveRelatedItem(c, itemIdentifier);
+                        if (null == relationItem) {
+                            throw new Exception("Could not find item for " + itemIdentifier);
+                        }
+
+                        //get entity type of entity and item
+                        String itemEntityType = getEntityType(item);
+                        String relatedEntityType = getEntityType(relationItem);
+
+                        //find matching relationship type
+                        List<RelationshipType> relTypes = relationshipTypeService.findByLeftwardOrRightwardTypeName(
+                            c, relationshipType);
+                        RelationshipType foundRelationshipType = RelationshipUtils.matchRelationshipType(
+                            relTypes, relatedEntityType, itemEntityType, relationshipType);
+
+                        if (foundRelationshipType == null) {
+                            throw new Exception("No Relationship type found for:\n" +
+                                "Target type: " + relatedEntityType + "\n" +
+                                "Origin referer type: " + itemEntityType + "\n" +
+                                "with typeName: " + relationshipType
+                            );
+                        }
+
+                        boolean left = false;
+                        if (foundRelationshipType.getLeftwardType().equalsIgnoreCase(relationshipType)) {
+                            left = true;
+                        }
+
+                        // Placeholder items for relation placing
+                        Item leftItem = null;
+                        Item rightItem = null;
+                        if (left) {
+                            leftItem = item;
+                            rightItem = relationItem;
+                        } else {
+                            leftItem = relationItem;
+                            rightItem = item;
+                        }
+
+                        // Create the relationship
+                        int leftPlace = relationshipService.findNextLeftPlaceByLeftItem(c, leftItem);
+                        int rightPlace = relationshipService.findNextRightPlaceByRightItem(c, rightItem);
+                        Relationship persistedRelationship = relationshipService.create(
+                            c, leftItem, rightItem, foundRelationshipType, leftPlace, rightPlace);
+                        // relationshipService.update(c, persistedRelationship);
+
+                        System.out.println("\tAdded relationship (type: " + relationshipType + ") from " +
+                            leftItem.getHandle() + " to " + rightItem.getHandle());
+
+                    }
+
+                }
+
+            }
+
+        }
+
+    }
+
+    /**
+     * Get the item's entity type from meta.
+     *
+     * @param item
+     * @return
+     */
+    protected String getEntityType(Item item) throws Exception {
+        return itemService.getMetadata(item, "dspace", "entity", "type", Item.ANY).get(0).getValue();
+    }
+
+    /**
+     * Read the relationship manifest file.
+     *
+     * Each line in the file contains a relationship type id and an item identifier in the following format:
+     *
+     * relation.<relation_key> <handle|uuid|folderName:import_item_folder|schema.element[.qualifier]:value>
+     *
+     * The input_item_folder should refer the folder name of another item in this import batch.
+     *
+     * @param path The main import folder path.
+     * @param filename The name of the manifest file to check ('relationships')
+     * @return Map of found relationships
+     * @throws Exception
+     */
+    protected Map<String, List<String>> processRelationshipFile(String path, String filename) throws Exception {
+
+        File file = new File(path + File.separatorChar + filename);
+        Map<String, List<String>> result = new HashMap<>();
+
+        if (file.exists()) {
+
+            System.out.println("\tProcessing relationships file: " + filename);
+
+            BufferedReader br = null;
+            try {
+                br = new BufferedReader(new FileReader(file));
+                String line = null;
+                while ((line = br.readLine()) != null) {
+                    line = line.trim();
+                    if ("".equals(line)) {
+                        continue;
+                    }
+
+                    String relationshipType = null;
+                    String itemIdentifier = null;
+
+                    StringTokenizer st = new StringTokenizer(line);
+
+                    if (st.hasMoreTokens()) {
+                        relationshipType = st.nextToken();
+                        if (relationshipType.split("\\.").length > 1) {
+                            relationshipType = relationshipType.split("\\.")[1];
+                        }
+                    } else {
+                        throw new Exception("Bad mapfile line:\n" + line);
+                    }
+
+                    if (st.hasMoreTokens()) {
+                        itemIdentifier = st.nextToken("").trim();
+                    } else {
+                        throw new Exception("Bad mapfile line:\n" + line);
+                    }
+
+                    if (!result.containsKey(relationshipType)) {
+                        result.put(relationshipType, new ArrayList<>());
+                    }
+
+                    result.get(relationshipType).add(itemIdentifier);
+
+                }
+
+            } catch (FileNotFoundException e) {
+                System.out.println("\tNo relationships file found.");
+            } finally {
+                if (br != null) {
+                    try {
+                        br.close();
+                    } catch (IOException e) {
+                        System.out.println("Non-critical problem releasing resources.");
+                    }
+                }
+            }
+
+        }
+
+        return result;
+    }
+
+    /**
+     * Resolve an item identifier referred to in the relationships manifest file.
+     *
+     * The import item map will be checked first to see if the identifier refers to an item folder
+     * that was just imported. Next it will try to find the item by handle or UUID, or by a unique
+     * meta value.
+     *
+     * @param c Context
+     * @param itemIdentifier The identifier string found in the import manifest (handle, uuid, or import subfolder)
+     * @return Item if found, or null.
+     * @throws Exception
+     */
+    protected Item resolveRelatedItem(Context c, String itemIdentifier) throws Exception {
+
+        if (itemIdentifier.contains(":")) {
+
+            if (itemIdentifier.startsWith("folderName:") || itemIdentifier.startsWith("rowName:")) {
+                //identifier refers to a folder name in this import
+                int i = itemIdentifier.indexOf(":");
+                String folderName = itemIdentifier.substring(i + 1);
+                if (itemFolderMap.containsKey(folderName)) {
+                    return itemFolderMap.get(folderName);
+                }
+
+            } else {
+
+                //lookup by meta value
+                int i = itemIdentifier.indexOf(":");
+                String metaKey = itemIdentifier.substring(0, i);
+                String metaValue = itemIdentifier.substring(i + 1);
+                return findItemByMetaValue(c, metaKey, metaValue);
+
+            }
+
+        } else if (itemIdentifier.indexOf('/') != -1) {
+            //resolve by handle
+            return (Item) handleService.resolveToObject(c, itemIdentifier);
+
+        } else {
+            //try to resolve by UUID
+            return itemService.findByIdOrLegacyId(c, itemIdentifier);
+        }
+
+        return null;
+
+    }
+
+    /**
+     * Lookup an item by a (unique) meta value.
+     *
+     * @param metaKey
+     * @param metaValue
+     * @return Item
+     * @throws Exception if single item not found.
+     */
+    protected Item findItemByMetaValue(Context c, String metaKey, String metaValue) throws Exception {
+
+        Item item = null;
+
+        String mf[] = metaKey.split("\\.");
+        if (mf.length < 2) {
+            throw new Exception("Bad metadata field in reference: '" + metaKey +
+                "' (expected syntax is schema.element[.qualifier])");
+        }
+        String schema = mf[0];
+        String element = mf[1];
+        String qualifier = mf.length == 2 ? null : mf[2];
+        try {
+            MetadataField mfo = metadataFieldService.findByElement(c, schema, element, qualifier);
+            Iterator<MetadataValue> mdv = metadataValueService.findByFieldAndValue(c, mfo, metaValue);
+            if (mdv.hasNext()) {
+                MetadataValue mdvVal = mdv.next();
+                UUID uuid = mdvVal.getDSpaceObject().getID();
+                if (mdv.hasNext()) {
+                    throw new Exception("Ambiguous reference; multiple matches in db: " + metaKey);
+                }
+                item = itemService.find(c, uuid);
+            }
+        } catch (SQLException e) {
+            throw new Exception("Error looking up item by metadata reference: " + metaKey, e);
+        }
+
+        if (item == null) {
+            throw new Exception("Item not found by metadata reference: " + metaKey);
+        }
+
+        return item;
+
+    }
+
     @Override
     public void replaceItems(Context c, List<Collection> mycollections,
                              String sourceDir, String mapFile, boolean template) throws Exception {
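
The Javadoc of processRelationshipFile() above defines the manifest line format; a hypothetical 'relationships' file for one item folder (the relation keys and identifiers below are made up for illustration) might read:

    relation.isAuthorOfPublication folderName:item_001
    relation.isAuthorOfPublication 123456789/42
    relation.isOrgUnitOfPerson dc.identifier.other:staff-0007

Each line is split into the relation key (the part after 'relation.') and an identifier that resolveRelatedItem() matches against the current import batch, a handle or UUID, or a unique metadata value.
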
@@ -1689,7 +1991,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea

             email.send();
         } catch (Exception e) {
-            log.warn(LogManager.getHeader(context, "emailSuccessMessage", "cannot notify user of import"), e);
+            log.warn(LogHelper.getHeader(context, "emailSuccessMessage", "cannot notify user of import"), e);
         }
     }

@@ -1823,4 +2125,5 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
     public void setQuiet(boolean isQuiet) {
         this.isQuiet = isQuiet;
     }
+
 }
@@ -11,6 +11,8 @@ import java.io.UnsupportedEncodingException;
 import java.sql.SQLException;
 import java.util.List;

+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.dspace.app.util.Util;
 import org.dspace.content.Bitstream;
 import org.dspace.content.Bundle;
@@ -34,8 +36,9 @@ public class ItemMarkingAvailabilityBitstreamStrategy implements ItemMarkingExtr
     @Autowired(required = true)
     protected ItemService itemService;

-    public ItemMarkingAvailabilityBitstreamStrategy() {
+    private static final Logger LOG = LogManager.getLogger();
+
+    public ItemMarkingAvailabilityBitstreamStrategy() {
     }

     @Override
@@ -43,14 +46,14 @@ public class ItemMarkingAvailabilityBitstreamStrategy implements ItemMarkingExtr
         throws SQLException {

         List<Bundle> bundles = itemService.getBundles(item, "ORIGINAL");
-        if (bundles.size() == 0) {
+        if (bundles.isEmpty()) {
             ItemMarkingInfo markInfo = new ItemMarkingInfo();
             markInfo.setImageName(nonAvailableImageName);

             return markInfo;
         } else {
             Bundle originalBundle = bundles.iterator().next();
-            if (originalBundle.getBitstreams().size() == 0) {
+            if (originalBundle.getBitstreams().isEmpty()) {
                 ItemMarkingInfo markInfo = new ItemMarkingInfo();
                 markInfo.setImageName(nonAvailableImageName);

@@ -72,8 +75,7 @@ public class ItemMarkingAvailabilityBitstreamStrategy implements ItemMarkingExtr
             try {
                 bsLink = bsLink + Util.encodeBitstreamName(bitstream.getName(), Constants.DEFAULT_ENCODING);
             } catch (UnsupportedEncodingException e) {
-                e.printStackTrace();
+                LOG.warn("DSpace uses an unsupported encoding", e);
             }

             signInfo.setLink(bsLink);
@@ -105,6 +105,7 @@ public class ContentsEntry {
         return new ContentsEntry(arp[0], arp[1], actionId, groupName, arp[3]);
     }

+    @Override
     public String toString() {
         StringBuilder sb = new StringBuilder(filename);
         if (bundlename != null) {
@@ -120,6 +120,7 @@ class DtoMetadata {
         return true;
     }

+    @Override
     public String toString() {
         String s = "\tSchema: " + schema + " Element: " + element;
         if (qualifier != null) {
@@ -17,6 +17,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Iterator;
@@ -55,7 +56,7 @@ public class ItemArchive {
     protected Transformer transformer = null;

     protected List<DtoMetadata> dtomList = null;
-    protected List<DtoMetadata> undoDtomList = new ArrayList<DtoMetadata>();
+    protected List<DtoMetadata> undoDtomList = new ArrayList<>();

     protected List<UUID> undoAddContents = new ArrayList<>(); // for undo of add

@@ -325,7 +326,7 @@ public class ItemArchive {
         PrintWriter pw = null;
         try {
             File f = new File(dir, ItemUpdate.DELETE_CONTENTS_FILE);
-            pw = new PrintWriter(new BufferedWriter(new FileWriter(f)));
+            pw = new PrintWriter(new BufferedWriter(new FileWriter(f, StandardCharsets.UTF_8)));
             for (UUID i : undoAddContents) {
                 pw.println(i);
             }
@@ -9,6 +9,7 @@ package org.dspace.app.mediafilter;

 import java.io.ByteArrayInputStream;
 import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
 import javax.swing.text.Document;
 import javax.swing.text.html.HTMLEditorKit;

@@ -73,9 +74,9 @@ public class HTMLFilter extends MediaFilter {
         String extractedText = doc.getText(0, doc.getLength());

         // generate an input stream with the extracted text
-        byte[] textBytes = extractedText.getBytes();
+        byte[] textBytes = extractedText.getBytes(StandardCharsets.UTF_8);
         ByteArrayInputStream bais = new ByteArrayInputStream(textBytes);

-        return bais; // will this work? or will the byte array be out of scope?
+        return bais;
     }
 }
@@ -10,6 +10,7 @@ package org.dspace.app.mediafilter;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStream;
+import java.nio.charset.StandardCharsets;

 import org.apache.poi.POITextExtractor;
 import org.apache.poi.extractor.ExtractorFactory;
@@ -66,6 +67,6 @@ public class PoiWordFilter
         }

         // return the extracted text as a stream.
-        return new ByteArrayInputStream(text.getBytes());
+        return new ByteArrayInputStream(text.getBytes(StandardCharsets.UTF_8));
     }
 }
@@ -37,7 +37,7 @@ public class RequestItemDAOImpl extends AbstractHibernateDAO<RequestItem> implem
         Root<RequestItem> requestItemRoot = criteriaQuery.from(RequestItem.class);
         criteriaQuery.select(requestItemRoot);
         criteriaQuery.where(criteriaBuilder.equal(requestItemRoot.get(RequestItem_.token), token));
-        return uniqueResult(context, criteriaQuery, false, RequestItem.class, -1, -1);
+        return uniqueResult(context, criteriaQuery, false, RequestItem.class);
     }


@@ -73,6 +73,7 @@ public class SHERPAService {
     /**
      * Complete initialization of the Bean.
      */
+    @SuppressWarnings("unused")
     @PostConstruct
     private void init() {
         // Get endoint and API key from configuration
@@ -18,7 +18,7 @@ import org.dspace.app.sherpa.SHERPAService;
 import org.dspace.app.sherpa.v2.SHERPAResponse;
 import org.dspace.content.Item;
 import org.dspace.core.Context;
-import org.dspace.core.LogManager;
+import org.dspace.core.LogHelper;

 /**
  * SHERPASubmitService is
@@ -115,7 +115,7 @@ public class SHERPASubmitService {
     public Set<String> getISSNs(Context context, Item item) {
         Set<String> issns = new LinkedHashSet<String>();
         if (configuration.getIssnItemExtractors() == null) {
-            log.warn(LogManager.getHeader(context, "searchRelatedJournals",
+            log.warn(LogHelper.getHeader(context, "searchRelatedJournals",
                 "no issnItemExtractors defined"));
             return null;
         }
@@ -13,15 +13,18 @@ import java.util.List;
 * Plain java representation of a SHERPA Permitted Version object, based on SHERPA API v2 responses.
 *
 * In a SHERPA search for journal deposit policies, this data is contained within a publisher policy.
- * Each permitted version is for a particular article version (eg. submitted, accepted, published) and contains
+ * Each permitted version is for a particular article version (e.g. submitted, accepted, published) and contains:
 *
- * A list of general conditions / terms for deposit of this version of work
- * A list of allowed locations (eg. institutional repository, personal homepage, non-commercial repository)
- * A list of prerequisite conditions for deposit (eg. attribution, linking to published version)
- * A list of required licences for the deposited work (eg. CC-BY-NC)
- * Embargo requirements, if any
+ * <ul>
+ * <li>A list of general conditions / terms for deposit of this version of work</li>
+ * <li>A list of allowed locations (e.g. institutional repository, personal homepage, non-commercial repository)</li>
+ * <li>A list of prerequisite conditions for deposit (e.g. attribution, linking to published version)</li>
+ * <li>A list of required licenses for the deposited work (e.g. CC-BY-NC)</li>
+ * <li>Embargo requirements, if any</li>
+ * </ul>
 *
- * This class also has some helper data for labels, which can be used with i18n when displaying policy information
+ * This class also has some helper data for labels, which can be used with i18n
+ * when displaying policy information.
 *
 * @see SHERPAPublisherPolicy
 */
@@ -44,7 +47,7 @@ public class SHERPAPermittedVersion {
     // Embargo
     private SHERPAEmbargo embargo;

-    protected class SHERPAEmbargo {
+    protected static class SHERPAEmbargo {
         String units;
         int amount;
     }
@@ -10,7 +10,8 @@ package org.dspace.app.sherpa.v2;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
-import java.util.LinkedList;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
 import java.util.List;

 import org.apache.logging.log4j.LogManager;
@@ -74,7 +75,7 @@ public class SHERPAPublisherResponse {
      * @param jsonData - the JSON input stream from the API result response body
      */
     private void parseJSON(InputStream jsonData) throws IOException {
-        InputStreamReader streamReader = new InputStreamReader(jsonData);
+        InputStreamReader streamReader = new InputStreamReader(jsonData, StandardCharsets.UTF_8);
         JSONTokener jsonTokener = new JSONTokener(streamReader);
         JSONObject httpResponse;
         try {
@@ -86,7 +87,7 @@ public class SHERPAPublisherResponse {
             // parsing the full journal / policy responses
             if (items.length() > 0) {
                 metadata = new SHERPASystemMetadata();
-                this.publishers = new LinkedList<>();
+                this.publishers = new ArrayList<>();
                 // Iterate search result items
                 for (int itemIndex = 0; itemIndex < items.length(); itemIndex++) {
                     SHERPAPublisher sherpaPublisher = new SHERPAPublisher();
@@ -10,8 +10,8 @@ package org.dspace.app.sherpa.v2;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
-import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
@@ -78,7 +78,7 @@ public class SHERPAResponse {
      * @param jsonData - the JSON input stream from the API result response body
      */
     private void parseJSON(InputStream jsonData) throws IOException {
-        InputStreamReader streamReader = new InputStreamReader(jsonData);
+        InputStreamReader streamReader = new InputStreamReader(jsonData, StandardCharsets.UTF_8);
         JSONTokener jsonTokener = new JSONTokener(streamReader);
         JSONObject httpResponse;
         try {
@@ -90,10 +90,10 @@ public class SHERPAResponse {
             // - however, we only ever want one result since we're passing an "equals ISSN" query
             if (items.length() > 0) {
                 metadata = new SHERPASystemMetadata();
-                this.journals = new LinkedList<>();
+                this.journals = new ArrayList<>();
                 // Iterate search result items
                 for (int itemIndex = 0; itemIndex < items.length(); itemIndex++) {
-                    List<SHERPAPublisher> sherpaPublishers = new LinkedList<>();
+                    List<SHERPAPublisher> sherpaPublishers = new ArrayList<>();
                     List<SHERPAPublisherPolicy> policies = new ArrayList<>();
                     SHERPAPublisher sherpaPublisher = new SHERPAPublisher();
                     SHERPAJournal sherpaJournal = new SHERPAJournal();
@@ -289,7 +289,7 @@ public class SHERPAResponse {

             // Is the item in DOAJ?
             if (item.has("listed_in_doaj")) {
-                sherpaJournal.setInDOAJ(("yes".equals(item.getString("listed_in_doaj"))));
+                sherpaJournal.setInDOAJ("yes".equals(item.getString("listed_in_doaj")));
             }

             return sherpaJournal;
@@ -403,7 +403,6 @@ public class SHERPAResponse {
             // published = pdfversion
             // These strings can be used to construct i18n messages.
             String articleVersion = "unknown";
-            String versionLabel = "Unknown";

             // Each 'permitted OA' can actually refer to multiple versions
             if (permitted.has("article_version")) {
@@ -40,7 +40,7 @@ import org.dspace.content.service.CollectionService;
 import org.dspace.content.service.CommunityService;
 import org.dspace.content.service.ItemService;
 import org.dspace.core.Context;
-import org.dspace.core.LogManager;
+import org.dspace.core.LogHelper;
 import org.dspace.discovery.DiscoverQuery;
 import org.dspace.discovery.DiscoverResult;
 import org.dspace.discovery.SearchService;
@@ -284,7 +284,7 @@ public class GenerateSitemaps {

         if (makeHTMLMap) {
             int files = html.finish();
-            log.info(LogManager.getHeader(c, "write_sitemap",
+            log.info(LogHelper.getHeader(c, "write_sitemap",
                 "type=html,num_files=" + files + ",communities="
                     + comms.size() + ",collections=" + colls.size()
                     + ",items=" + itemCount));
@@ -292,7 +292,7 @@ public class GenerateSitemaps {

         if (makeSitemapOrg) {
             int files = sitemapsOrg.finish();
-            log.info(LogManager.getHeader(c, "write_sitemap",
+            log.info(LogHelper.getHeader(c, "write_sitemap",
                 "type=html,num_files=" + files + ",communities="
                     + comms.size() + ",collections=" + colls.size()
                     + ",items=" + itemCount));
@@ -86,7 +86,7 @@ public class SitemapsOrgGenerator extends AbstractGenerator {

     @Override
     public String getURLText(String url, Date lastMod) {
-        StringBuffer urlText = new StringBuffer();
+        StringBuilder urlText = new StringBuilder();

         urlText.append("<url><loc>").append(url).append("</loc>");
         if (lastMod != null) {
@@ -64,10 +64,6 @@ public class CreateStatReport {
|
|||||||
*/
|
*/
|
||||||
private static Context context;
|
private static Context context;
|
||||||
|
|
||||||
/**
|
|
||||||
* the config file from which to configure the analyser
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Default constructor
|
* Default constructor
|
||||||
*/
|
*/
|
||||||
@@ -170,22 +166,19 @@ public class CreateStatReport {
|
|||||||
String myLogDir = null;
|
String myLogDir = null;
|
||||||
String myFileTemplate = null;
|
String myFileTemplate = null;
|
||||||
String myConfigFile = null;
|
String myConfigFile = null;
|
||||||
StringBuffer myOutFile = null;
|
|
||||||
Date myStartDate = null;
|
|
||||||
Date myEndDate = null;
|
|
||||||
boolean myLookUp = false;
|
boolean myLookUp = false;
|
||||||
|
|
||||||
Calendar start = new GregorianCalendar(calendar.get(Calendar.YEAR),
|
Calendar start = new GregorianCalendar(calendar.get(Calendar.YEAR),
|
||||||
calendar.get(Calendar.MONTH),
|
calendar.get(Calendar.MONTH),
|
||||||
calendar.getActualMinimum(Calendar.DAY_OF_MONTH));
|
calendar.getActualMinimum(Calendar.DAY_OF_MONTH));
|
||||||
myStartDate = start.getTime();
|
Date myStartDate = start.getTime();
|
||||||
|
|
||||||
Calendar end = new GregorianCalendar(calendar.get(Calendar.YEAR),
|
Calendar end = new GregorianCalendar(calendar.get(Calendar.YEAR),
|
||||||
calendar.get(Calendar.MONTH),
|
calendar.get(Calendar.MONTH),
|
||||||
calendar.getActualMaximum(Calendar.DAY_OF_MONTH));
|
calendar.getActualMaximum(Calendar.DAY_OF_MONTH));
|
||||||
myEndDate = end.getTime();
|
Date myEndDate = end.getTime();
|
||||||
|
|
||||||
myOutFile = new StringBuffer(outputLogDirectory);
|
StringBuilder myOutFile = new StringBuilder(outputLogDirectory);
|
||||||
myOutFile.append(outputPrefix);
|
myOutFile.append(outputPrefix);
|
||||||
myOutFile.append(calendar.get(Calendar.YEAR));
|
myOutFile.append(calendar.get(Calendar.YEAR));
|
||||||
myOutFile.append("-");
|
myOutFile.append("-");
|
||||||
@@ -211,12 +204,11 @@ public class CreateStatReport {
         String myLogDir = null;
         String myFileTemplate = null;
         String myConfigFile = null;
-        StringBuffer myOutFile = null;
         Date myStartDate = null;
         Date myEndDate = null;
         boolean myLookUp = false;

-        myOutFile = new StringBuffer(outputLogDirectory);
+        StringBuilder myOutFile = new StringBuilder(outputLogDirectory);
         myOutFile.append(outputPrefix);
         myOutFile.append(calendar.get(Calendar.YEAR));
         myOutFile.append("-");
@@ -245,9 +237,6 @@ public class CreateStatReport {
         String myLogDir = null;
         String myFileTemplate = null;
         String myConfigFile = null;
-        StringBuffer myOutFile = null;
-        Date myStartDate = null;
-        Date myEndDate = null;
         boolean myLookUp = false;

         Calendar reportEndDate = new GregorianCalendar(calendar.get(Calendar.YEAR),
@@ -260,14 +249,14 @@ public class CreateStatReport {
         Calendar start = new GregorianCalendar(currentMonth.get(Calendar.YEAR),
             currentMonth.get(Calendar.MONTH),
             currentMonth.getActualMinimum(Calendar.DAY_OF_MONTH));
-        myStartDate = start.getTime();
+        Date myStartDate = start.getTime();

         Calendar end = new GregorianCalendar(currentMonth.get(Calendar.YEAR),
             currentMonth.get(Calendar.MONTH),
             currentMonth.getActualMaximum(Calendar.DAY_OF_MONTH));
-        myEndDate = end.getTime();
+        Date myEndDate = end.getTime();

-        myOutFile = new StringBuffer(outputLogDirectory);
+        StringBuilder myOutFile = new StringBuilder(outputLogDirectory);
         myOutFile.append(outputPrefix);
         myOutFile.append(currentMonth.get(Calendar.YEAR));
         myOutFile.append("-");
@@ -293,11 +282,9 @@ public class CreateStatReport {
         String outputPrefix = "report-general-";

         String myFormat = "html";
-        StringBuffer myInput = null;
-        StringBuffer myOutput = null;
         String myMap = null;

-        myInput = new StringBuffer(outputLogDirectory);
+        StringBuilder myInput = new StringBuilder(outputLogDirectory);
         myInput.append(inputPrefix);
         myInput.append(calendar.get(Calendar.YEAR));
         myInput.append("-");
@@ -306,7 +293,7 @@ public class CreateStatReport {
         myInput.append(calendar.get(Calendar.DAY_OF_MONTH));
         myInput.append(outputSuffix);

-        myOutput = new StringBuffer(outputReportDirectory);
+        StringBuilder myOutput = new StringBuilder(outputReportDirectory);
         myOutput.append(outputPrefix);
         myOutput.append(calendar.get(Calendar.YEAR));
         myOutput.append("-");
@@ -332,8 +319,6 @@ public class CreateStatReport {
         String outputPrefix = "report-";

         String myFormat = "html";
-        StringBuffer myInput = null;
-        StringBuffer myOutput = null;
         String myMap = null;

         Calendar reportEndDate = new GregorianCalendar(calendar.get(Calendar.YEAR),
@@ -344,14 +329,14 @@ public class CreateStatReport {

         while (currentMonth.before(reportEndDate)) {

-            myInput = new StringBuffer(outputLogDirectory);
+            StringBuilder myInput = new StringBuilder(outputLogDirectory);
             myInput.append(inputPrefix);
             myInput.append(currentMonth.get(Calendar.YEAR));
             myInput.append("-");
             myInput.append(currentMonth.get(Calendar.MONTH) + 1);
             myInput.append(outputSuffix);

-            myOutput = new StringBuffer(outputReportDirectory);
+            StringBuilder myOutput = new StringBuilder(outputReportDirectory);
             myOutput.append(outputPrefix);
             myOutput.append(currentMonth.get(Calendar.YEAR));
             myOutput.append("-");
@@ -376,18 +361,16 @@ public class CreateStatReport {
         String outputPrefix = "report-";

         String myFormat = "html";
-        StringBuffer myInput = null;
-        StringBuffer myOutput = null;
         String myMap = null;

-        myInput = new StringBuffer(outputLogDirectory);
+        StringBuilder myInput = new StringBuilder(outputLogDirectory);
         myInput.append(inputPrefix);
         myInput.append(calendar.get(Calendar.YEAR));
         myInput.append("-");
         myInput.append(calendar.get(Calendar.MONTH) + 1);
         myInput.append(outputSuffix);

-        myOutput = new StringBuffer(outputReportDirectory);
+        StringBuilder myOutput = new StringBuilder(outputReportDirectory);
         myOutput.append(outputPrefix);
         myOutput.append(calendar.get(Calendar.YEAR));
         myOutput.append("-");
@@ -31,7 +31,7 @@ import java.util.regex.Pattern;

 import org.apache.commons.lang3.StringUtils;
 import org.dspace.core.Context;
-import org.dspace.core.LogManager;
+import org.dspace.core.LogHelper;
 import org.dspace.core.Utils;
 import org.dspace.discovery.DiscoverQuery;
 import org.dspace.discovery.SearchServiceException;
@@ -1144,17 +1144,17 @@ public class LogAnalyser {
         if (match.matches()) {
             // set up a new log line object
             LogLine logLine = new LogLine(parseDate(match.group(1).trim()),
-                LogManager.unescapeLogField(match.group(2)).trim(),
-                LogManager.unescapeLogField(match.group(3)).trim(),
-                LogManager.unescapeLogField(match.group(4)).trim(),
-                LogManager.unescapeLogField(match.group(5)).trim());
+                LogHelper.unescapeLogField(match.group(2)).trim(),
+                LogHelper.unescapeLogField(match.group(3)).trim(),
+                LogHelper.unescapeLogField(match.group(4)).trim(),
+                LogHelper.unescapeLogField(match.group(5)).trim());

             return logLine;
         } else {
             match = validBase.matcher(line);
             if (match.matches()) {
                 LogLine logLine = new LogLine(parseDate(match.group(1).trim()),
-                    LogManager.unescapeLogField(match.group(2)).trim(),
+                    LogHelper.unescapeLogField(match.group(2)).trim(),
                     null,
                     null,
                     null
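A note on the rename repeated throughout the hunks above and below: the DSpace logging utility previously imported as org.dspace.core.LogManager is now imported as org.dspace.core.LogHelper, presumably so it no longer collides with org.apache.logging.log4j.LogManager (that log4j import is added in the PasswordAuthentication hunk further down). A minimal sketch of the resulting call pattern follows; the example class, method, and field names are assumptions for illustration only and are not part of this commit.

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;
    import org.dspace.core.Context;
    import org.dspace.core.LogHelper;

    public class LogHelperUsageSketch {
        // log4j's LogManager can now be referenced unqualified next to the DSpace helper.
        private static final Logger log = LogManager.getLogger();

        void record(Context context, String query) {
            // Same three-argument header builder the old org.dspace.core.LogManager exposed.
            log.info(LogHelper.getHeader(context, "search", "query=" + query));
        }
    }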
@@ -10,8 +10,6 @@ package org.dspace.app.util;
 import java.io.File;
 import java.io.IOException;
 import java.sql.SQLException;
-import java.util.LinkedList;
-import java.util.List;
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.ParserConfigurationException;
@@ -30,7 +28,6 @@ import org.dspace.content.EntityType;
 import org.dspace.content.RelationshipType;
 import org.dspace.content.factory.ContentServiceFactory;
 import org.dspace.content.service.EntityTypeService;
-import org.dspace.content.service.RelationshipService;
 import org.dspace.content.service.RelationshipTypeService;
 import org.dspace.core.Context;
 import org.w3c.dom.Document;
@@ -42,20 +39,18 @@ import org.xml.sax.SAXException;
 /**
  * This script is used to initialize the database with a set of relationship types that are written
  * in an xml file that is given to this script.
- * This XML file needs to have a proper XML structure and needs to define the variables of the RelationshipType object
+ * This XML file needs to have a proper XML structure and needs to define the variables of the RelationshipType object.
  */
 public class InitializeEntities {

     private final static Logger log = LogManager.getLogger();

     private final RelationshipTypeService relationshipTypeService;
-    private final RelationshipService relationshipService;
     private final EntityTypeService entityTypeService;


     private InitializeEntities() {
         relationshipTypeService = ContentServiceFactory.getInstance().getRelationshipTypeService();
-        relationshipService = ContentServiceFactory.getInstance().getRelationshipService();
         entityTypeService = ContentServiceFactory.getInstance().getEntityTypeService();
     }

@@ -111,14 +106,12 @@ public class InitializeEntities {
         try {
             File fXmlFile = new File(fileLocation);
             DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
-            DocumentBuilder dBuilder = null;
-            dBuilder = dbFactory.newDocumentBuilder();
+            DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
             Document doc = dBuilder.parse(fXmlFile);

             doc.getDocumentElement().normalize();

             NodeList nList = doc.getElementsByTagName("type");
-            List<RelationshipType> relationshipTypes = new LinkedList<>();
             for (int i = 0; i < nList.getLength(); i++) {
                 Node nNode = nList.item(i);

@@ -0,0 +1,69 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.app.util;
+
+import java.util.List;
+
+import org.dspace.content.RelationshipType;
+
+public class RelationshipUtils {
+
+    private RelationshipUtils() {
+    }
+
+    /**
+     * Matches two Entity types to a Relationship Type from a set of Relationship Types.
+     *
+     * Given a list of Relationship Types, this method will find a Relationship Type that
+     * is configured between the originType and the targetType, with the matching originTypeName.
+     * It will match a relationship between these two entities in either direction (eg leftward
+     * or rightward).
+     *
+     * Example: originType = Author, targetType = Publication, originTypeName = isAuthorOfPublication.
+     *
+     * @param relTypes set of Relationship Types in which to find a match.
+     * @param targetType entity type of target (eg. Publication).
+     * @param originType entity type of origin referer (eg. Author).
+     * @param originTypeName the name of the relationship (eg. isAuthorOfPublication)
+     * @return null or matched Relationship Type.
+     */
+    public static RelationshipType matchRelationshipType(List<RelationshipType> relTypes, String targetType,
+                                                          String originType, String originTypeName) {
+        RelationshipType foundRelationshipType = null;
+        if (originTypeName.split("\\.").length > 1) {
+            originTypeName = originTypeName.split("\\.")[1];
+        }
+        for (RelationshipType relationshipType : relTypes) {
+            // Is origin type leftward or righward
+            boolean isLeft = false;
+            if (relationshipType.getLeftType().getLabel().equalsIgnoreCase(originType)) {
+                isLeft = true;
+            }
+            if (isLeft) {
+                // Validate typeName reference
+                if (!relationshipType.getLeftwardType().equalsIgnoreCase(originTypeName)) {
+                    continue;
+                }
+                if (relationshipType.getLeftType().getLabel().equalsIgnoreCase(originType)
+                    && relationshipType.getRightType().getLabel().equalsIgnoreCase(targetType)) {
+                    foundRelationshipType = relationshipType;
+                }
+            } else {
+                if (!relationshipType.getRightwardType().equalsIgnoreCase(originTypeName)) {
+                    continue;
+                }
+                if (relationshipType.getLeftType().getLabel().equalsIgnoreCase(targetType)
+                    && relationshipType.getRightType().getLabel().equalsIgnoreCase(originType)) {
+                    foundRelationshipType = relationshipType;
+                }
+            }
+        }
+        return foundRelationshipType;
+    }
+
+}
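The javadoc of matchRelationshipType above describes matching a relationship type between two entity types in either direction. A short usage sketch follows, under stated assumptions: the Context and the RelationshipTypeService lookup via ContentServiceFactory, including findAll(Context), are assumed here and are not part of this new file.

    import java.util.List;

    import org.dspace.content.RelationshipType;
    import org.dspace.content.factory.ContentServiceFactory;
    import org.dspace.content.service.RelationshipTypeService;
    import org.dspace.core.Context;

    public class RelationshipUtilsUsageSketch {
        // Hypothetical caller: find the type linking an Author to a Publication
        // via "isAuthorOfPublication", in either direction.
        RelationshipType findAuthorOfPublication(Context context) throws java.sql.SQLException {
            RelationshipTypeService relationshipTypeService =
                ContentServiceFactory.getInstance().getRelationshipTypeService();
            List<RelationshipType> relTypes = relationshipTypeService.findAll(context);
            return RelationshipUtils.matchRelationshipType(
                relTypes, "Publication", "Author", "isAuthorOfPublication");
        }
    }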
@@ -38,13 +38,12 @@ import org.dspace.core.Utils;
 *
 * @author Robert Tansley
 * @author Mark Diggory
- * @version $Revision$
 */
 public class Util {
     // cache for source version result
     private static String sourceVersion = null;

-    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(Util.class);
+    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger();

     /**
      * Default constructor. Must be protected as org.dspace.xmlworkflow.WorkflowUtils extends it
@@ -60,7 +59,7 @@ public class Util {
      * spaces
      */
     public static String nonBreakSpace(String s) {
-        StringBuffer newString = new StringBuffer();
+        StringBuilder newString = new StringBuilder();

         for (int i = 0; i < s.length(); i++) {
             char ch = s.charAt(i);
@@ -99,7 +98,7 @@ public class Util {
             return "";
         }

-        StringBuffer out = new StringBuffer();
+        StringBuilder out = new StringBuilder();

         final String[] pctEncoding = {"%00", "%01", "%02", "%03", "%04",
             "%05", "%06", "%07", "%08", "%09", "%0a", "%0b", "%0c", "%0d",
@@ -263,7 +262,7 @@ public class Util {
             return null;
         }

-        List<UUID> return_values = new ArrayList<UUID>(request_values.length);
+        List<UUID> return_values = new ArrayList<>(request_values.length);

         for (String s : request_values) {
             try {
@@ -402,7 +401,7 @@ public class Util {
             Item item, List<MetadataValue> values, String schema, String element,
             String qualifier, Locale locale) throws SQLException,
             DCInputsReaderException {
-        List<String> toReturn = new ArrayList<String>();
+        List<String> toReturn = new ArrayList<>();
         DCInput myInputs = null;
         boolean myInputsFound = false;
         String formFileName = I18nUtil.getInputFormsFileName(locale);
@@ -478,8 +477,9 @@ public class Util {
     }

     /**
-     * Split a list in an array of i sub-lists uniformly sized
+     * Split a list in an array of i sub-lists uniformly sized.
      *
+     * @param <T> type of objects in the list.
      * @param idsList the list to split
      * @param i the number of sublists to return
      *
@@ -58,6 +58,7 @@ public class WebApp implements ReloadableEntity<Integer> {

     }

+    @Override
     public Integer getID() {
         return id;
     }
@@ -8,7 +8,6 @@
 package org.dspace.app.util;

 import java.util.ArrayList;
-import java.util.LinkedList;
 import java.util.List;

 import org.apache.commons.lang3.StringUtils;
@@ -29,13 +28,13 @@ public class XMLUtils {

     /**
      * @param dataRoot the starting node
-     * @param name the name of the subelement to find
+     * @param name the tag name of the child element to find.
      * @return the list of all DOM Element with the provided name direct child
      * of the starting node
      */
     public static List<Element> getElementList(Element dataRoot, String name) {
         NodeList list = dataRoot.getElementsByTagName(name);
-        List<Element> listElements = new ArrayList<Element>();
+        List<Element> listElements = new ArrayList<>();
         for (int i = 0; i < list.getLength(); i++) {
             Element item = (Element) list.item(i);
             if (item.getParentNode().equals(dataRoot)) {
@@ -105,7 +104,7 @@ public class XMLUtils {

     /**
      * @param rootElement the starting node
-     * @param subElementName the name of the subelement to find
+     * @param subElementName the tag name of the child element to find.
      * @return a list of string including all the text contents of the sub
      * element with the specified name. If there are not sub element
      * with the supplied name the method will return null
@@ -121,7 +120,7 @@ public class XMLUtils {
             return null;
         }

-        List<String> result = new LinkedList<String>();
+        List<String> result = new ArrayList<>();
         for (Element el : subElements) {
             if (StringUtils.isNotBlank(el.getTextContent())) {
                 result.add(el.getTextContent().trim());
@@ -152,7 +151,7 @@ public class XMLUtils {
             return null;
         }

-        List<String[]> result = new LinkedList<String[]>();
+        List<String[]> result = new ArrayList<>();
         for (Element el : subElements) {
             String[] tmp = new String[fieldsName.length];
             for (int idx = 0; idx < fieldsName.length; idx++) {
@@ -19,7 +19,7 @@ import javax.servlet.http.HttpServletResponse;

 import org.apache.logging.log4j.Logger;
 import org.dspace.core.Context;
-import org.dspace.core.LogManager;
+import org.dspace.core.LogHelper;
 import org.dspace.core.factory.CoreServiceFactory;
 import org.dspace.eperson.EPerson;
 import org.dspace.eperson.Group;
@@ -194,7 +194,7 @@ public class IPAuthentication implements AuthenticationMethod {

                 groups.add(group);
             } else {
-                log.warn(LogManager.getHeader(context,
+                log.warn(LogHelper.getHeader(context,
                     "configuration_error", "unknown_group="
                         + groupName));
             }
@@ -202,7 +202,7 @@ public class IPAuthentication implements AuthenticationMethod {
                 }
             }
         } catch (IPMatcherException ipme) {
-            log.warn(LogManager.getHeader(context, "configuration_error",
+            log.warn(LogHelper.getHeader(context, "configuration_error",
                 "bad_ip=" + addr), ipme);
         }
     }
@@ -228,7 +228,7 @@ public class IPAuthentication implements AuthenticationMethod {

                 groups.remove(group);
             } else {
-                log.warn(LogManager.getHeader(context,
+                log.warn(LogHelper.getHeader(context,
                     "configuration_error", "unknown_group="
                         + groupName));
             }
@@ -236,7 +236,7 @@ public class IPAuthentication implements AuthenticationMethod {
                 }
             }
         } catch (IPMatcherException ipme) {
-            log.warn(LogManager.getHeader(context, "configuration_error",
+            log.warn(LogHelper.getHeader(context, "configuration_error",
                 "bad_ip=" + addr), ipme);
         }
     }
@@ -248,7 +248,7 @@ public class IPAuthentication implements AuthenticationMethod {
             gsb.append(group.getID()).append(", ");
         }

-        log.debug(LogManager.getHeader(context, "authenticated",
+        log.debug(LogHelper.getHeader(context, "authenticated",
             "special_groups=" + gsb.toString()
                 + " (by IP=" + addr + ", useProxies=" + useProxies.toString() + ")"
         ));
@@ -36,7 +36,7 @@ import org.dspace.authenticate.factory.AuthenticateServiceFactory;
 import org.dspace.authenticate.service.AuthenticationService;
 import org.dspace.authorize.AuthorizeException;
 import org.dspace.core.Context;
-import org.dspace.core.LogManager;
+import org.dspace.core.LogHelper;
 import org.dspace.eperson.EPerson;
 import org.dspace.eperson.Group;
 import org.dspace.eperson.factory.EPersonServiceFactory;
@@ -156,7 +156,7 @@ public class LDAPAuthentication
             Group ldapGroup = groupService.findByName(context, groupName);
             if (ldapGroup == null) {
                 // Oops - the group isn't there.
-                log.warn(LogManager.getHeader(context,
+                log.warn(LogHelper.getHeader(context,
                     "ldap_specialgroup",
                     "Group defined in login.specialgroup does not exist"));
                 return Collections.EMPTY_LIST;
@@ -211,7 +211,7 @@ public class LDAPAuthentication
                                 String realm,
                                 HttpServletRequest request)
         throws SQLException {
-        log.info(LogManager.getHeader(context, "auth", "attempting trivial auth of user=" + netid));
+        log.info(LogHelper.getHeader(context, "auth", "attempting trivial auth of user=" + netid));

         // Skip out when no netid or password is given.
         if (netid == null || password == null) {
@@ -245,7 +245,7 @@ public class LDAPAuthentication

             // Check a DN was found
             if ((dn == null) || (dn.trim().equals(""))) {
-                log.info(LogManager
+                log.info(LogHelper
                     .getHeader(context, "failed_login", "no DN found for user " + netid));
                 return BAD_CREDENTIALS;
             }
@@ -265,7 +265,7 @@ public class LDAPAuthentication
                 // assign user to groups based on ldap dn
                 assignGroups(dn, ldap.ldapGroup, context);

-                log.info(LogManager
+                log.info(LogHelper
                     .getHeader(context, "authenticate", "type=ldap"));
                 return SUCCESS;
             } else {
@@ -277,7 +277,7 @@ public class LDAPAuthentication

             if (ldap.ldapAuthenticate(dn, password, context)) {
                 // Register the new user automatically
-                log.info(LogManager.getHeader(context,
+                log.info(LogHelper.getHeader(context,
                     "autoregister", "netid=" + netid));

                 String email = ldap.ldapEmail;
@@ -290,7 +290,7 @@ public class LDAPAuthentication
                     email = netid + configurationService.getProperty("authentication-ldap.netid_email_domain");
                 } else {
                     // We don't have a valid email address. We'll default it to 'netid' but log a warning
-                    log.warn(LogManager.getHeader(context, "autoregister",
+                    log.warn(LogHelper.getHeader(context, "autoregister",
                         "Unable to locate email address for account '" + netid + "', so" +
                             " it has been set to '" + netid + "'. " +
                             "Please check the LDAP 'email_field' OR consider " +
@@ -303,7 +303,7 @@ public class LDAPAuthentication
                 try {
                     eperson = ePersonService.findByEmail(context, email);
                     if (eperson != null) {
-                        log.info(LogManager.getHeader(context,
+                        log.info(LogHelper.getHeader(context,
                             "type=ldap-login", "type=ldap_but_already_email"));
                         context.turnOffAuthorisationSystem();
                         eperson.setNetid(netid.toLowerCase());
@@ -350,12 +350,12 @@ public class LDAPAuthentication
                         context.restoreAuthSystemState();
                     }

-                    log.info(LogManager.getHeader(context, "authenticate",
+                    log.info(LogHelper.getHeader(context, "authenticate",
                         "type=ldap-login, created ePerson"));
                     return SUCCESS;
                 } else {
                     // No auto-registration for valid certs
-                    log.info(LogManager.getHeader(context,
+                    log.info(LogHelper.getHeader(context,
                         "failed_login", "type=ldap_but_no_record"));
                     return NO_SUCH_USER;
                 }
@@ -429,7 +429,7 @@ public class LDAPAuthentication
         } catch (NumberFormatException e) {
             // Log the error if it has been set but is invalid
             if (ldap_search_scope != null) {
-                log.warn(LogManager.getHeader(context,
+                log.warn(LogHelper.getHeader(context,
                     "ldap_authentication", "invalid search scope: " + ldap_search_scope));
             }
         }
@@ -548,19 +548,19 @@ public class LDAPAuthentication
                         // Ambiguous user, can't continue

                     } else {
-                        log.debug(LogManager.getHeader(context, "got DN", resultDN));
+                        log.debug(LogHelper.getHeader(context, "got DN", resultDN));
                         return resultDN;
                     }
                 }
             } catch (NamingException e) {
                 // if the lookup fails go ahead and create a new record for them because the authentication
                 // succeeded
-                log.warn(LogManager.getHeader(context,
+                log.warn(LogHelper.getHeader(context,
                     "ldap_attribute_lookup", "type=failed_search "
                         + e));
             }
         } catch (NamingException | IOException e) {
-            log.warn(LogManager.getHeader(context,
+            log.warn(LogHelper.getHeader(context,
                 "ldap_authentication", "type=failed_auth " + e));
         } finally {
             // Close the context when we're done
@@ -630,7 +630,7 @@ public class LDAPAuthentication
             }
         } catch (NamingException | IOException e) {
             // something went wrong (like wrong password) so return false
-            log.warn(LogManager.getHeader(context,
+            log.warn(LogHelper.getHeader(context,
                 "ldap_authentication", "type=failed_auth " + e));
             return false;
         } finally {
@@ -714,18 +714,18 @@ public class LDAPAuthentication
                         groupService.update(context, ldapGroup);
                     } else {
                         // The group does not exist
-                        log.warn(LogManager.getHeader(context,
+                        log.warn(LogHelper.getHeader(context,
                             "ldap_assignGroupsBasedOnLdapDn",
                             "Group defined in authentication-ldap.login.groupmap." + i
                                 + " does not exist :: " + dspaceGroupName));
                     }
                 } catch (AuthorizeException ae) {
-                    log.debug(LogManager.getHeader(context,
+                    log.debug(LogHelper.getHeader(context,
                         "assignGroupsBasedOnLdapDn could not authorize addition to " +
                             "group",
                         dspaceGroupName));
                 } catch (SQLException e) {
-                    log.debug(LogManager.getHeader(context, "assignGroupsBasedOnLdapDn could not find group",
+                    log.debug(LogHelper.getHeader(context, "assignGroupsBasedOnLdapDn could not find group",
                         dspaceGroupName));
                 }
             }
@@ -15,9 +15,10 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;

 import org.apache.commons.lang3.StringUtils;
+import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.dspace.core.Context;
-import org.dspace.core.LogManager;
+import org.dspace.core.LogHelper;
 import org.dspace.eperson.EPerson;
 import org.dspace.eperson.Group;
 import org.dspace.eperson.factory.EPersonServiceFactory;
@@ -41,7 +42,6 @@ import org.dspace.services.factory.DSpaceServicesFactory;
 * Basic Auth username and password to the <code>AuthenticationManager</code>.
 *
 * @author Larry Stone
- * @version $Revision$
 */
 public class PasswordAuthentication
     implements AuthenticationMethod {
@@ -49,7 +49,7 @@ public class PasswordAuthentication
     /**
      * log4j category
      */
-    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PasswordAuthentication.class);
+    private static final Logger log = LogManager.getLogger();


     /**
@@ -142,12 +142,12 @@ public class PasswordAuthentication
             .toString())) {
             String groupName = DSpaceServicesFactory.getInstance().getConfigurationService()
                 .getProperty("authentication-password.login.specialgroup");
-            if ((groupName != null) && (!groupName.trim().equals(""))) {
+            if ((groupName != null) && !groupName.trim().isEmpty()) {
                 Group specialGroup = EPersonServiceFactory.getInstance().getGroupService()
                     .findByName(context, groupName);
                 if (specialGroup == null) {
                     // Oops - the group isn't there.
-                    log.warn(LogManager.getHeader(context,
+                    log.warn(LogHelper.getHeader(context,
                         "password_specialgroup",
                         "Group defined in modules/authentication-password.cfg login" +
                             ".specialgroup does not exist"));
@@ -158,7 +158,7 @@ public class PasswordAuthentication
                 }
             }
         } catch (Exception e) {
-            log.error(LogManager.getHeader(context, "getSpecialGroups", ""), e);
+            log.error(LogHelper.getHeader(context, "getSpecialGroups", ""), e);
         }
         return Collections.EMPTY_LIST;
     }
@@ -181,7 +181,7 @@ public class PasswordAuthentication
      * SUCCESS, BAD_CREDENTIALS, CERT_REQUIRED, NO_SUCH_USER, BAD_ARGS
      * <p>Meaning:
      * <br>SUCCESS - authenticated OK.
-     * <br>BAD_CREDENTIALS - user exists, but assword doesn't match
+     * <br>BAD_CREDENTIALS - user exists, but password doesn't match
      * <br>CERT_REQUIRED - not allowed to login this way without X.509 cert.
      * <br>NO_SUCH_USER - no EPerson with matching email address.
      * <br>BAD_ARGS - missing username, or user matched but cannot login.
@@ -196,7 +196,7 @@ public class PasswordAuthentication
         throws SQLException {
         if (username != null && password != null) {
             EPerson eperson = null;
-            log.info(LogManager.getHeader(context, "authenticate", "attempting password auth of user=" + username));
+            log.info(LogHelper.getHeader(context, "authenticate", "attempting password auth of user=" + username));
             eperson = EPersonServiceFactory.getInstance().getEPersonService()
                 .findByEmail(context, username.toLowerCase());

@@ -208,7 +208,7 @@ public class PasswordAuthentication
                 return BAD_ARGS;
             } else if (eperson.getRequireCertificate()) {
                 // this user can only login with x.509 certificate
-                log.warn(LogManager.getHeader(context, "authenticate",
+                log.warn(LogHelper.getHeader(context, "authenticate",
                     "rejecting PasswordAuthentication because " + username + " requires " +
                         "certificate."));
                 return CERT_REQUIRED;
@@ -216,7 +216,7 @@ public class PasswordAuthentication
                 .checkPassword(context, eperson, password)) {
                 // login is ok if password matches:
                 context.setCurrentUser(eperson);
-                log.info(LogManager.getHeader(context, "authenticate", "type=PasswordAuthentication"));
+                log.info(LogHelper.getHeader(context, "authenticate", "type=PasswordAuthentication"));
                 return SUCCESS;
             } else {
                 return BAD_CREDENTIALS;
@@ -35,7 +35,7 @@ import org.dspace.authenticate.factory.AuthenticateServiceFactory;
 import org.dspace.authenticate.service.AuthenticationService;
 import org.dspace.authorize.AuthorizeException;
 import org.dspace.core.Context;
-import org.dspace.core.LogManager;
+import org.dspace.core.LogHelper;
 import org.dspace.eperson.EPerson;
 import org.dspace.eperson.Group;
 import org.dspace.eperson.factory.EPersonServiceFactory;
@@ -286,7 +286,7 @@ public class X509Authentication implements AuthenticationMethod {
         try {
             certificate.checkValidity();
         } catch (CertificateException e) {
-            log.info(LogManager.getHeader(context, "authentication",
+            log.info(LogHelper.getHeader(context, "authentication",
                 "X.509 Certificate is EXPIRED or PREMATURE: "
                     + e.toString()));
             return false;
@@ -298,7 +298,7 @@ public class X509Authentication implements AuthenticationMethod {
             certificate.verify(caPublicKey);
             return true;
         } catch (GeneralSecurityException e) {
-            log.info(LogManager.getHeader(context, "authentication",
+            log.info(LogHelper.getHeader(context, "authentication",
                 "X.509 Certificate FAILED SIGNATURE check: "
                     + e.toString()));
         }
@@ -322,11 +322,11 @@ public class X509Authentication implements AuthenticationMethod {
                 }
             }
             log
-                .info(LogManager
+                .info(LogHelper
                     .getHeader(context, "authentication",
                         "Keystore method FAILED SIGNATURE check on client cert."));
         } catch (GeneralSecurityException e) {
-            log.info(LogManager.getHeader(context, "authentication",
+            log.info(LogHelper.getHeader(context, "authentication",
                 "X.509 Certificate FAILED SIGNATURE check: "
                     + e.toString()));
         }
@@ -461,7 +461,7 @@ public class X509Authentication implements AuthenticationMethod {
                 if (group != null) {
                     groups.add(group);
                 } else {
-                    log.warn(LogManager.getHeader(context,
+                    log.warn(LogHelper.getHeader(context,
                         "configuration_error", "unknown_group="
                             + groupName));
                 }
@@ -513,7 +513,7 @@ public class X509Authentication implements AuthenticationMethod {
         try {
             if (!isValid(context, certs[0])) {
                 log
-                    .warn(LogManager
+                    .warn(LogHelper
                         .getHeader(context, "authenticate",
                             "type=x509certificate, status=BAD_CREDENTIALS (not valid)"));
                 return BAD_CREDENTIALS;
@@ -530,7 +530,7 @@ public class X509Authentication implements AuthenticationMethod {
                 if (email != null
                     && canSelfRegister(context, request, null)) {
                     // Register the new user automatically
-                    log.info(LogManager.getHeader(context, "autoregister",
+                    log.info(LogHelper.getHeader(context, "autoregister",
                         "from=x.509, email=" + email));

                     // TEMPORARILY turn off authorisation
@@ -549,25 +549,25 @@ public class X509Authentication implements AuthenticationMethod {
                 } else {
                     // No auto-registration for valid certs
                     log
-                        .warn(LogManager
+                        .warn(LogHelper
                             .getHeader(context, "authenticate",
                                 "type=cert_but_no_record, cannot auto-register"));
                     return NO_SUCH_USER;
                 }
             } else if (!eperson.canLogIn()) { // make sure this is a login account
-                log.warn(LogManager.getHeader(context, "authenticate",
+                log.warn(LogHelper.getHeader(context, "authenticate",
                     "type=x509certificate, email=" + email
                         + ", canLogIn=false, rejecting."));
                 return BAD_ARGS;
             } else {
-                log.info(LogManager.getHeader(context, "login",
+                log.info(LogHelper.getHeader(context, "login",
                     "type=x509certificate"));
                 context.setCurrentUser(eperson);
                 setSpecialGroupsFlag(request, email);
                 return SUCCESS;
             }
         } catch (AuthorizeException ce) {
-            log.warn(LogManager.getHeader(context, "authorize_exception",
+            log.warn(LogHelper.getHeader(context, "authorize_exception",
                 ""), ce);
         }

@@ -21,7 +21,7 @@ import org.apache.solr.common.SolrDocument;
 import org.dspace.authority.service.AuthorityValueService;
 import org.dspace.content.authority.SolrAuthority;
 import org.dspace.core.Context;
-import org.dspace.core.LogManager;
+import org.dspace.core.LogHelper;
 import org.springframework.beans.factory.annotation.Autowired;

 /**
@@ -220,7 +220,7 @@ public class AuthorityValueServiceImpl implements AuthorityValueService {
                 }
             }
         } catch (Exception e) {
-            log.error(LogManager.getHeader(context, "Error while retrieving AuthorityValue from solr",
+            log.error(LogHelper.getHeader(context, "Error while retrieving AuthorityValue from solr",
                 "query: " + queryString), e);
         }

@@ -11,7 +11,6 @@ import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Date;
-import java.util.LinkedList;
 import java.util.List;
 import java.util.UUID;

@@ -62,7 +61,7 @@ import org.springframework.beans.factory.annotation.Autowired;
 */
 public class AuthorizeServiceImpl implements AuthorizeService {

-    private static Logger log = LogManager.getLogger(AuthorizeServiceImpl.class);
+    private static final Logger log = LogManager.getLogger();

     @Autowired(required = true)
     protected BitstreamService bitstreamService;
@@ -243,7 +242,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
         // If authorization was given before and cached
         Boolean cachedResult = c.getCachedAuthorizationResult(o, action, e);
         if (cachedResult != null) {
-            return cachedResult.booleanValue();
+            return cachedResult;
         }

         // is eperson set? if not, userToCheck = null (anonymous)
@@ -308,7 +307,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
             }

             if ((rp.getGroup() != null)
-                && (groupService.isMember(c, e, rp.getGroup()))) {
+                && groupService.isMember(c, e, rp.getGroup())) {
                 // group was set, and eperson is a member
                 // of that group
                 c.cacheAuthorizedAction(o, action, e, true, rp);
@@ -366,7 +365,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {

         Boolean cachedResult = c.getCachedAuthorizationResult(o, Constants.ADMIN, e);
         if (cachedResult != null) {
-            return cachedResult.booleanValue();
+            return cachedResult;
         }

         //
@@ -383,7 +382,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
             }

             if ((rp.getGroup() != null)
-                && (groupService.isMember(c, e, rp.getGroup()))) {
+                && groupService.isMember(c, e, rp.getGroup())) {
                 // group was set, and eperson is a member
                 // of that group
                 c.cacheAuthorizedAction(o, Constants.ADMIN, e, true, rp);
@@ -502,7 +501,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
         List<ResourcePolicy> policies = getPolicies(c, src);

         //Only inherit non-ADMIN policies (since ADMIN policies are automatically inherited)
-        List<ResourcePolicy> nonAdminPolicies = new ArrayList<ResourcePolicy>();
+        List<ResourcePolicy> nonAdminPolicies = new ArrayList<>();
         for (ResourcePolicy rp : policies) {
             if (rp.getAction() != Constants.ADMIN) {
                 nonAdminPolicies.add(rp);
@@ -525,7 +524,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
     public void addPolicies(Context c, List<ResourcePolicy> policies, DSpaceObject dest)
         throws SQLException, AuthorizeException {
         // now add them to the destination object
-        List<ResourcePolicy> newPolicies = new LinkedList<>();
+        List<ResourcePolicy> newPolicies = new ArrayList<>(policies.size());

         for (ResourcePolicy srp : policies) {
             ResourcePolicy rp = resourcePolicyService.create(c);
@@ -600,7 +599,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
                                              int actionID) throws java.sql.SQLException {
         List<ResourcePolicy> policies = getPoliciesActionFilter(c, o, actionID);

-        List<Group> groups = new ArrayList<Group>();
+        List<Group> groups = new ArrayList<>();
         for (ResourcePolicy resourcePolicy : policies) {
             if (resourcePolicy.getGroup() != null && resourcePolicyService.isDateValid(resourcePolicy)) {
                 groups.add(resourcePolicy.getGroup());
@@ -768,6 +767,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
     * @param context context with the current user
     * @return true if the current user is a community admin in the site
     * false when this is not the case, or an exception occurred
+     * @throws java.sql.SQLException passed through.
     */
     @Override
     public boolean isCommunityAdmin(Context context) throws SQLException {
@@ -780,6 +780,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
     * @param context context with the current user
     * @return true if the current user is a collection admin in the site
     * false when this is not the case, or an exception occurred
+     * @throws java.sql.SQLException passed through.
     */
     @Override
     public boolean isCollectionAdmin(Context context) throws SQLException {
@@ -792,6 +793,7 @@ public class AuthorizeServiceImpl implements AuthorizeService {
     * @param context context with the current user
     * @return true if the current user is a community or collection admin in the site
     * false when this is not the case, or an exception occurred
+     * @throws java.sql.SQLException passed through.
     */
     @Override
     public boolean isComColAdmin(Context context) throws SQLException {
@@ -169,6 +169,7 @@ public class ResourcePolicy implements ReloadableEntity<Integer> {
|
|||||||
*
|
*
|
||||||
* @return the internal identifier
|
* @return the internal identifier
|
||||||
*/
|
*/
|
||||||
|
@Override
|
||||||
public Integer getID() {
|
public Integer getID() {
|
||||||
return id;
|
return id;
|
||||||
}
|
}
|
||||||
|
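ResourcePolicy.getID() gains an @Override annotation: the method implements ReloadableEntity<Integer>, and the annotation makes the compiler reject any future drift between the two signatures. A toy, self-contained version of the pattern (the stand-in interface below is not the DSpace one):

    // Stand-in for the ReloadableEntity<T> contract from the DSpace core packages.
    interface ReloadableEntityLike<T> {
        T getID();
    }

    class PolicyLike implements ReloadableEntityLike<Integer> {
        private final Integer id;

        PolicyLike(Integer id) {
            this.id = id;
        }

        @Override   // the compiler now verifies this really matches the interface method
        public Integer getID() {
            return id;
        }
    }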
@@ -11,13 +11,12 @@ import java.sql.SQLException;
|
|||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
|
||||||
import org.apache.commons.lang3.StringUtils;
|
|
||||||
import org.apache.logging.log4j.Logger;
|
import org.apache.logging.log4j.Logger;
|
||||||
import org.dspace.content.Collection;
|
import org.dspace.content.Collection;
|
||||||
import org.dspace.content.Community;
|
import org.dspace.content.Community;
|
||||||
import org.dspace.content.Item;
|
import org.dspace.content.Item;
|
||||||
import org.dspace.core.Context;
|
import org.dspace.core.Context;
|
||||||
import org.dspace.core.LogManager;
|
import org.dspace.core.LogHelper;
|
||||||
import org.dspace.sort.OrderFormat;
|
import org.dspace.sort.OrderFormat;
|
||||||
import org.dspace.sort.SortOption;
|
import org.dspace.sort.SortOption;
|
||||||
|
|
||||||
@@ -85,7 +84,7 @@ public class BrowseEngine {
|
|||||||
*/
|
*/
|
||||||
public BrowseInfo browse(BrowserScope bs)
|
public BrowseInfo browse(BrowserScope bs)
|
||||||
throws BrowseException {
|
throws BrowseException {
|
||||||
log.debug(LogManager.getHeader(context, "browse", ""));
|
log.debug(LogHelper.getHeader(context, "browse", ""));
|
||||||
|
|
||||||
// first, load the browse scope into the object
|
// first, load the browse scope into the object
|
||||||
this.scope = bs;
|
this.scope = bs;
|
||||||
@@ -119,7 +118,7 @@ public class BrowseEngine {
|
|||||||
*/
|
*/
|
||||||
public BrowseInfo browseMini(BrowserScope bs)
|
public BrowseInfo browseMini(BrowserScope bs)
|
||||||
throws BrowseException {
|
throws BrowseException {
|
||||||
log.info(LogManager.getHeader(context, "browse_mini", ""));
|
log.info(LogHelper.getHeader(context, "browse_mini", ""));
|
||||||
|
|
||||||
// load the scope into the object
|
// load the scope into the object
|
||||||
this.scope = bs;
|
this.scope = bs;
|
||||||
@@ -198,7 +197,7 @@ public class BrowseEngine {
|
|||||||
*/
|
*/
|
||||||
private BrowseInfo browseByItem(BrowserScope bs)
|
private BrowseInfo browseByItem(BrowserScope bs)
|
||||||
throws BrowseException {
|
throws BrowseException {
|
||||||
log.info(LogManager.getHeader(context, "browse_by_item", ""));
|
log.info(LogHelper.getHeader(context, "browse_by_item", ""));
|
||||||
try {
|
try {
|
||||||
// get the table name that we are going to be getting our data from
|
// get the table name that we are going to be getting our data from
|
||||||
dao.setTable(browseIndex.getTableName());
|
dao.setTable(browseIndex.getTableName());
|
||||||
@@ -374,14 +373,14 @@ public class BrowseEngine {
|
|||||||
*/
|
*/
|
||||||
private BrowseInfo browseByValue(BrowserScope bs)
|
private BrowseInfo browseByValue(BrowserScope bs)
|
||||||
throws BrowseException {
|
throws BrowseException {
|
||||||
log.info(LogManager.getHeader(context, "browse_by_value", "focus=" + bs.getJumpToValue()));
|
log.info(LogHelper.getHeader(context, "browse_by_value", "focus=" + bs.getJumpToValue()));
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// get the table name that we are going to be getting our data from
|
// get the table name that we are going to be getting our data from
|
||||||
// this is the distinct table constrained to either community or collection
|
// this is the distinct table constrained to either community or collection
|
||||||
dao.setTable(browseIndex.getDistinctTableName());
|
dao.setTable(browseIndex.getDistinctTableName());
|
||||||
|
|
||||||
dao.setStartsWith(StringUtils.lowerCase(scope.getStartsWith()));
|
dao.setStartsWith(normalizeJumpToValue(scope.getStartsWith()));
|
||||||
// remind the DAO that this is a distinct value browse, so it knows what sort
|
// remind the DAO that this is a distinct value browse, so it knows what sort
|
||||||
// of query to build
|
// of query to build
|
||||||
dao.setDistinct(true);
|
dao.setDistinct(true);
|
||||||
@@ -518,17 +517,17 @@ public class BrowseEngine {
|
|||||||
*/
|
*/
|
||||||
private String getJumpToValue()
|
private String getJumpToValue()
|
||||||
throws BrowseException {
|
throws BrowseException {
|
||||||
log.debug(LogManager.getHeader(context, "get_focus_value", ""));
|
log.debug(LogHelper.getHeader(context, "get_focus_value", ""));
|
||||||
|
|
||||||
// if the focus is by value, just return it
|
// if the focus is by value, just return it
|
||||||
if (scope.hasJumpToValue()) {
|
if (scope.hasJumpToValue()) {
|
||||||
log.debug(LogManager.getHeader(context, "get_focus_value_return", "return=" + scope.getJumpToValue()));
|
log.debug(LogHelper.getHeader(context, "get_focus_value_return", "return=" + scope.getJumpToValue()));
|
||||||
return scope.getJumpToValue();
|
return scope.getJumpToValue();
|
||||||
}
|
}
|
||||||
|
|
||||||
// if the focus is to start with, then we need to return the value of the starts with
|
// if the focus is to start with, then we need to return the value of the starts with
|
||||||
if (scope.hasStartsWith()) {
|
if (scope.hasStartsWith()) {
|
||||||
log.debug(LogManager.getHeader(context, "get_focus_value_return", "return=" + scope.getStartsWith()));
|
log.debug(LogHelper.getHeader(context, "get_focus_value_return", "return=" + scope.getStartsWith()));
|
||||||
return scope.getStartsWith();
|
return scope.getStartsWith();
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -565,7 +564,7 @@ public class BrowseEngine {
|
|||||||
// item (I think)
|
// item (I think)
|
||||||
String max = dao.doMaxQuery(col, tableName, id);
|
String max = dao.doMaxQuery(col, tableName, id);
|
||||||
|
|
||||||
log.debug(LogManager.getHeader(context, "get_focus_value_return", "return=" + max));
|
log.debug(LogHelper.getHeader(context, "get_focus_value_return", "return=" + max));
|
||||||
|
|
||||||
return max;
|
return max;
|
||||||
}
|
}
|
||||||
@@ -671,7 +670,7 @@ public class BrowseEngine {
|
|||||||
*/
|
*/
|
||||||
private int getTotalResults(boolean distinct)
|
private int getTotalResults(boolean distinct)
|
||||||
throws SQLException, BrowseException {
|
throws SQLException, BrowseException {
|
||||||
log.debug(LogManager.getHeader(context, "get_total_results", "distinct=" + distinct));
|
log.debug(LogHelper.getHeader(context, "get_total_results", "distinct=" + distinct));
|
||||||
|
|
||||||
// tell the browse query whether we are distinct
|
// tell the browse query whether we are distinct
|
||||||
dao.setDistinct(distinct);
|
dao.setDistinct(distinct);
|
||||||
@@ -706,7 +705,7 @@ public class BrowseEngine {
|
|||||||
dao.setOffset(offset);
|
dao.setOffset(offset);
|
||||||
dao.setCountValues(null);
|
dao.setCountValues(null);
|
||||||
|
|
||||||
log.debug(LogManager.getHeader(context, "get_total_results_return", "return=" + count));
|
log.debug(LogHelper.getHeader(context, "get_total_results_return", "return=" + count));
|
||||||
|
|
||||||
return count;
|
return count;
|
||||||
}
|
}
|
||||||
|
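Two patterns run through the BrowseEngine hunks above: every log call moves from LogManager.getHeader to LogHelper.getHeader with the argument list unchanged, and the starts-with focus value is passed through the same normalization as jump-to values instead of only being lower-cased. The two call sites below are lifted directly from those hunks (fragments, not standalone code):

    // LogHelper keeps the (context, action, extra-info) header format used before.
    log.info(LogHelper.getHeader(context, "browse_by_value", "focus=" + bs.getJumpToValue()));

    // Starts-with and jump-to values now share one normalization path, so both
    // focus mechanisms compare against identically prepared sort keys.
    dao.setStartsWith(normalizeJumpToValue(scope.getStartsWith()));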
@@ -170,7 +170,7 @@ public class MostRecentChecksum implements Serializable {
|
|||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (o == null || getClass() != o.getClass()) {
|
if (o == null || !(o instanceof MostRecentChecksum)) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
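MostRecentChecksum.equals() swaps the exact-class test for an instanceof test, which returns false for null and accepts subclasses (such as runtime proxies) of the entity. A self-contained sketch of that equals shape on a simplified record-keeping class:

    import java.util.Objects;

    class ChecksumRecord {
        private final String bitstreamId;

        ChecksumRecord(String bitstreamId) {
            this.bitstreamId = bitstreamId;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (!(o instanceof ChecksumRecord)) {   // also false when o is null
                return false;
            }
            ChecksumRecord other = (ChecksumRecord) o;
            return Objects.equals(bitstreamId, other.bitstreamId);
        }

        @Override
        public int hashCode() {
            return Objects.hash(bitstreamId);
        }
    }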
@@ -109,7 +109,7 @@ public class ResultsLogger implements ChecksumResultsCollector {
|
|||||||
"unknown"));
|
"unknown"));
|
||||||
LOG.info(msg("new-checksum") + ": " + info.getCurrentChecksum());
|
LOG.info(msg("new-checksum") + ": " + info.getCurrentChecksum());
|
||||||
LOG.info(msg("checksum-comparison-result") + ": "
|
LOG.info(msg("checksum-comparison-result") + ": "
|
||||||
+ (info.getChecksumResult().getResultCode()));
|
+ info.getChecksumResult().getResultCode());
|
||||||
LOG.info("\n\n");
|
LOG.info("\n\n");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -77,7 +77,7 @@ public class SimpleReporterServiceImpl implements SimpleReporterService {
|
|||||||
osw.write(applyDateFormatShort(endDate));
|
osw.write(applyDateFormatShort(endDate));
|
||||||
osw.write("\n\n\n");
|
osw.write("\n\n\n");
|
||||||
|
|
||||||
if (recentChecksums.size() == 0) {
|
if (recentChecksums.isEmpty()) {
|
||||||
osw.write("\n\n");
|
osw.write("\n\n");
|
||||||
osw.write(msg("no-bitstreams-to-delete"));
|
osw.write(msg("no-bitstreams-to-delete"));
|
||||||
osw.write("\n");
|
osw.write("\n");
|
||||||
@@ -119,7 +119,7 @@ public class SimpleReporterServiceImpl implements SimpleReporterService {
|
|||||||
osw.write(applyDateFormatShort(endDate));
|
osw.write(applyDateFormatShort(endDate));
|
||||||
osw.write("\n\n\n");
|
osw.write("\n\n\n");
|
||||||
|
|
||||||
if (history.size() == 0) {
|
if (history.isEmpty()) {
|
||||||
osw.write("\n\n");
|
osw.write("\n\n");
|
||||||
osw.write(msg("no-changed-bitstreams"));
|
osw.write(msg("no-changed-bitstreams"));
|
||||||
osw.write("\n");
|
osw.write("\n");
|
||||||
@@ -159,7 +159,7 @@ public class SimpleReporterServiceImpl implements SimpleReporterService {
|
|||||||
osw.write(applyDateFormatShort(endDate));
|
osw.write(applyDateFormatShort(endDate));
|
||||||
osw.write("\n\n\n");
|
osw.write("\n\n\n");
|
||||||
|
|
||||||
if (history.size() == 0) {
|
if (history.isEmpty()) {
|
||||||
osw.write("\n\n");
|
osw.write("\n\n");
|
||||||
osw.write(msg("no-bitstreams-changed"));
|
osw.write(msg("no-bitstreams-changed"));
|
||||||
osw.write("\n");
|
osw.write("\n");
|
||||||
@@ -201,7 +201,7 @@ public class SimpleReporterServiceImpl implements SimpleReporterService {
|
|||||||
osw.write(applyDateFormatShort(endDate));
|
osw.write(applyDateFormatShort(endDate));
|
||||||
osw.write("\n\n\n");
|
osw.write("\n\n\n");
|
||||||
|
|
||||||
if (mostRecentChecksums.size() == 0) {
|
if (mostRecentChecksums.isEmpty()) {
|
||||||
osw.write("\n\n");
|
osw.write("\n\n");
|
||||||
osw.write(msg("no-bitstreams-to-no-longer-be-processed"));
|
osw.write(msg("no-bitstreams-to-no-longer-be-processed"));
|
||||||
osw.write("\n");
|
osw.write("\n");
|
||||||
@@ -233,7 +233,7 @@ public class SimpleReporterServiceImpl implements SimpleReporterService {
|
|||||||
osw.write(applyDateFormatShort(new Date()));
|
osw.write(applyDateFormatShort(new Date()));
|
||||||
osw.write("\n\n\n");
|
osw.write("\n\n\n");
|
||||||
|
|
||||||
if (bitstreams.size() == 0) {
|
if (bitstreams.isEmpty()) {
|
||||||
osw.write("\n\n");
|
osw.write("\n\n");
|
||||||
osw.write(msg("no-unchecked-bitstreams"));
|
osw.write(msg("no-unchecked-bitstreams"));
|
||||||
osw.write("\n");
|
osw.write("\n");
|
||||||
@@ -257,7 +257,7 @@ public class SimpleReporterServiceImpl implements SimpleReporterService {
|
|||||||
protected void printHistoryRecords(List<MostRecentChecksum> mostRecentChecksums, OutputStreamWriter osw)
|
protected void printHistoryRecords(List<MostRecentChecksum> mostRecentChecksums, OutputStreamWriter osw)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
for (MostRecentChecksum mostRecentChecksum : mostRecentChecksums) {
|
for (MostRecentChecksum mostRecentChecksum : mostRecentChecksums) {
|
||||||
StringBuffer buf = new StringBuffer(1000);
|
StringBuilder buf = new StringBuilder(1000);
|
||||||
buf.append("------------------------------------------------ \n");
|
buf.append("------------------------------------------------ \n");
|
||||||
buf.append(msg("bitstream-id")).append(" = ").append(
|
buf.append(msg("bitstream-id")).append(" = ").append(
|
||||||
mostRecentChecksum.getBitstream().getID()).append("\n");
|
mostRecentChecksum.getBitstream().getID()).append("\n");
|
||||||
@@ -292,7 +292,7 @@ public class SimpleReporterServiceImpl implements SimpleReporterService {
|
|||||||
throws IOException, SQLException {
|
throws IOException, SQLException {
|
||||||
|
|
||||||
for (Bitstream info : bitstreams) {
|
for (Bitstream info : bitstreams) {
|
||||||
StringBuffer buf = new StringBuffer(1000);
|
StringBuilder buf = new StringBuilder(1000);
|
||||||
buf.append("------------------------------------------------ \n");
|
buf.append("------------------------------------------------ \n");
|
||||||
buf.append(msg("format-id")).append(" = ").append(
|
buf.append(msg("format-id")).append(" = ").append(
|
||||||
info.getFormat(context).getID()).append("\n");
|
info.getFormat(context).getID()).append("\n");
|
||||||
|
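The reporter changes above repeat two small idioms: collection emptiness is tested with isEmpty() rather than size() == 0, and per-record report text is built with an unsynchronized StringBuilder instead of StringBuffer. A runnable illustration of both, independent of the DSpace types:

    import java.util.List;

    public class ReportSnippets {
        // isEmpty() states the intent directly and avoids relying on size() being cheap.
        static boolean nothingToReport(List<?> rows) {
            return rows.isEmpty();              // rather than rows.size() == 0
        }

        // StringBuilder drops the per-call locking of StringBuffer, which is
        // pointless for a buffer confined to a single thread.
        static String formatRow(String label, Object value) {
            StringBuilder buf = new StringBuilder(64);
            buf.append(label).append(" = ").append(value).append('\n');
            return buf.toString();
        }

        public static void main(String[] args) {
            System.out.print(formatRow("bitstream-id", 42));
            System.out.println(nothingToReport(List.of()));
        }
    }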
@@ -21,7 +21,7 @@ import org.dspace.core.Context;
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Hibernate implementation of the Database Access Object interface class for the ChecksumResult object.
|
* Hibernate implementation of the Database Access Object interface class for the ChecksumResult object.
|
||||||
* This class is responsible for all database calls for the ChecksumResult object and is autowired by spring
|
* This class is responsible for all database calls for the ChecksumResult object and is autowired by Spring.
|
||||||
* This class should never be accessed directly.
|
* This class should never be accessed directly.
|
||||||
*
|
*
|
||||||
* @author kevinvandevelde at atmire.com
|
* @author kevinvandevelde at atmire.com
|
||||||
@@ -39,6 +39,6 @@ public class ChecksumResultDAOImpl extends AbstractHibernateDAO<ChecksumResult>
|
|||||||
Root<ChecksumResult> checksumResultRoot = criteriaQuery.from(ChecksumResult.class);
|
Root<ChecksumResult> checksumResultRoot = criteriaQuery.from(ChecksumResult.class);
|
||||||
criteriaQuery.select(checksumResultRoot);
|
criteriaQuery.select(checksumResultRoot);
|
||||||
criteriaQuery.where(criteriaBuilder.equal(checksumResultRoot.get(ChecksumResult_.resultCode), code));
|
criteriaQuery.where(criteriaBuilder.equal(checksumResultRoot.get(ChecksumResult_.resultCode), code));
|
||||||
return uniqueResult(context, criteriaQuery, false, ChecksumResult.class, -1, -1);
|
return uniqueResult(context, criteriaQuery, false, ChecksumResult.class);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
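ChecksumResultDAOImpl.findByCode now calls uniqueResult without the trailing -1, -1 arguments, presumably the overload without paging parameters, since a lookup that expects at most one row has nothing to page. That helper is DSpace-internal, so the sketch below only shows the generic JPA shape of such a single-row criteria lookup, with placeholder names:

    import javax.persistence.EntityManager;
    import javax.persistence.TypedQuery;
    import javax.persistence.criteria.CriteriaBuilder;
    import javax.persistence.criteria.CriteriaQuery;
    import javax.persistence.criteria.Root;

    class ResultLookup {
        // Filter one column by value and return the single match, or null.
        static <T> T findUnique(EntityManager em, Class<T> type, String field, Object value) {
            CriteriaBuilder cb = em.getCriteriaBuilder();
            CriteriaQuery<T> cq = cb.createQuery(type);
            Root<T> root = cq.from(type);
            cq.select(root).where(cb.equal(root.get(field), value));
            TypedQuery<T> query = em.createQuery(cq).setMaxResults(1);
            return query.getResultStream().findFirst().orElse(null);
        }
    }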
@@ -21,8 +21,6 @@ import javax.persistence.OneToOne;
|
|||||||
import javax.persistence.Table;
|
import javax.persistence.Table;
|
||||||
import javax.persistence.Transient;
|
import javax.persistence.Transient;
|
||||||
|
|
||||||
import org.apache.logging.log4j.LogManager;
|
|
||||||
import org.apache.logging.log4j.Logger;
|
|
||||||
import org.dspace.content.factory.ContentServiceFactory;
|
import org.dspace.content.factory.ContentServiceFactory;
|
||||||
import org.dspace.content.service.BitstreamService;
|
import org.dspace.content.service.BitstreamService;
|
||||||
import org.dspace.core.Constants;
|
import org.dspace.core.Constants;
|
||||||
@@ -37,17 +35,10 @@ import org.hibernate.proxy.HibernateProxyHelper;
|
|||||||
* the contents of a bitstream; you need to create a new bitstream.
|
* the contents of a bitstream; you need to create a new bitstream.
|
||||||
*
|
*
|
||||||
* @author Robert Tansley
|
* @author Robert Tansley
|
||||||
* @version $Revision$
|
|
||||||
*/
|
*/
|
||||||
@Entity
|
@Entity
|
||||||
@Table(name = "bitstream")
|
@Table(name = "bitstream")
|
||||||
public class Bitstream extends DSpaceObject implements DSpaceObjectLegacySupport {
|
public class Bitstream extends DSpaceObject implements DSpaceObjectLegacySupport {
|
||||||
|
|
||||||
/**
|
|
||||||
* log4j logger
|
|
||||||
*/
|
|
||||||
private static final Logger log = LogManager.getLogger();
|
|
||||||
|
|
||||||
@Column(name = "bitstream_id", insertable = false, updatable = false)
|
@Column(name = "bitstream_id", insertable = false, updatable = false)
|
||||||
private Integer legacyId;
|
private Integer legacyId;
|
||||||
|
|
||||||
@@ -412,7 +403,7 @@ public class Bitstream extends DSpaceObject implements DSpaceObjectLegacySupport
|
|||||||
*/
|
*/
|
||||||
@Override
|
@Override
|
||||||
public boolean equals(Object other) {
|
public boolean equals(Object other) {
|
||||||
if (other == null) {
|
if (!(other instanceof Bitstream)) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(other);
|
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(other);
|
||||||
@@ -420,11 +411,7 @@ public class Bitstream extends DSpaceObject implements DSpaceObjectLegacySupport
|
|||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
final Bitstream otherBitstream = (Bitstream) other;
|
final Bitstream otherBitstream = (Bitstream) other;
|
||||||
if (!this.getID().equals(otherBitstream.getID())) {
|
return this.getID().equals(otherBitstream.getID());
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
return true;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
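Bitstream also sheds its unused class logger and the Subversion-era @version $Revision$ tag, and the tail of equals() collapses a test-and-return into returning the comparison itself. The same micro-refactoring reappears in BitstreamFormat, Bundle, and Community below; in isolation it is just:

    // Before: if (!a.equals(b)) { return false; }  return true;
    // After:  the comparison result already is the answer.
    static boolean sameId(java.util.UUID a, java.util.UUID b) {
        return a.equals(b);
    }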
@@ -9,7 +9,7 @@ package org.dspace.content;
|
|||||||
|
|
||||||
import java.io.Serializable;
|
import java.io.Serializable;
|
||||||
import java.sql.SQLException;
|
import java.sql.SQLException;
|
||||||
import java.util.LinkedList;
|
import java.util.ArrayList;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import javax.persistence.CollectionTable;
|
import javax.persistence.CollectionTable;
|
||||||
import javax.persistence.Column;
|
import javax.persistence.Column;
|
||||||
@@ -40,7 +40,6 @@ import org.hibernate.proxy.HibernateProxyHelper;
|
|||||||
* when <code>update</code> is called.
|
* when <code>update</code> is called.
|
||||||
*
|
*
|
||||||
* @author Robert Tansley
|
* @author Robert Tansley
|
||||||
* @version $Revision$
|
|
||||||
*/
|
*/
|
||||||
@Entity
|
@Entity
|
||||||
@Table(name = "bitstreamformatregistry")
|
@Table(name = "bitstreamformatregistry")
|
||||||
@@ -112,7 +111,7 @@ public class BitstreamFormat implements Serializable, ReloadableEntity<Integer>
|
|||||||
* {@link org.dspace.content.service.BitstreamFormatService#create(Context)}
|
* {@link org.dspace.content.service.BitstreamFormatService#create(Context)}
|
||||||
*/
|
*/
|
||||||
protected BitstreamFormat() {
|
protected BitstreamFormat() {
|
||||||
fileExtensions = new LinkedList<>();
|
fileExtensions = new ArrayList<>();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -120,6 +119,7 @@ public class BitstreamFormat implements Serializable, ReloadableEntity<Integer>
|
|||||||
*
|
*
|
||||||
* @return the internal identifier
|
* @return the internal identifier
|
||||||
*/
|
*/
|
||||||
|
@Override
|
||||||
public final Integer getID() {
|
public final Integer getID() {
|
||||||
return id;
|
return id;
|
||||||
}
|
}
|
||||||
@@ -267,7 +267,7 @@ public class BitstreamFormat implements Serializable, ReloadableEntity<Integer>
|
|||||||
*/
|
*/
|
||||||
@Override
|
@Override
|
||||||
public boolean equals(Object other) {
|
public boolean equals(Object other) {
|
||||||
if (other == null) {
|
if (!(other instanceof BitstreamFormat)) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(other);
|
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(other);
|
||||||
@@ -275,11 +275,7 @@ public class BitstreamFormat implements Serializable, ReloadableEntity<Integer>
|
|||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
final BitstreamFormat otherBitstreamFormat = (BitstreamFormat) other;
|
final BitstreamFormat otherBitstreamFormat = (BitstreamFormat) other;
|
||||||
if (!this.getID().equals(otherBitstreamFormat.getID())) {
|
return this.getID().equals(otherBitstreamFormat.getID());
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
return true;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@@ -18,7 +18,7 @@ import org.dspace.authorize.service.AuthorizeService;
|
|||||||
import org.dspace.content.dao.BitstreamFormatDAO;
|
import org.dspace.content.dao.BitstreamFormatDAO;
|
||||||
import org.dspace.content.service.BitstreamFormatService;
|
import org.dspace.content.service.BitstreamFormatService;
|
||||||
import org.dspace.core.Context;
|
import org.dspace.core.Context;
|
||||||
import org.dspace.core.LogManager;
|
import org.dspace.core.LogHelper;
|
||||||
import org.springframework.beans.factory.annotation.Autowired;
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -68,7 +68,7 @@ public class BitstreamFormatServiceImpl implements BitstreamFormatService {
|
|||||||
|
|
||||||
if (bitstreamFormat == null) {
|
if (bitstreamFormat == null) {
|
||||||
if (log.isDebugEnabled()) {
|
if (log.isDebugEnabled()) {
|
||||||
log.debug(LogManager.getHeader(context,
|
log.debug(LogHelper.getHeader(context,
|
||||||
"find_bitstream_format",
|
"find_bitstream_format",
|
||||||
"not_found,bitstream_format_id=" + id));
|
"not_found,bitstream_format_id=" + id));
|
||||||
}
|
}
|
||||||
@@ -78,7 +78,7 @@ public class BitstreamFormatServiceImpl implements BitstreamFormatService {
|
|||||||
|
|
||||||
// not null, return format object
|
// not null, return format object
|
||||||
if (log.isDebugEnabled()) {
|
if (log.isDebugEnabled()) {
|
||||||
log.debug(LogManager.getHeader(context, "find_bitstream_format",
|
log.debug(LogHelper.getHeader(context, "find_bitstream_format",
|
||||||
"bitstream_format_id=" + id));
|
"bitstream_format_id=" + id));
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -129,7 +129,7 @@ public class BitstreamFormatServiceImpl implements BitstreamFormatService {
|
|||||||
BitstreamFormat bitstreamFormat = bitstreamFormatDAO.create(context, new BitstreamFormat());
|
BitstreamFormat bitstreamFormat = bitstreamFormatDAO.create(context, new BitstreamFormat());
|
||||||
|
|
||||||
|
|
||||||
log.info(LogManager.getHeader(context, "create_bitstream_format",
|
log.info(LogHelper.getHeader(context, "create_bitstream_format",
|
||||||
"bitstream_format_id="
|
"bitstream_format_id="
|
||||||
+ bitstreamFormat.getID()));
|
+ bitstreamFormat.getID()));
|
||||||
|
|
||||||
@@ -189,7 +189,7 @@ public class BitstreamFormatServiceImpl implements BitstreamFormatService {
|
|||||||
}
|
}
|
||||||
|
|
||||||
for (BitstreamFormat bitstreamFormat : bitstreamFormats) {
|
for (BitstreamFormat bitstreamFormat : bitstreamFormats) {
|
||||||
log.info(LogManager.getHeader(context, "update_bitstream_format",
|
log.info(LogHelper.getHeader(context, "update_bitstream_format",
|
||||||
"bitstream_format_id=" + bitstreamFormat.getID()));
|
"bitstream_format_id=" + bitstreamFormat.getID()));
|
||||||
|
|
||||||
bitstreamFormatDAO.save(context, bitstreamFormat);
|
bitstreamFormatDAO.save(context, bitstreamFormat);
|
||||||
@@ -218,7 +218,7 @@ public class BitstreamFormatServiceImpl implements BitstreamFormatService {
|
|||||||
// Delete this format from database
|
// Delete this format from database
|
||||||
bitstreamFormatDAO.delete(context, bitstreamFormat);
|
bitstreamFormatDAO.delete(context, bitstreamFormat);
|
||||||
|
|
||||||
log.info(LogManager.getHeader(context, "delete_bitstream_format",
|
log.info(LogHelper.getHeader(context, "delete_bitstream_format",
|
||||||
"bitstream_format_id=" + bitstreamFormat.getID() + ",bitstreams_changed="
|
"bitstream_format_id=" + bitstreamFormat.getID() + ",bitstreams_changed="
|
||||||
+ numberChanged));
|
+ numberChanged));
|
||||||
}
|
}
|
||||||
|
@@ -28,7 +28,7 @@ import org.dspace.content.service.BundleService;
|
|||||||
import org.dspace.content.service.ItemService;
|
import org.dspace.content.service.ItemService;
|
||||||
import org.dspace.core.Constants;
|
import org.dspace.core.Constants;
|
||||||
import org.dspace.core.Context;
|
import org.dspace.core.Context;
|
||||||
import org.dspace.core.LogManager;
|
import org.dspace.core.LogHelper;
|
||||||
import org.dspace.event.Event;
|
import org.dspace.event.Event;
|
||||||
import org.dspace.storage.bitstore.service.BitstreamStorageService;
|
import org.dspace.storage.bitstore.service.BitstreamStorageService;
|
||||||
import org.springframework.beans.factory.annotation.Autowired;
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
@@ -45,7 +45,8 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
|
|||||||
/**
|
/**
|
||||||
* log4j logger
|
* log4j logger
|
||||||
*/
|
*/
|
||||||
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(BitstreamServiceImpl.class);
|
private static final Logger log
|
||||||
|
= org.apache.logging.log4j.LogManager.getLogger();
|
||||||
|
|
||||||
|
|
||||||
@Autowired(required = true)
|
@Autowired(required = true)
|
||||||
@@ -73,7 +74,7 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
|
|||||||
|
|
||||||
if (bitstream == null) {
|
if (bitstream == null) {
|
||||||
if (log.isDebugEnabled()) {
|
if (log.isDebugEnabled()) {
|
||||||
log.debug(LogManager.getHeader(context, "find_bitstream",
|
log.debug(LogHelper.getHeader(context, "find_bitstream",
|
||||||
"not_found,bitstream_id=" + id));
|
"not_found,bitstream_id=" + id));
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -82,7 +83,7 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
|
|||||||
|
|
||||||
// not null, return Bitstream
|
// not null, return Bitstream
|
||||||
if (log.isDebugEnabled()) {
|
if (log.isDebugEnabled()) {
|
||||||
log.debug(LogManager.getHeader(context, "find_bitstream",
|
log.debug(LogHelper.getHeader(context, "find_bitstream",
|
||||||
"bitstream_id=" + id));
|
"bitstream_id=" + id));
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -131,7 +132,7 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
|
|||||||
// Store the bits
|
// Store the bits
|
||||||
UUID bitstreamID = bitstreamStorageService.store(context, bitstreamDAO.create(context, new Bitstream()), is);
|
UUID bitstreamID = bitstreamStorageService.store(context, bitstreamDAO.create(context, new Bitstream()), is);
|
||||||
|
|
||||||
log.info(LogManager.getHeader(context, "create_bitstream",
|
log.info(LogHelper.getHeader(context, "create_bitstream",
|
||||||
"bitstream_id=" + bitstreamID));
|
"bitstream_id=" + bitstreamID));
|
||||||
|
|
||||||
// Set the format to "unknown"
|
// Set the format to "unknown"
|
||||||
@@ -191,7 +192,7 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
|
|||||||
bitstreamStorageService.register(
|
bitstreamStorageService.register(
|
||||||
context, bitstream, assetstore, bitstreamPath);
|
context, bitstream, assetstore, bitstreamPath);
|
||||||
|
|
||||||
log.info(LogManager.getHeader(context,
|
log.info(LogHelper.getHeader(context,
|
||||||
"create_bitstream",
|
"create_bitstream",
|
||||||
"bitstream_id=" + bitstream.getID()));
|
"bitstream_id=" + bitstream.getID()));
|
||||||
|
|
||||||
@@ -248,7 +249,7 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
|
|||||||
// Check authorisation
|
// Check authorisation
|
||||||
authorizeService.authorizeAction(context, bitstream, Constants.WRITE);
|
authorizeService.authorizeAction(context, bitstream, Constants.WRITE);
|
||||||
|
|
||||||
log.info(LogManager.getHeader(context, "update_bitstream",
|
log.info(LogHelper.getHeader(context, "update_bitstream",
|
||||||
"bitstream_id=" + bitstream.getID()));
|
"bitstream_id=" + bitstream.getID()));
|
||||||
super.update(context, bitstream);
|
super.update(context, bitstream);
|
||||||
if (bitstream.isModified()) {
|
if (bitstream.isModified()) {
|
||||||
@@ -273,7 +274,7 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
|
|||||||
// changed to a check on delete
|
// changed to a check on delete
|
||||||
// Check authorisation
|
// Check authorisation
|
||||||
authorizeService.authorizeAction(context, bitstream, Constants.DELETE);
|
authorizeService.authorizeAction(context, bitstream, Constants.DELETE);
|
||||||
log.info(LogManager.getHeader(context, "delete_bitstream",
|
log.info(LogHelper.getHeader(context, "delete_bitstream",
|
||||||
"bitstream_id=" + bitstream.getID()));
|
"bitstream_id=" + bitstream.getID()));
|
||||||
|
|
||||||
context.addEvent(new Event(Event.DELETE, Constants.BITSTREAM, bitstream.getID(),
|
context.addEvent(new Event(Event.DELETE, Constants.BITSTREAM, bitstream.getID(),
|
||||||
@@ -350,7 +351,8 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
|
|||||||
public void expunge(Context context, Bitstream bitstream) throws SQLException, AuthorizeException {
|
public void expunge(Context context, Bitstream bitstream) throws SQLException, AuthorizeException {
|
||||||
authorizeService.authorizeAction(context, bitstream, Constants.DELETE);
|
authorizeService.authorizeAction(context, bitstream, Constants.DELETE);
|
||||||
if (!bitstream.isDeleted()) {
|
if (!bitstream.isDeleted()) {
|
||||||
throw new IllegalStateException("Bitstream must be deleted before it can be removed from the database");
|
throw new IllegalStateException("Bitstream " + bitstream.getID().toString()
|
||||||
|
+ " must be deleted before it can be removed from the database.");
|
||||||
}
|
}
|
||||||
bitstreamDAO.delete(context, bitstream);
|
bitstreamDAO.delete(context, bitstream);
|
||||||
}
|
}
|
||||||
|
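In BitstreamServiceImpl the logger becomes a final field obtained from the no-argument LogManager.getLogger(), which derives the logger name from the calling class, and expunge() now names the offending bitstream in its IllegalStateException. A small runnable sketch of both points (the method body is illustrative, not the service code):

    import java.util.UUID;

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    public class ExpungeDemo {
        // No class argument: the logger name follows the class automatically,
        // so a rename cannot leave a stale logger name behind.
        private static final Logger log = LogManager.getLogger();

        // Putting the identifier in the message makes the failure actionable
        // straight from the log, without reproducing it under a debugger.
        static void expunge(UUID id, boolean deleted) {
            if (!deleted) {
                throw new IllegalStateException("Bitstream " + id
                    + " must be deleted before it can be removed from the database.");
            }
            log.info("expunged bitstream {}", id);
        }

        public static void main(String[] args) {
            expunge(UUID.randomUUID(), true);
        }
    }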
@@ -9,7 +9,6 @@ package org.dspace.content;
|
|||||||
|
|
||||||
import java.sql.SQLException;
|
import java.sql.SQLException;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.LinkedList;
|
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import javax.persistence.Column;
|
import javax.persistence.Column;
|
||||||
import javax.persistence.Entity;
|
import javax.persistence.Entity;
|
||||||
@@ -138,7 +137,7 @@ public class Bundle extends DSpaceObject implements DSpaceObjectLegacySupport {
|
|||||||
* @return the bitstreams
|
* @return the bitstreams
|
||||||
*/
|
*/
|
||||||
public List<Bitstream> getBitstreams() {
|
public List<Bitstream> getBitstreams() {
|
||||||
List<Bitstream> bitstreamList = new LinkedList<>(this.bitstreams);
|
List<Bitstream> bitstreamList = new ArrayList<>(this.bitstreams);
|
||||||
return bitstreamList;
|
return bitstreamList;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -191,7 +190,7 @@ public class Bundle extends DSpaceObject implements DSpaceObjectLegacySupport {
|
|||||||
|
|
||||||
@Override
|
@Override
|
||||||
public boolean equals(Object obj) {
|
public boolean equals(Object obj) {
|
||||||
if (obj == null) {
|
if (obj == null || !(obj instanceof Bundle)) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(obj);
|
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(obj);
|
||||||
@@ -202,10 +201,7 @@ public class Bundle extends DSpaceObject implements DSpaceObjectLegacySupport {
|
|||||||
if (this.getType() != other.getType()) {
|
if (this.getType() != other.getType()) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
if (!this.getID().equals(other.getID())) {
|
return this.getID().equals(other.getID());
|
||||||
return false;
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@@ -33,7 +33,7 @@ import org.dspace.content.service.BundleService;
|
|||||||
import org.dspace.content.service.ItemService;
|
import org.dspace.content.service.ItemService;
|
||||||
import org.dspace.core.Constants;
|
import org.dspace.core.Constants;
|
||||||
import org.dspace.core.Context;
|
import org.dspace.core.Context;
|
||||||
import org.dspace.core.LogManager;
|
import org.dspace.core.LogHelper;
|
||||||
import org.dspace.event.Event;
|
import org.dspace.event.Event;
|
||||||
import org.springframework.beans.factory.annotation.Autowired;
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
|
||||||
@@ -73,14 +73,14 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
|
|||||||
Bundle bundle = bundleDAO.findByID(context, Bundle.class, id);
|
Bundle bundle = bundleDAO.findByID(context, Bundle.class, id);
|
||||||
if (bundle == null) {
|
if (bundle == null) {
|
||||||
if (log.isDebugEnabled()) {
|
if (log.isDebugEnabled()) {
|
||||||
log.debug(LogManager.getHeader(context, "find_bundle",
|
log.debug(LogHelper.getHeader(context, "find_bundle",
|
||||||
"not_found,bundle_id=" + id));
|
"not_found,bundle_id=" + id));
|
||||||
}
|
}
|
||||||
|
|
||||||
return null;
|
return null;
|
||||||
} else {
|
} else {
|
||||||
if (log.isDebugEnabled()) {
|
if (log.isDebugEnabled()) {
|
||||||
log.debug(LogManager.getHeader(context, "find_bundle",
|
log.debug(LogHelper.getHeader(context, "find_bundle",
|
||||||
"bundle_id=" + id));
|
"bundle_id=" + id));
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -105,7 +105,7 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
log.info(LogManager.getHeader(context, "create_bundle", "bundle_id="
|
log.info(LogHelper.getHeader(context, "create_bundle", "bundle_id="
|
||||||
+ bundle.getID()));
|
+ bundle.getID()));
|
||||||
|
|
||||||
// if we ever use the identifier service for bundles, we should
|
// if we ever use the identifier service for bundles, we should
|
||||||
@@ -136,7 +136,7 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
|
|||||||
// Check authorisation
|
// Check authorisation
|
||||||
authorizeService.authorizeAction(context, bundle, Constants.ADD);
|
authorizeService.authorizeAction(context, bundle, Constants.ADD);
|
||||||
|
|
||||||
log.info(LogManager.getHeader(context, "add_bitstream", "bundle_id="
|
log.info(LogHelper.getHeader(context, "add_bitstream", "bundle_id="
|
||||||
+ bundle.getID() + ",bitstream_id=" + bitstream.getID()));
|
+ bundle.getID() + ",bitstream_id=" + bitstream.getID()));
|
||||||
|
|
||||||
// First check that the bitstream isn't already in the list
|
// First check that the bitstream isn't already in the list
|
||||||
@@ -177,7 +177,7 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
|
|||||||
// Check authorisation
|
// Check authorisation
|
||||||
authorizeService.authorizeAction(context, bundle, Constants.REMOVE);
|
authorizeService.authorizeAction(context, bundle, Constants.REMOVE);
|
||||||
|
|
||||||
log.info(LogManager.getHeader(context, "remove_bitstream",
|
log.info(LogHelper.getHeader(context, "remove_bitstream",
|
||||||
"bundle_id=" + bundle.getID() + ",bitstream_id=" + bitstream.getID()));
|
"bundle_id=" + bundle.getID() + ",bitstream_id=" + bitstream.getID()));
|
||||||
|
|
||||||
|
|
||||||
@@ -362,14 +362,14 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
|
|||||||
// If we have an invalid Bitstream ID, just ignore it, but log a warning
|
// If we have an invalid Bitstream ID, just ignore it, but log a warning
|
||||||
if (bitstream == null) {
|
if (bitstream == null) {
|
||||||
//This should never occur but just in case
|
//This should never occur but just in case
|
||||||
log.warn(LogManager.getHeader(context, "Invalid bitstream id while changing bitstream order",
|
log.warn(LogHelper.getHeader(context, "Invalid bitstream id while changing bitstream order",
|
||||||
"Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId));
|
"Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId));
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
// If we have a Bitstream not in the current list, log a warning & exit immediately
|
// If we have a Bitstream not in the current list, log a warning & exit immediately
|
||||||
if (!currentBitstreams.contains(bitstream)) {
|
if (!currentBitstreams.contains(bitstream)) {
|
||||||
log.warn(LogManager.getHeader(context,
|
log.warn(LogHelper.getHeader(context,
|
||||||
"Encountered a bitstream not in this bundle while changing bitstream " +
|
"Encountered a bitstream not in this bundle while changing bitstream " +
|
||||||
"order. Bitstream order will not be changed.",
|
"order. Bitstream order will not be changed.",
|
||||||
"Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId));
|
"Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId));
|
||||||
@@ -380,7 +380,7 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
|
|||||||
|
|
||||||
// If our lists are different sizes, exit immediately
|
// If our lists are different sizes, exit immediately
|
||||||
if (updatedBitstreams.size() != currentBitstreams.size()) {
|
if (updatedBitstreams.size() != currentBitstreams.size()) {
|
||||||
log.warn(LogManager.getHeader(context,
|
log.warn(LogHelper.getHeader(context,
|
||||||
"Size of old list and new list do not match. Bitstream order will not be " +
|
"Size of old list and new list do not match. Bitstream order will not be " +
|
||||||
"changed.",
|
"changed.",
|
||||||
"Bundle: " + bundle.getID()));
|
"Bundle: " + bundle.getID()));
|
||||||
@@ -471,7 +471,7 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
|
|||||||
public void update(Context context, Bundle bundle) throws SQLException, AuthorizeException {
|
public void update(Context context, Bundle bundle) throws SQLException, AuthorizeException {
|
||||||
// Check authorisation
|
// Check authorisation
|
||||||
//AuthorizeManager.authorizeAction(ourContext, this, Constants.WRITE);
|
//AuthorizeManager.authorizeAction(ourContext, this, Constants.WRITE);
|
||||||
log.info(LogManager.getHeader(context, "update_bundle", "bundle_id="
|
log.info(LogHelper.getHeader(context, "update_bundle", "bundle_id="
|
||||||
+ bundle.getID()));
|
+ bundle.getID()));
|
||||||
|
|
||||||
super.update(context, bundle);
|
super.update(context, bundle);
|
||||||
@@ -491,7 +491,7 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
|
|||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void delete(Context context, Bundle bundle) throws SQLException, AuthorizeException, IOException {
|
public void delete(Context context, Bundle bundle) throws SQLException, AuthorizeException, IOException {
|
||||||
log.info(LogManager.getHeader(context, "delete_bundle", "bundle_id="
|
log.info(LogHelper.getHeader(context, "delete_bundle", "bundle_id="
|
||||||
+ bundle.getID()));
|
+ bundle.getID()));
|
||||||
|
|
||||||
authorizeService.authorizeAction(context, bundle, Constants.DELETE);
|
authorizeService.authorizeAction(context, bundle, Constants.DELETE);
|
||||||
|
@@ -14,6 +14,7 @@ import java.util.Arrays;
|
|||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
|
import java.util.UUID;
|
||||||
import javax.annotation.Nonnull;
|
import javax.annotation.Nonnull;
|
||||||
import javax.persistence.Cacheable;
|
import javax.persistence.Cacheable;
|
||||||
import javax.persistence.CascadeType;
|
import javax.persistence.CascadeType;
|
||||||
@@ -104,6 +105,16 @@ public class Collection extends DSpaceObject implements DSpaceObjectLegacySuppor
|
|||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Takes a pre-determined UUID to be passed to the object to allow for the
|
||||||
|
* restoration of previously defined UUID's.
|
||||||
|
*
|
||||||
|
* @param uuid Takes a uuid to be passed to the Pre-Defined UUID Generator
|
||||||
|
*/
|
||||||
|
protected Collection(UUID uuid) {
|
||||||
|
this.predefinedUUID = uuid;
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public String getName() {
|
public String getName() {
|
||||||
String value = getCollectionService()
|
String value = getCollectionService()
|
||||||
|
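Collection gains a protected constructor that accepts a pre-determined UUID, and create() in the service below grows an overload that threads such a UUID through to the DAO, so restore and import code can recreate an object under the identifier it previously had while ordinary callers keep the old signature. Only the overload/delegation shape is sketched here; the types are placeholders for the DSpace service and DAO classes:

    import java.util.UUID;

    class CollectionCreator {

        static final class Entry {
            final UUID id;
            final String handle;

            Entry(UUID id, String handle) {
                this.id = id;
                this.handle = handle;
            }
        }

        Entry create(String handle) {
            // the old signature keeps working and simply asks for a fresh UUID
            return create(handle, null);
        }

        Entry create(String handle, UUID uuid) {
            // a caller-supplied UUID pins the identifier, e.g. when restoring an
            // object that previously existed under a known UUID
            return new Entry(uuid != null ? uuid : UUID.randomUUID(), handle);
        }
    }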
@@ -39,7 +39,7 @@ import org.dspace.content.service.WorkspaceItemService;
|
|||||||
import org.dspace.core.Constants;
|
import org.dspace.core.Constants;
|
||||||
import org.dspace.core.Context;
|
import org.dspace.core.Context;
|
||||||
import org.dspace.core.I18nUtil;
|
import org.dspace.core.I18nUtil;
|
||||||
import org.dspace.core.LogManager;
|
import org.dspace.core.LogHelper;
|
||||||
import org.dspace.core.service.LicenseService;
|
import org.dspace.core.service.LicenseService;
|
||||||
import org.dspace.discovery.DiscoverQuery;
|
import org.dspace.discovery.DiscoverQuery;
|
||||||
import org.dspace.discovery.DiscoverResult;
|
import org.dspace.discovery.DiscoverResult;
|
||||||
@@ -130,11 +130,22 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
|||||||
@Override
|
@Override
|
||||||
public Collection create(Context context, Community community, String handle)
|
public Collection create(Context context, Community community, String handle)
|
||||||
throws SQLException, AuthorizeException {
|
throws SQLException, AuthorizeException {
|
||||||
|
return create(context, community, handle, null);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Collection create(Context context, Community community,
|
||||||
|
String handle, UUID uuid) throws SQLException, AuthorizeException {
|
||||||
if (community == null) {
|
if (community == null) {
|
||||||
throw new IllegalArgumentException("Community cannot be null when creating a new collection.");
|
throw new IllegalArgumentException("Community cannot be null when creating a new collection.");
|
||||||
}
|
}
|
||||||
|
|
||||||
Collection newCollection = collectionDAO.create(context, new Collection());
|
Collection newCollection;
|
||||||
|
if (uuid != null) {
|
||||||
|
newCollection = collectionDAO.create(context, new Collection(uuid));
|
||||||
|
} else {
|
||||||
|
newCollection = collectionDAO.create(context, new Collection());
|
||||||
|
}
|
||||||
//Add our newly created collection to our community, authorization checks occur in THIS method
|
//Add our newly created collection to our community, authorization checks occur in THIS method
|
||||||
communityService.addCollection(context, community, newCollection);
|
communityService.addCollection(context, community, newCollection);
|
||||||
|
|
||||||
@@ -148,7 +159,8 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
|||||||
authorizeService
|
authorizeService
|
||||||
.createResourcePolicy(context, newCollection, anonymousGroup, null, Constants.DEFAULT_ITEM_READ, null);
|
.createResourcePolicy(context, newCollection, anonymousGroup, null, Constants.DEFAULT_ITEM_READ, null);
|
||||||
authorizeService
|
authorizeService
|
||||||
.createResourcePolicy(context, newCollection, anonymousGroup, null, Constants.DEFAULT_BITSTREAM_READ, null);
|
.createResourcePolicy(context, newCollection, anonymousGroup, null,
|
||||||
|
Constants.DEFAULT_BITSTREAM_READ, null);
|
||||||
|
|
||||||
collectionDAO.save(context, newCollection);
|
collectionDAO.save(context, newCollection);
|
||||||
|
|
||||||
@@ -167,7 +179,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
|||||||
newCollection.getID(), newCollection.getHandle(),
|
newCollection.getID(), newCollection.getHandle(),
|
||||||
getIdentifiers(context, newCollection)));
|
getIdentifiers(context, newCollection)));
|
||||||
|
|
||||||
log.info(LogManager.getHeader(context, "create_collection",
|
log.info(LogHelper.getHeader(context, "create_collection",
|
||||||
"collection_id=" + newCollection.getID())
|
"collection_id=" + newCollection.getID())
|
||||||
+ ",handle=" + newCollection.getHandle());
|
+ ",handle=" + newCollection.getHandle());
|
||||||
|
|
||||||
@@ -318,7 +330,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
|||||||
* whitespace.
|
* whitespace.
|
||||||
*/
|
*/
|
||||||
if (value == null) {
|
if (value == null) {
|
||||||
clearMetadata(context, collection, field.SCHEMA, field.ELEMENT, field.QUALIFIER, Item.ANY);
|
clearMetadata(context, collection, field.schema, field.element, field.qualifier, Item.ANY);
|
||||||
collection.setMetadataModified();
|
collection.setMetadataModified();
|
||||||
} else {
|
} else {
|
||||||
super.setMetadataSingleValue(context, collection, field, null, value);
|
super.setMetadataSingleValue(context, collection, field, null, value);
|
||||||
@@ -345,7 +357,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
|||||||
|
|
||||||
if (is == null) {
|
if (is == null) {
|
||||||
collection.setLogo(null);
|
collection.setLogo(null);
|
||||||
log.info(LogManager.getHeader(context, "remove_logo",
|
log.info(LogHelper.getHeader(context, "remove_logo",
|
||||||
"collection_id=" + collection.getID()));
|
"collection_id=" + collection.getID()));
|
||||||
} else {
|
} else {
|
||||||
Bitstream newLogo = bitstreamService.create(context, is);
|
Bitstream newLogo = bitstreamService.create(context, is);
|
||||||
@@ -357,7 +369,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
|||||||
.getPoliciesActionFilter(context, collection, Constants.READ);
|
.getPoliciesActionFilter(context, collection, Constants.READ);
|
||||||
authorizeService.addPolicies(context, policies, newLogo);
|
authorizeService.addPolicies(context, policies, newLogo);
|
||||||
|
|
||||||
log.info(LogManager.getHeader(context, "set_logo",
|
log.info(LogHelper.getHeader(context, "set_logo",
|
||||||
"collection_id=" + collection.getID() + "logo_bitstream_id="
|
"collection_id=" + collection.getID() + "logo_bitstream_id="
|
||||||
+ newLogo.getID()));
|
+ newLogo.getID()));
|
||||||
}
|
}
|
||||||
@@ -393,7 +405,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
|||||||
try {
|
try {
|
||||||
workflow = workflowFactory.getWorkflow(collection);
|
workflow = workflowFactory.getWorkflow(collection);
|
||||||
} catch (WorkflowConfigurationException e) {
|
} catch (WorkflowConfigurationException e) {
|
||||||
log.error(LogManager.getHeader(context, "setWorkflowGroup",
|
log.error(LogHelper.getHeader(context, "setWorkflowGroup",
|
||||||
"collection_id=" + collection.getID() + " " + e.getMessage()), e);
|
"collection_id=" + collection.getID() + " " + e.getMessage()), e);
|
||||||
}
|
}
|
||||||
if (!StringUtils.equals(workflowFactory.getDefaultWorkflow().getID(), workflow.getID())) {
|
if (!StringUtils.equals(workflowFactory.getDefaultWorkflow().getID(), workflow.getID())) {
|
||||||
@@ -524,6 +536,8 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
|||||||
|
|
||||||
// register this as the admin group
|
// register this as the admin group
|
||||||
collection.setAdmins(admins);
|
collection.setAdmins(admins);
|
||||||
|
context.addEvent(new Event(Event.MODIFY, Constants.COLLECTION, collection.getID(),
|
||||||
|
null, getIdentifiers(context, collection)));
|
||||||
return admins;
|
return admins;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -540,6 +554,8 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
|||||||
|
|
||||||
// Remove the link to the collection table.
|
// Remove the link to the collection table.
|
||||||
collection.setAdmins(null);
|
collection.setAdmins(null);
|
||||||
|
context.addEvent(new Event(Event.MODIFY, Constants.COLLECTION, collection.getID(),
|
||||||
|
null, getIdentifiers(context, collection)));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@@ -569,7 +585,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
|||||||
Item template = itemService.createTemplateItem(context, collection);
|
Item template = itemService.createTemplateItem(context, collection);
|
||||||
collection.setTemplateItem(template);
|
collection.setTemplateItem(template);
|
||||||
|
|
||||||
log.info(LogManager.getHeader(context, "create_template_item",
|
log.info(LogHelper.getHeader(context, "create_template_item",
|
||||||
"collection_id=" + collection.getID() + ",template_item_id="
|
"collection_id=" + collection.getID() + ",template_item_id="
|
||||||
+ template.getID()));
|
+ template.getID()));
|
||||||
}
|
}
|
||||||
@@ -584,7 +600,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
|||||||
Item template = collection.getTemplateItem();
|
Item template = collection.getTemplateItem();
|
||||||
|
|
||||||
if (template != null) {
|
if (template != null) {
|
||||||
log.info(LogManager.getHeader(context, "remove_template_item",
|
log.info(LogHelper.getHeader(context, "remove_template_item",
|
||||||
"collection_id=" + collection.getID() + ",template_item_id="
|
"collection_id=" + collection.getID() + ",template_item_id="
|
||||||
+ template.getID()));
|
+ template.getID()));
|
||||||
// temporarily turn off auth system, we have already checked the permission on the top of the method
|
// temporarily turn off auth system, we have already checked the permission on the top of the method
|
||||||
@@ -604,7 +620,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
|||||||
// Check authorisation
|
// Check authorisation
|
||||||
authorizeService.authorizeAction(context, collection, Constants.ADD);
|
authorizeService.authorizeAction(context, collection, Constants.ADD);
|
||||||
|
|
||||||
log.info(LogManager.getHeader(context, "add_item", "collection_id="
|
log.info(LogHelper.getHeader(context, "add_item", "collection_id="
|
||||||
+ collection.getID() + ",item_id=" + item.getID()));
|
+ collection.getID() + ",item_id=" + item.getID()));
|
||||||
|
|
||||||
// Create mapping
|
// Create mapping
|
||||||
@@ -645,7 +661,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
|||||||
// Check authorisation
|
// Check authorisation
|
||||||
canEdit(context, collection, true);
|
canEdit(context, collection, true);
|
||||||
|
|
||||||
log.info(LogManager.getHeader(context, "update_collection",
|
log.info(LogHelper.getHeader(context, "update_collection",
|
||||||
"collection_id=" + collection.getID()));
|
"collection_id=" + collection.getID()));
|
||||||
|
|
||||||
super.update(context, collection);
|
super.update(context, collection);
|
||||||
@@ -657,8 +673,11 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
|||||||
collection.clearModified();
|
collection.clearModified();
|
||||||
}
|
}
|
||||||
if (collection.isMetadataModified()) {
|
if (collection.isMetadataModified()) {
|
||||||
collection.clearDetails();
|
context.addEvent(new Event(Event.MODIFY_METADATA, Constants.COLLECTION, collection.getID(),
|
||||||
|
collection.getDetails(),getIdentifiers(context, collection)));
|
||||||
|
collection.clearModified();
|
||||||
}
|
}
|
||||||
|
collection.clearDetails();
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@@ -702,7 +721,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
|
|||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void delete(Context context, Collection collection) throws SQLException, AuthorizeException, IOException {
|
public void delete(Context context, Collection collection) throws SQLException, AuthorizeException, IOException {
|
||||||
log.info(LogManager.getHeader(context, "delete_collection",
|
log.info(LogHelper.getHeader(context, "delete_collection",
|
||||||
"collection_id=" + collection.getID()));
|
"collection_id=" + collection.getID()));
|
||||||
|
|
||||||
// remove harvested collections.
|
// remove harvested collections.
|
||||||
|
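CollectionServiceImpl now queues events for administrative changes that previously went unannounced: registering or removing the admin group adds a MODIFY event, and update() fires MODIFY_METADATA before the detail string is cleared, so the event still carries the accumulated details. The event call is taken verbatim from the hunk above (a fragment, not standalone code):

    // Listeners such as the discovery indexer see the admin-group change like
    // any other modification of the collection.
    context.addEvent(new Event(Event.MODIFY, Constants.COLLECTION, collection.getID(),
            null, getIdentifiers(context, collection)));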
@@ -11,6 +11,7 @@ import java.util.Arrays;
|
|||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
|
import java.util.UUID;
|
||||||
import javax.persistence.Cacheable;
|
import javax.persistence.Cacheable;
|
||||||
import javax.persistence.CascadeType;
|
import javax.persistence.CascadeType;
|
||||||
import javax.persistence.Column;
|
import javax.persistence.Column;
|
||||||
@@ -24,7 +25,6 @@ import javax.persistence.Table;
|
|||||||
import javax.persistence.Transient;
|
import javax.persistence.Transient;
|
||||||
|
|
||||||
import org.apache.commons.lang3.builder.HashCodeBuilder;
|
import org.apache.commons.lang3.builder.HashCodeBuilder;
|
||||||
import org.apache.logging.log4j.Logger;
|
|
||||||
import org.dspace.content.comparator.NameAscendingComparator;
|
import org.dspace.content.comparator.NameAscendingComparator;
|
||||||
import org.dspace.content.factory.ContentServiceFactory;
|
import org.dspace.content.factory.ContentServiceFactory;
|
||||||
import org.dspace.content.service.CommunityService;
|
import org.dspace.content.service.CommunityService;
|
||||||
@@ -42,18 +42,12 @@ import org.hibernate.proxy.HibernateProxyHelper;
|
|||||||
* <code>update</code> is called.
|
* <code>update</code> is called.
|
||||||
*
|
*
|
||||||
* @author Robert Tansley
|
* @author Robert Tansley
|
||||||
* @version $Revision$
|
|
||||||
*/
|
*/
|
||||||
@Entity
|
@Entity
|
||||||
@Table(name = "community")
|
@Table(name = "community")
|
||||||
@Cacheable
|
@Cacheable
|
||||||
@org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, include = "non-lazy")
|
@org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, include = "non-lazy")
|
||||||
public class Community extends DSpaceObject implements DSpaceObjectLegacySupport {
|
public class Community extends DSpaceObject implements DSpaceObjectLegacySupport {
|
||||||
/**
|
|
||||||
* log4j category
|
|
||||||
*/
|
|
||||||
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(Community.class);
|
|
||||||
|
|
||||||
@Column(name = "community_id", insertable = false, updatable = false)
|
@Column(name = "community_id", insertable = false, updatable = false)
|
||||||
private Integer legacyId;
|
private Integer legacyId;
|
||||||
|
|
||||||
@@ -96,6 +90,16 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
|
|||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Takes a pre-determined UUID to be passed to the object to allow for the
|
||||||
|
* restoration of previously defined UUID's.
|
||||||
|
*
|
||||||
|
* @param uuid Takes a uuid to be passed to the Pre-Defined UUID Generator
|
||||||
|
*/
|
||||||
|
protected Community(UUID uuid) {
|
||||||
|
this.predefinedUUID = uuid;
|
||||||
|
}
|
||||||
|
|
||||||
void addSubCommunity(Community subCommunity) {
|
void addSubCommunity(Community subCommunity) {
|
||||||
subCommunities.add(subCommunity);
|
subCommunities.add(subCommunity);
|
||||||
setModified();
|
setModified();
|
||||||
@@ -215,7 +219,7 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
|
|||||||
*/
|
*/
|
||||||
@Override
|
@Override
|
||||||
public boolean equals(Object other) {
|
public boolean equals(Object other) {
|
||||||
if (other == null) {
|
if (!(other instanceof Community)) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(other);
|
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(other);
|
||||||
@@ -223,11 +227,7 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
|
|||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
final Community otherCommunity = (Community) other;
|
final Community otherCommunity = (Community) other;
|
||||||
if (!this.getID().equals(otherCommunity.getID())) {
|
return this.getID().equals(otherCommunity.getID());
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
return true;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@@ -33,7 +33,7 @@ import org.dspace.content.service.SiteService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
- import org.dspace.core.LogManager;
+ import org.dspace.core.LogHelper;
import org.dspace.eperson.Group;
import org.dspace.eperson.service.GroupService;
import org.dspace.event.Event;
@@ -86,13 +86,24 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp

@Override
public Community create(Community parent, Context context, String handle) throws SQLException, AuthorizeException {
+ return create(parent, context, handle, null);
+ }
+
+ @Override
+ public Community create(Community parent, Context context, String handle,
+ UUID uuid) throws SQLException, AuthorizeException {
if (!(authorizeService.isAdmin(context) ||
(parent != null && authorizeService.authorizeActionBoolean(context, parent, Constants.ADD)))) {
throw new AuthorizeException(
"Only administrators can create communities");
}

- Community newCommunity = communityDAO.create(context, new Community());
+ Community newCommunity;
+ if (uuid != null) {
+ newCommunity = communityDAO.create(context, new Community(uuid));
+ } else {
+ newCommunity = communityDAO.create(context, new Community());
+ }

if (parent != null) {
parent.addSubCommunity(newCommunity);
@@ -128,7 +139,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
getIdentifiers(context, newCommunity)));
}

- log.info(LogManager.getHeader(context, "create_community",
+ log.info(LogHelper.getHeader(context, "create_community",
"community_id=" + newCommunity.getID())
+ ",handle=" + newCommunity.getHandle());

@@ -195,7 +206,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
* whitespace.
*/
if (value == null) {
- clearMetadata(context, community, field.SCHEMA, field.ELEMENT, field.QUALIFIER, Item.ANY);
+ clearMetadata(context, community, field.schema, field.element, field.qualifier, Item.ANY);
community.setMetadataModified();
} else {
super.setMetadataSingleValue(context, community, field, null, value);
@@ -218,7 +229,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
// First, delete any existing logo
Bitstream oldLogo = community.getLogo();
if (oldLogo != null) {
- log.info(LogManager.getHeader(context, "remove_logo",
+ log.info(LogHelper.getHeader(context, "remove_logo",
"community_id=" + community.getID()));
community.setLogo(null);
bitstreamService.delete(context, oldLogo);
@@ -234,7 +245,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
.getPoliciesActionFilter(context, community, Constants.READ);
authorizeService.addPolicies(context, policies, newLogo);

- log.info(LogManager.getHeader(context, "set_logo",
+ log.info(LogHelper.getHeader(context, "set_logo",
"community_id=" + community.getID() + "logo_bitstream_id="
+ newLogo.getID()));
}
@@ -247,7 +258,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
// Check authorisation
canEdit(context, community);

- log.info(LogManager.getHeader(context, "update_community",
+ log.info(LogHelper.getHeader(context, "update_community",
"community_id=" + community.getID()));

super.update(context, community);
@@ -287,6 +298,8 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp

// register this as the admin group
community.setAdmins(admins);
+ context.addEvent(new Event(Event.MODIFY, Constants.COMMUNITY, community.getID(),
+ null, getIdentifiers(context, community)));
return admins;
}

@@ -302,6 +315,8 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp

// Remove the link to the community table.
community.setAdmins(null);
+ context.addEvent(new Event(Event.MODIFY, Constants.COMMUNITY, community.getID(),
+ null, getIdentifiers(context, community)));
}

@Override
@@ -365,7 +380,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
// Check authorisation
authorizeService.authorizeAction(context, community, Constants.ADD);

- log.info(LogManager.getHeader(context, "add_collection",
+ log.info(LogHelper.getHeader(context, "add_collection",
"community_id=" + community.getID() + ",collection_id=" + collection.getID()));

if (!community.getCollections().contains(collection)) {
@@ -383,13 +398,22 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
return createSubcommunity(context, parentCommunity, null);
}


@Override
public Community createSubcommunity(Context context, Community parentCommunity, String handle)
throws SQLException, AuthorizeException {
+ return createSubcommunity(context, parentCommunity, handle, null);
+ }
+
+ @Override
+ public Community createSubcommunity(Context context, Community parentCommunity, String handle,
+ UUID uuid) throws SQLException, AuthorizeException {
// Check authorisation
authorizeService.authorizeAction(context, parentCommunity, Constants.ADD);

- Community c = create(parentCommunity, context, handle);
+ Community c;
+ c = create(parentCommunity, context, handle, uuid);

addSubcommunity(context, parentCommunity, c);

return c;
@@ -401,7 +425,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
// Check authorisation
authorizeService.authorizeAction(context, parentCommunity, Constants.ADD);

- log.info(LogManager.getHeader(context, "add_subcommunity",
+ log.info(LogHelper.getHeader(context, "add_subcommunity",
"parent_comm_id=" + parentCommunity.getID() + ",child_comm_id=" + childCommunity
.getID()));

@@ -431,7 +455,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
collection.removeCommunity(community);
}

- log.info(LogManager.getHeader(context, "remove_collection",
+ log.info(LogHelper.getHeader(context, "remove_collection",
"community_id=" + community.getID() + ",collection_id=" + collection.getID()));

// Remove any mappings
@@ -451,7 +475,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp

rawDelete(context, childCommunity);

- log.info(LogManager.getHeader(context, "remove_subcommunity",
+ log.info(LogHelper.getHeader(context, "remove_subcommunity",
"parent_comm_id=" + parentCommunity.getID() + ",child_comm_id=" + childCommunity
.getID()));

@@ -519,7 +543,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
*/
protected void rawDelete(Context context, Community community)
throws SQLException, AuthorizeException, IOException {
- log.info(LogManager.getHeader(context, "delete_community",
+ log.info(LogHelper.getHeader(context, "delete_community",
"community_id=" + community.getID()));

context.addEvent(new Event(Event.DELETE, Constants.COMMUNITY, community.getID(), community.getHandle(),
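A minimal usage sketch of the new create overload above; the communityService and context variables and the literal UUID are assumptions for illustration only, not part of this change:

// Illustration only: restore a top-level community under a previously captured UUID.
UUID restoredId = UUID.fromString("9f6d5c3a-0000-0000-0000-000000000001"); // example value
Community restored = communityService.create(null, context, null, restoredId);
// The Pre-Defined UUID Generator picks the value up through the Community(UUID) constructor,
// so restored.getID() should equal restoredId.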
@@ -34,12 +34,11 @@ import org.apache.logging.log4j.Logger;
* There are four levels of granularity, depending on how much date information
* is available: year, month, day, time.
* <P>
- * Examples: <code>1994-05-03T15:30:24</code>,<code>1995-10-04</code>,
- * <code>2001-10</code>,<code>1975</code>
+ * Examples: {@code 1994-05-03T15:30:24}, {@code 1995-10-04},
+ * {@code 2001-10}, {@code 1975}
*
* @author Robert Tansley
* @author Larry Stone
- * @version $Revision$
*/
public class DCDate {
/**
@@ -262,7 +261,7 @@ public class DCDate {
* @return the year
*/
public int getYear() {
- return (!withinGranularity(DateGran.YEAR)) ? -1 : localCalendar.get(Calendar.YEAR);
+ return !withinGranularity(DateGran.YEAR) ? -1 : localCalendar.get(Calendar.YEAR);
}

/**
@@ -271,7 +270,7 @@ public class DCDate {
* @return the month
*/
public int getMonth() {
- return (!withinGranularity(DateGran.MONTH)) ? -1 : localCalendar.get(Calendar.MONTH) + 1;
+ return !withinGranularity(DateGran.MONTH) ? -1 : localCalendar.get(Calendar.MONTH) + 1;
}

/**
@@ -280,7 +279,7 @@ public class DCDate {
* @return the day
*/
public int getDay() {
- return (!withinGranularity(DateGran.DAY)) ? -1 : localCalendar.get(Calendar.DAY_OF_MONTH);
+ return !withinGranularity(DateGran.DAY) ? -1 : localCalendar.get(Calendar.DAY_OF_MONTH);
}

/**
@@ -289,7 +288,7 @@ public class DCDate {
* @return the hour
*/
public int getHour() {
- return (!withinGranularity(DateGran.TIME)) ? -1 : localCalendar.get(Calendar.HOUR_OF_DAY);
+ return !withinGranularity(DateGran.TIME) ? -1 : localCalendar.get(Calendar.HOUR_OF_DAY);
}

/**
@@ -298,7 +297,7 @@ public class DCDate {
* @return the minute
*/
public int getMinute() {
- return (!withinGranularity(DateGran.TIME)) ? -1 : localCalendar.get(Calendar.MINUTE);
+ return !withinGranularity(DateGran.TIME) ? -1 : localCalendar.get(Calendar.MINUTE);
}

/**
@@ -307,7 +306,7 @@ public class DCDate {
* @return the second
*/
public int getSecond() {
- return (!withinGranularity(DateGran.TIME)) ? -1 : localCalendar.get(Calendar.SECOND);
+ return !withinGranularity(DateGran.TIME) ? -1 : localCalendar.get(Calendar.SECOND);
}

/**
@@ -316,7 +315,7 @@ public class DCDate {
* @return the year
*/
public int getYearUTC() {
- return (!withinGranularity(DateGran.YEAR)) ? -1 : calendar.get(Calendar.YEAR);
+ return !withinGranularity(DateGran.YEAR) ? -1 : calendar.get(Calendar.YEAR);
}

/**
@@ -325,7 +324,7 @@ public class DCDate {
* @return the month
*/
public int getMonthUTC() {
- return (!withinGranularity(DateGran.MONTH)) ? -1 : calendar.get(Calendar.MONTH) + 1;
+ return !withinGranularity(DateGran.MONTH) ? -1 : calendar.get(Calendar.MONTH) + 1;
}

/**
@@ -334,7 +333,7 @@ public class DCDate {
* @return the day
*/
public int getDayUTC() {
- return (!withinGranularity(DateGran.DAY)) ? -1 : calendar.get(Calendar.DAY_OF_MONTH);
+ return !withinGranularity(DateGran.DAY) ? -1 : calendar.get(Calendar.DAY_OF_MONTH);
}

/**
@@ -343,7 +342,7 @@ public class DCDate {
* @return the hour
*/
public int getHourUTC() {
- return (!withinGranularity(DateGran.TIME)) ? -1 : calendar.get(Calendar.HOUR_OF_DAY);
+ return !withinGranularity(DateGran.TIME) ? -1 : calendar.get(Calendar.HOUR_OF_DAY);
}

/**
@@ -352,7 +351,7 @@ public class DCDate {
* @return the minute
*/
public int getMinuteUTC() {
- return (!withinGranularity(DateGran.TIME)) ? -1 : calendar.get(Calendar.MINUTE);
+ return !withinGranularity(DateGran.TIME) ? -1 : calendar.get(Calendar.MINUTE);
}

/**
@@ -361,15 +360,15 @@ public class DCDate {
* @return the second
*/
public int getSecondUTC() {
- return (!withinGranularity(DateGran.TIME)) ? -1 : calendar.get(Calendar.SECOND);
+ return !withinGranularity(DateGran.TIME) ? -1 : calendar.get(Calendar.SECOND);
}


/**
* Get the date as a string to put back in the Dublin Core. Use the UTC/GMT calendar version.
*
* @return The date as a string.
*/
+ @Override
public String toString() {
if (calendar == null) {
return "null";
@@ -18,7 +18,6 @@ package org.dspace.content;
* <em>FIXME: No policy for dealing with "van"/"van der" and "Jr."</em>
*
* @author Robert Tansley
- * @version $Revision$
*/
public class DCPersonName {
/**
@@ -89,8 +88,9 @@ public class DCPersonName {
*
* @return the name, suitable for putting in the database
*/
+ @Override
public String toString() {
- StringBuffer out = new StringBuffer();
+ StringBuilder out = new StringBuilder();

if (lastName != null) {
out.append(lastName);
@@ -8,10 +8,9 @@
package org.dspace.content;

/**
- * Series and report number, as stored in relation.ispartofseries
+ * Series and report number, as stored in {@code relation.ispartofseries}.
*
* @author Robert Tansley
- * @version $Id$
*/
public class DCSeriesNumber {
/**
@@ -70,6 +69,7 @@ public class DCSeriesNumber {
*
* @return the series and number as they should be stored in the DB
*/
+ @Override
public String toString() {
if (series == null) {
return (null);
@@ -38,8 +38,8 @@ import org.hibernate.annotations.GenericGenerator;
@Table(name = "dspaceobject")
public abstract class DSpaceObject implements Serializable, ReloadableEntity<java.util.UUID> {
@Id
- @GeneratedValue(generator = "system-uuid")
- @GenericGenerator(name = "system-uuid", strategy = "uuid2")
+ @GeneratedValue(generator = "predefined-uuid")
+ @GenericGenerator(name = "predefined-uuid", strategy = "org.dspace.content.PredefinedUUIDGenerator")
@Column(name = "uuid", unique = true, nullable = false, insertable = true, updatable = false)
protected java.util.UUID id;

@@ -61,7 +61,7 @@ public abstract class DSpaceObject implements Serializable, ReloadableEntity<jav
private List<Handle> handles = new ArrayList<>();

@OneToMany(fetch = FetchType.LAZY, mappedBy = "dSpaceObject", cascade = CascadeType.ALL)
- private List<ResourcePolicy> resourcePolicies = new ArrayList<>();
+ private final List<ResourcePolicy> resourcePolicies = new ArrayList<>();

/**
* True if anything else was changed since last update()
@@ -76,6 +76,15 @@ public abstract class DSpaceObject implements Serializable, ReloadableEntity<jav
@Transient
private boolean modified = false;

+ /**
+ * This will read our predefinedUUID property to pass it along to the UUID generator
+ */
+ @Transient
+ protected UUID predefinedUUID;
+ public UUID getPredefinedUUID() {
+ return predefinedUUID;
+ }

protected DSpaceObject() {

}
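The PredefinedUUIDGenerator named in the @GenericGenerator strategy above is not shown in this diff. The following is only a sketch of its likely shape, assuming the Hibernate 5 API and a fallback to the stock random-UUID behaviour when no predefined value is set:

import java.io.Serializable;
import org.hibernate.HibernateException;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.id.UUIDGenerator;

public class PredefinedUUIDGenerator extends UUIDGenerator {
    @Override
    public Serializable generate(SharedSessionContractImplementor session, Object object)
            throws HibernateException {
        // Sketch/assumption: if the entity carries a predefined UUID, reuse it for restoration.
        if (object instanceof DSpaceObject) {
            java.util.UUID predefined = ((DSpaceObject) object).getPredefinedUUID();
            if (predefined != null) {
                return predefined;
            }
        }
        // Otherwise fall back to the default random UUID generation.
        return super.generate(session, object);
    }
}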
@@ -13,11 +13,11 @@ import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
- import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.function.Supplier;
+ import java.util.stream.Collectors;

import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang.NotImplementedException;
@@ -435,7 +435,7 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
@Override
public String getMetadataFirstValue(T dso, MetadataFieldName field, String language) {
List<MetadataValue> metadataValues
- = getMetadata(dso, field.SCHEMA, field.ELEMENT, field.QUALIFIER, language);
+ = getMetadata(dso, field.schema, field.element, field.qualifier, language);
if (CollectionUtils.isNotEmpty(metadataValues)) {
return metadataValues.get(0).getValue();
}
@@ -462,11 +462,11 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
String language, String value)
throws SQLException {
if (value != null) {
- clearMetadata(context, dso, field.SCHEMA, field.ELEMENT, field.QUALIFIER,
+ clearMetadata(context, dso, field.schema, field.element, field.qualifier,
language);

- String newValueLanguage = (Item.ANY.equals(language)) ? null : language;
- addMetadata(context, dso, field.SCHEMA, field.ELEMENT, field.QUALIFIER,
+ String newValueLanguage = Item.ANY.equals(language) ? null : language;
+ addMetadata(context, dso, field.schema, field.element, field.qualifier,
newValueLanguage, value);
dso.setMetadataModified();
}
@@ -610,7 +610,7 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
*/
// A map created to store the latest place for each metadata field
Map<MetadataField, Integer> fieldToLastPlace = new HashMap<>();
- List<MetadataValue> metadataValues = new LinkedList<>();
+ List<MetadataValue> metadataValues;
if (dso.getType() == Constants.ITEM) {
metadataValues = getMetadata(dso, Item.ANY, Item.ANY, Item.ANY, Item.ANY);
} else {
@@ -643,7 +643,7 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
String authority = metadataValue.getAuthority();
String relationshipId = StringUtils.split(authority, "::")[1];
Relationship relationship = relationshipService.find(context, Integer.parseInt(relationshipId));
- if (relationship.getLeftItem() == (Item) dso) {
+ if (relationship.getLeftItem().equals((Item) dso)) {
relationship.setLeftPlace(mvPlace);
} else {
relationship.setRightPlace(mvPlace);
@@ -742,12 +742,15 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
@Override
public void moveMetadata(Context context, T dso, String schema, String element, String qualifier, int from, int to)
throws SQLException, IllegalArgumentException {

if (from == to) {
throw new IllegalArgumentException("The \"from\" location MUST be different from \"to\" location");
}

- List<MetadataValue> list = getMetadata(dso, schema, element, qualifier);
+ List<MetadataValue> list =
+ getMetadata(dso, schema, element, qualifier).stream()
+ .sorted(Comparator.comparing(MetadataValue::getPlace))
+ .collect(Collectors.toList());


if (from >= list.size() || to >= list.size() || to < 0 || from < 0) {
throw new IllegalArgumentException(
@@ -78,6 +78,7 @@ public class EntityType implements ReloadableEntity<Integer> {
*
* @return The ID for this EntityType
*/
+ @Override
public Integer getID() {
return id;
}
@@ -87,6 +88,7 @@ public class EntityType implements ReloadableEntity<Integer> {
* @param obj object to be compared
* @return
*/
+ @Override
public boolean equals(Object obj) {
if (!(obj instanceof EntityType)) {
return false;
@@ -97,10 +99,7 @@ public class EntityType implements ReloadableEntity<Integer> {
return false;
}

- if (!StringUtils.equals(this.getLabel(), entityType.getLabel())) {
- return false;
- }
- return true;
+ return StringUtils.equals(this.getLabel(), entityType.getLabel());
}

/**
@@ -17,7 +17,6 @@ import org.dspace.eperson.EPerson;
* which stage of submission they are (in workspace or workflow system)
*
* @author Robert Tansley
- * @version $Revision$
*/
public interface InProgressSubmission extends ReloadableEntity<Integer> {
/**
@@ -25,6 +24,7 @@ public interface InProgressSubmission extends ReloadableEntity<Integer> {
*
* @return the internal identifier
*/
+ @Override
Integer getID();

/**
@@ -13,6 +13,7 @@ import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
+ import java.util.UUID;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
@@ -27,8 +28,6 @@ import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.Transient;

- import org.apache.logging.log4j.LogManager;
- import org.apache.logging.log4j.Logger;
import org.dspace.content.comparator.NameAscendingComparator;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
@@ -49,17 +48,10 @@ import org.hibernate.proxy.HibernateProxyHelper;
*
* @author Robert Tansley
* @author Martin Hald
- * @version $Revision$
*/
@Entity
@Table(name = "item")
public class Item extends DSpaceObject implements DSpaceObjectLegacySupport {

- /**
- * log4j logger
- */
- private static final Logger log = LogManager.getLogger();
-
/**
* Wild card for Dublin Core metadata qualifiers/languages
*/
@@ -131,6 +123,16 @@ public class Item extends DSpaceObject implements DSpaceObjectLegacySupport {

}

+ /**
+ * Takes a pre-determined UUID to be passed to the object to allow for the
+ * restoration of previously defined UUID's.
+ *
+ * @param uuid Takes a uuid to be passed to the Pre-Defined UUID Generator
+ */
+ protected Item(UUID uuid) {
+ this.predefinedUUID = uuid;
+ }

/**
* Find out if the item is part of the main archive
*
@@ -297,7 +299,7 @@ public class Item extends DSpaceObject implements DSpaceObjectLegacySupport {
* @return the bundles in an unordered array
*/
public List<Bundle> getBundles(String name) {
- List<Bundle> matchingBundles = new ArrayList<Bundle>();
+ List<Bundle> matchingBundles = new ArrayList<>();
// now only keep bundles with matching names
List<Bundle> bunds = getBundles();
for (Bundle bundle : bunds) {
@@ -328,7 +330,7 @@ public class Item extends DSpaceObject implements DSpaceObjectLegacySupport {

/**
* Return <code>true</code> if <code>other</code> is the same Item as
- * this object, <code>false</code> otherwise
+ * this object, <code>false</code> otherwise.
*
* @param obj object to compare to
* @return <code>true</code> if object passed in represents the same item
@@ -336,7 +338,7 @@ public class Item extends DSpaceObject implements DSpaceObjectLegacySupport {
*/
@Override
public boolean equals(Object obj) {
- if (obj == null) {
+ if (!(obj instanceof Item)) {
return false;
}
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(obj);
@@ -344,10 +346,7 @@ public class Item extends DSpaceObject implements DSpaceObjectLegacySupport {
return false;
}
final Item otherItem = (Item) obj;
- if (!this.getID().equals(otherItem.getID())) {
- return false;
- }
- return true;
+ return this.getID().equals(otherItem.getID());
}

@Override
@@ -47,7 +47,7 @@ import org.dspace.content.service.WorkspaceItemService;
import org.dspace.content.virtual.VirtualMetadataPopulator;
import org.dspace.core.Constants;
import org.dspace.core.Context;
- import org.dspace.core.LogManager;
+ import org.dspace.core.LogHelper;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.event.Event;
@@ -159,7 +159,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
Item item = itemDAO.findByID(context, Item.class, id);
if (item == null) {
if (log.isDebugEnabled()) {
- log.debug(LogManager.getHeader(context, "find_item",
+ log.debug(LogHelper.getHeader(context, "find_item",
"not_found,item_id=" + id));
}
return null;
@@ -167,7 +167,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It

// not null, return item
if (log.isDebugEnabled()) {
- log.debug(LogManager.getHeader(context, "find_item", "item_id="
+ log.debug(LogHelper.getHeader(context, "find_item", "item_id="
+ id));
}

@@ -176,15 +176,28 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It

@Override
public Item create(Context context, WorkspaceItem workspaceItem) throws SQLException, AuthorizeException {
+ return create(context, workspaceItem, null);
+ }
+
+ @Override
+ public Item create(Context context, WorkspaceItem workspaceItem,
+ UUID uuid) throws SQLException, AuthorizeException {
+ Collection collection = workspaceItem.getCollection();
+ authorizeService.authorizeAction(context, collection, Constants.ADD);
if (workspaceItem.getItem() != null) {
throw new IllegalArgumentException(
"Attempting to create an item for a workspace item that already contains an item");
}
- Item item = createItem(context);
+ Item item = null;
+ if (uuid != null) {
+ item = createItem(context, uuid);
+ } else {
+ item = createItem(context);
+ }
workspaceItem.setItem(item);


- log.info(LogManager.getHeader(context, "create_item", "item_id="
+ log.info(LogHelper.getHeader(context, "create_item", "item_id="
+ item.getID()));

return item;
@@ -202,7 +215,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
collection.setTemplateItem(template);
template.setTemplateItemOf(collection);

- log.info(LogManager.getHeader(context, "create_template_item",
+ log.info(LogHelper.getHeader(context, "create_template_item",
"collection_id=" + collection.getID() + ",template_item_id="
+ template.getID()));

@@ -340,7 +353,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
// Check authorisation
authorizeService.authorizeAction(context, item, Constants.ADD);

- log.info(LogManager.getHeader(context, "add_bundle", "item_id="
+ log.info(LogHelper.getHeader(context, "add_bundle", "item_id="
+ item.getID() + ",bundle_id=" + bundle.getID()));

// Check it's not already there
@@ -368,7 +381,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
// Check authorisation
authorizeService.authorizeAction(context, item, Constants.REMOVE);

- log.info(LogManager.getHeader(context, "remove_bundle", "item_id="
+ log.info(LogHelper.getHeader(context, "remove_bundle", "item_id="
+ item.getID() + ",bundle_id=" + bundle.getID()));

context.addEvent(new Event(Event.REMOVE, Constants.ITEM, item.getID(),
@@ -418,6 +431,30 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
return bitstreamList;
}

+ protected Item createItem(Context context, UUID uuid) throws SQLException, AuthorizeException {
+ Item item;
+ if (uuid != null) {
+ item = itemDAO.create(context, new Item(uuid));
+ } else {
+ item = itemDAO.create(context, new Item());
+ }
+ // set discoverable to true (default)
+ item.setDiscoverable(true);
+
+ // Call update to give the item a last modified date. OK this isn't
+ // amazingly efficient but creates don't happen that often.
+ context.turnOffAuthorisationSystem();
+ update(context, item);
+ context.restoreAuthSystemState();
+
+ context.addEvent(new Event(Event.CREATE, Constants.ITEM, item.getID(),
+ null, getIdentifiers(context, item)));
+
+ log.info(LogHelper.getHeader(context, "create_item", "item_id=" + item.getID()));
+
+ return item;
+ }
+
protected Item createItem(Context context) throws SQLException, AuthorizeException {
Item item = itemDAO.create(context, new Item());
// set discoverable to true (default)
@@ -432,7 +469,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
context.addEvent(new Event(Event.CREATE, Constants.ITEM, item.getID(),
null, getIdentifiers(context, item)));

- log.info(LogManager.getHeader(context, "create_item", "item_id=" + item.getID()));
+ log.info(LogHelper.getHeader(context, "create_item", "item_id=" + item.getID()));

return item;
}
@@ -490,7 +527,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
authorizeService.authorizeAction(context, item, Constants.WRITE);
}

- log.info(LogManager.getHeader(context, "update_item", "item_id="
+ log.info(LogHelper.getHeader(context, "update_item", "item_id="
+ item.getID()));

super.update(context, item);
@@ -595,7 +632,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
}

// Write log
- log.info(LogManager.getHeader(context, "withdraw_item", "user="
+ log.info(LogHelper.getHeader(context, "withdraw_item", "user="
+ e.getEmail() + ",item_id=" + item.getID()));
}

@@ -661,7 +698,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
}

// Write log
- log.info(LogManager.getHeader(context, "reinstate_item", "user="
+ log.info(LogHelper.getHeader(context, "reinstate_item", "user="
+ e.getEmail() + ",item_id=" + item.getID()));
}

@@ -682,7 +719,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
context.addEvent(new Event(Event.DELETE, Constants.ITEM, item.getID(),
item.getHandle(), getIdentifiers(context, item)));

- log.info(LogManager.getHeader(context, "delete_item", "item_id="
+ log.info(LogHelper.getHeader(context, "delete_item", "item_id="
+ item.getID()));

// Remove relationships
@@ -731,7 +768,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It

bundleService.delete(context, b);

- log.info(LogManager.getHeader(context, "remove_bundle", "item_id="
+ log.info(LogHelper.getHeader(context, "remove_bundle", "item_id="
+ item.getID() + ",bundle_id=" + b.getID()));
context
.addEvent(new Event(Event.REMOVE, Constants.ITEM, item.getID(), Constants.BUNDLE, b.getID(), b.getName()));
@@ -802,7 +839,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
adjustItemPolicies(context, item, collection);
adjustBundleBitstreamPolicies(context, item, collection);

- log.debug(LogManager.getHeader(context, "item_inheritCollectionDefaultPolicies",
+ log.debug(LogHelper.getHeader(context, "item_inheritCollectionDefaultPolicies",
"item_id=" + item.getID()));
}

@@ -890,7 +927,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
// If we are moving from the owning collection, update that too
if (isOwningCollection(item, from)) {
// Update the owning collection
- log.info(LogManager.getHeader(context, "move_item",
+ log.info(LogHelper.getHeader(context, "move_item",
"item_id=" + item.getID() + ", from " +
"collection_id=" + from.getID() + " to " +
"collection_id=" + to.getID()));
@@ -898,7 +935,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It

// If applicable, update the item policies
if (inheritDefaultPolicies) {
- log.info(LogManager.getHeader(context, "move_item",
+ log.info(LogHelper.getHeader(context, "move_item",
"Updating item with inherited policies"));
inheritCollectionDefaultPolicies(context, item, to);
}
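A sketch of how a caller might use the new ItemService overload above to restore an item under a known UUID; the itemService, context and workspaceItem variables and the literal UUID are illustrative assumptions:

// Illustration only: create the item for a workspace item while forcing a previously used UUID.
UUID previousId = UUID.fromString("2f1c0d9e-0000-0000-0000-00000000000a"); // example value
Item restored = itemService.create(context, workspaceItem, previousId);
// createItem(context, uuid) wraps new Item(uuid), so the Pre-Defined UUID Generator
// assigns previousId instead of generating a fresh one.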
@@ -32,7 +32,6 @@ import org.hibernate.proxy.HibernateProxyHelper;
* metadata element belongs in a field.
*
* @author Martin Hald
- * @version $Revision$
* @see org.dspace.content.MetadataValue
* @see org.dspace.content.MetadataSchema
*/
@@ -77,6 +76,7 @@ public class MetadataField implements ReloadableEntity<Integer> {
*
* @return metadata field id
*/
+ @Override
public Integer getID() {
return id;
}
@@ -164,7 +164,7 @@ public class MetadataField implements ReloadableEntity<Integer> {
*/
@Override
public boolean equals(Object obj) {
- if (obj == null) {
+ if (!(obj instanceof MetadataField)) {
return false;
}
Class<?> objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(obj);
@@ -175,10 +175,7 @@ public class MetadataField implements ReloadableEntity<Integer> {
if (!this.getID().equals(other.getID())) {
return false;
}
- if (!getMetadataSchema().equals(other.getMetadataSchema())) {
- return false;
- }
- return true;
+ return getMetadataSchema().equals(other.getMetadataSchema());
}

@Override
@@ -17,13 +17,13 @@ import javax.annotation.Nonnull;
*/
public class MetadataFieldName {
/** Name of the metadata schema which defines this field. Never null. */
- public final String SCHEMA;
+ public final String schema;

/** Element name of this field. Never null. */
- public final String ELEMENT;
+ public final String element;

/** Qualifier name of this field. May be {@code null}. */
- public final String QUALIFIER;
+ public final String qualifier;

/**
* Initialize a tuple of (schema, element, qualifier) to name a metadata field.
@@ -40,9 +40,9 @@ public class MetadataFieldName {
throw new NullPointerException("Element must not be null.");
}

- SCHEMA = schema;
- ELEMENT = element;
- QUALIFIER = qualifier;
+ this.schema = schema;
+ this.element = element;
+ this.qualifier = qualifier;
}

/**
@@ -59,9 +59,9 @@ public class MetadataFieldName {
throw new NullPointerException("Element must not be null.");
}

- SCHEMA = schema;
- ELEMENT = element;
- QUALIFIER = null;
+ this.schema = schema;
+ this.element = element;
+ qualifier = null;
}

/**
@@ -79,9 +79,9 @@ public class MetadataFieldName {
throw new IllegalArgumentException("Element must not be null.");
}

- SCHEMA = schema.getName();
- ELEMENT = element;
- QUALIFIER = qualifier;
+ this.schema = schema.getName();
+ this.element = element;
+ this.qualifier = qualifier;
}

/**
@@ -98,9 +98,9 @@ public class MetadataFieldName {
throw new IllegalArgumentException("Element must not be null.");
}

- SCHEMA = schema.getName();
- ELEMENT = element;
- QUALIFIER = null;
+ this.schema = schema.getName();
+ this.element = element;
+ qualifier = null;
}

/**
@@ -110,9 +110,9 @@ public class MetadataFieldName {
*/
public MetadataFieldName(@Nonnull String name) {
String[] elements = parse(name);
- SCHEMA = elements[0];
- ELEMENT = elements[1];
- QUALIFIER = elements[2];
+ schema = elements[0];
+ element = elements[1];
+ qualifier = elements[2];
}

/**
@@ -138,17 +138,17 @@ public class MetadataFieldName {

/**
* Format a dotted-atoms representation of this field name.
- * @return SCHEMA.ELEMENT.QUALIFIER
+ * @return schema.element.qualifier
*/
@Override
public String toString() {
StringBuilder buffer = new StringBuilder(32);
- buffer.append(SCHEMA)
+ buffer.append(schema)
.append('.')
- .append(ELEMENT);
- if (null != QUALIFIER) {
+ .append(element);
+ if (null != qualifier) {
buffer.append('.')
- .append(QUALIFIER);
+ .append(qualifier);
}
return buffer.toString();
}
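A small illustration of the renamed public fields above; the "dc.title" value is just an example field name:

MetadataFieldName field = new MetadataFieldName("dc.title"); // example field name
String schema = field.schema;       // "dc"
String element = field.element;     // "title"
String qualifier = field.qualifier; // null for an unqualified field
System.out.println(field);          // prints "dc.title"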
@@ -25,7 +25,7 @@ import org.dspace.content.service.MetadataValueService;
import org.dspace.content.service.SiteService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
- import org.dspace.core.LogManager;
+ import org.dspace.core.LogHelper;
import org.dspace.discovery.indexobject.IndexableMetadataField;
import org.dspace.event.Event;
import org.springframework.beans.factory.annotation.Autowired;
@@ -83,7 +83,7 @@ public class MetadataFieldServiceImpl implements MetadataFieldService {
metadataField = metadataFieldDAO.create(context, metadataField);
metadataFieldDAO.save(context, metadataField);

- log.info(LogManager.getHeader(context, "create_metadata_field",
+ log.info(LogHelper.getHeader(context, "create_metadata_field",
"metadata_field_id=" + metadataField.getID()));
// Update the index of type metadatafield
this.triggerEventToUpdateIndex(context, metadataField.getID());
@@ -155,7 +155,7 @@ public class MetadataFieldServiceImpl implements MetadataFieldService {

metadataFieldDAO.save(context, metadataField);

- log.info(LogManager.getHeader(context, "update_metadatafieldregistry",
+ log.info(LogHelper.getHeader(context, "update_metadatafieldregistry",
"metadata_field_id=" + metadataField.getID() + "element=" + metadataField
.getElement()
+ "qualifier=" + metadataField.getQualifier()));
@@ -187,7 +187,7 @@ public class MetadataFieldServiceImpl implements MetadataFieldService {
.toString() + " cannot be deleted as it is currently used by one or more objects.");
}

- log.info(LogManager.getHeader(context, "delete_metadata_field",
+ log.info(LogHelper.getHeader(context, "delete_metadata_field",
"metadata_field_id=" + metadataField.getID()));
// Update the index of type metadatafield
this.triggerEventToUpdateIndex(context, metadataField.getID());
@@ -30,7 +30,6 @@ import org.hibernate.proxy.HibernateProxyHelper;
* </p>
*
* @author Martin Hald
- * @version $Revision$
* @see org.dspace.content.MetadataValue
* @see org.dspace.content.MetadataField
*/
@@ -129,6 +128,7 @@ public class MetadataSchema implements ReloadableEntity<Integer> {
*
* @return schema record key
*/
+ @Override
public Integer getID() {
return id;
}
@@ -17,7 +17,7 @@ import org.dspace.content.dao.MetadataSchemaDAO;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.core.Context;
- import org.dspace.core.LogManager;
+ import org.dspace.core.LogHelper;
import org.springframework.beans.factory.annotation.Autowired;

/**
@@ -74,7 +74,7 @@ public class MetadataSchemaServiceImpl implements MetadataSchemaService {
metadataSchema.setNamespace(namespace);
metadataSchema.setName(name);
metadataSchemaDAO.save(context, metadataSchema);
- log.info(LogManager.getHeader(context, "create_metadata_schema",
+ log.info(LogHelper.getHeader(context, "create_metadata_schema",
"metadata_schema_id="
+ metadataSchema.getID()));
return metadataSchema;
@@ -106,7 +106,7 @@ public class MetadataSchemaServiceImpl implements MetadataSchemaService {
+ " unique");
}
metadataSchemaDAO.save(context, metadataSchema);
- log.info(LogManager.getHeader(context, "update_metadata_schema",
+ log.info(LogHelper.getHeader(context, "update_metadata_schema",
"metadata_schema_id=" + metadataSchema.getID() + "namespace="
+ metadataSchema.getNamespace() + "name=" + metadataSchema.getName()));
}
@@ -125,7 +125,7 @@ public class MetadataSchemaServiceImpl implements MetadataSchemaService {

metadataSchemaDAO.delete(context, metadataSchema);

- log.info(LogManager.getHeader(context, "delete_metadata_schema",
+ log.info(LogHelper.getHeader(context, "delete_metadata_schema",
"metadata_schema_id=" + metadataSchema.getID()));
}

@@ -46,7 +46,7 @@ public class MetadataValue implements ReloadableEntity<Integer> {
|
|||||||
@Column(name = "metadata_value_id")
|
@Column(name = "metadata_value_id")
|
||||||
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "metadatavalue_seq")
|
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "metadatavalue_seq")
|
||||||
@SequenceGenerator(name = "metadatavalue_seq", sequenceName = "metadatavalue_seq", allocationSize = 1)
|
@SequenceGenerator(name = "metadatavalue_seq", sequenceName = "metadatavalue_seq", allocationSize = 1)
|
||||||
private Integer id;
|
private final Integer id;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The primary key for the metadata value
|
* The primary key for the metadata value
|
||||||
@@ -104,6 +104,7 @@ public class MetadataValue implements ReloadableEntity<Integer> {
|
|||||||
*
|
*
|
||||||
* @return metadata value ID
|
* @return metadata value ID
|
||||||
*/
|
*/
|
||||||
|
@Override
|
||||||
public Integer getID() {
|
public Integer getID() {
|
||||||
return id;
|
return id;
|
||||||
}
|
}
|
||||||
@@ -249,10 +250,7 @@ public class MetadataValue implements ReloadableEntity<Integer> {
|
|||||||
if (!this.getID().equals(other.getID())) {
|
if (!this.getID().equals(other.getID())) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
if (!this.getDSpaceObject().getID().equals(other.getDSpaceObject().getID())) {
|
return this.getDSpaceObject().getID().equals(other.getDSpaceObject().getID());
|
||||||
return false;
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
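
Note on the equals() hunk above: the trailing if / return false / return true block is collapsed into a single return of the comparison itself. The following is a minimal sketch of the same refactoring, using the getDSpaceObject()/getID() accessors that appear in the hunk; the helper class and method names are hypothetical, for illustration only.

import org.dspace.content.MetadataValue;

// Hypothetical helper, only to illustrate the simplification applied above.
class EqualsRefactorSketch {
    // Before: explicit if / return false / return true.
    static boolean sameOwnerBefore(MetadataValue a, MetadataValue b) {
        if (!a.getDSpaceObject().getID().equals(b.getDSpaceObject().getID())) {
            return false;
        }
        return true;
    }

    // After: return the comparison directly; behaviour is unchanged.
    static boolean sameOwnerAfter(MetadataValue a, MetadataValue b) {
        return a.getDSpaceObject().getID().equals(b.getDSpaceObject().getID());
    }
}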
@@ -21,7 +21,7 @@ import org.dspace.content.service.DSpaceObjectService;
 import org.dspace.content.service.MetadataValueService;
 import org.dspace.core.Constants;
 import org.dspace.core.Context;
-import org.dspace.core.LogManager;
+import org.dspace.core.LogHelper;
 import org.springframework.beans.factory.annotation.Autowired;
 
 /**
@@ -80,7 +80,7 @@ public class MetadataValueServiceImpl implements MetadataValueService {
    @Override
    public void update(Context context, MetadataValue metadataValue) throws SQLException {
        metadataValueDAO.save(context, metadataValue);
-       log.info(LogManager.getHeader(context, "update_metadatavalue",
+       log.info(LogHelper.getHeader(context, "update_metadatavalue",
                                      "metadata_value_id=" + metadataValue.getID()));
 
    }
@@ -102,7 +102,7 @@ public class MetadataValueServiceImpl implements MetadataValueService {
 
    @Override
    public void delete(Context context, MetadataValue metadataValue) throws SQLException {
-       log.info(LogManager.getHeader(context, "delete_metadata_value",
+       log.info(LogHelper.getHeader(context, "delete_metadata_value",
                                      " metadata_value_id=" + metadataValue.getID()));
        metadataValueDAO.delete(context, metadataValue);
    }
@@ -0,0 +1,33 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.content;
+
+import java.io.Serializable;
+import java.util.UUID;
+
+import org.hibernate.engine.spi.SharedSessionContractImplementor;
+import org.hibernate.id.UUIDGenerator;
+
+/**
+ * Allows DSpaceObjects to provide a pre-determined UUID
+ *
+ * @author April Herron
+ */
+public class PredefinedUUIDGenerator extends UUIDGenerator {
+
+    @Override
+    public Serializable generate(SharedSessionContractImplementor session, Object object) {
+        if (object instanceof DSpaceObject) {
+            UUID uuid = ((DSpaceObject) object).getPredefinedUUID();
+            if (uuid != null) {
+                return uuid;
+            }
+        }
+        return super.generate(session, object);
+    }
+}
@@ -220,6 +220,7 @@ public class Relationship implements ReloadableEntity<Integer> {
     * Standard getter for the ID for this Relationship
     * @return The ID of this relationship
     */
+    @Override
    public Integer getID() {
        return id;
    }
@@ -8,9 +8,9 @@
 package org.dspace.content;
 
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
-import java.util.LinkedList;
 import java.util.List;
 import java.util.UUID;
 
@@ -251,7 +251,7 @@ public class RelationshipServiceImpl implements RelationshipService {
        }
        List<Relationship> rightRelationships = findByItemAndRelationshipType(context, itemToProcess, relationshipType,
                                                                              isLeft);
-       if (maxCardinality != null && rightRelationships.size() >= maxCardinality) {
+       if (rightRelationships.size() >= maxCardinality) {
            return false;
        }
        return true;
@@ -267,6 +267,7 @@ public class RelationshipServiceImpl implements RelationshipService {
        return StringUtils.equals(leftEntityType, entityTypeToProcess.getLabel());
    }
 
+    @Override
    public Relationship find(Context context, int id) throws SQLException {
        Relationship relationship = relationshipDAO.findByID(context, Relationship.class, id);
        return relationship;
@@ -340,7 +341,7 @@ public class RelationshipServiceImpl implements RelationshipService {
    @Override
    public void delete(Context context, Relationship relationship, boolean copyToLeftItem, boolean copyToRightItem)
        throws SQLException, AuthorizeException {
-       log.info(org.dspace.core.LogManager.getHeader(context, "delete_relationship",
+       log.info(org.dspace.core.LogHelper.getHeader(context, "delete_relationship",
                                                      "relationship_id=" + relationship.getID() + "&" +
                                                          "copyMetadataValuesToLeftItem=" + copyToLeftItem + "&" +
                                                          "copyMetadataValuesToRightItem=" + copyToRightItem));
@@ -357,7 +358,7 @@ public class RelationshipServiceImpl implements RelationshipService {
    @Override
    public void forceDelete(Context context, Relationship relationship, boolean copyToLeftItem, boolean copyToRightItem)
        throws SQLException, AuthorizeException {
-       log.info(org.dspace.core.LogManager.getHeader(context, "delete_relationship",
+       log.info(org.dspace.core.LogHelper.getHeader(context, "delete_relationship",
                                                      "relationship_id=" + relationship.getID() + "&" +
                                                          "copyMetadataValuesToLeftItem=" + copyToLeftItem + "&" +
                                                          "copyMetadataValuesToRightItem=" + copyToRightItem));
@@ -408,7 +409,7 @@ public class RelationshipServiceImpl implements RelationshipService {
        // Set a limit on the total depth of relationships to traverse during a relationship change
        int maxDepth = configurationService.getIntProperty("relationship.update.relateditems.maxdepth", 5);
        // This is the list containing all items which will have changes to their virtual metadata
-       List<Item> itemsToUpdate = new LinkedList<>();
+       List<Item> itemsToUpdate = new ArrayList<>();
        itemsToUpdate.add(relationship.getLeftItem());
        itemsToUpdate.add(relationship.getRightItem());
 
@@ -208,7 +208,7 @@ public class RelationshipType implements ReloadableEntity<Integer> {
 
    /**
     * Standard setter for the leftMinCardinality Integer for this RelationshipType
-    * @param leftMinCardinality The leftMinCardinality Integer that this RelationshipType should recieve
+    * @param leftMinCardinality The leftMinCardinality Integer that this RelationshipType should receive
     */
    public void setLeftMinCardinality(Integer leftMinCardinality) {
        this.leftMinCardinality = leftMinCardinality;
@@ -224,7 +224,7 @@ public class RelationshipType implements ReloadableEntity<Integer> {
 
    /**
     * Standard setter for the leftMaxCardinality Integer for this RelationshipType
-    * @param leftMaxCardinality The leftMaxCardinality Integer that this RelationshipType should recieve
+    * @param leftMaxCardinality The leftMaxCardinality Integer that this RelationshipType should receive
     */
    public void setLeftMaxCardinality(Integer leftMaxCardinality) {
        this.leftMaxCardinality = leftMaxCardinality;
@@ -240,7 +240,7 @@ public class RelationshipType implements ReloadableEntity<Integer> {
 
    /**
     * Standard setter for the rightMinCardinality Integer for this RelationshipType
-    * @param rightMinCardinality The rightMinCardinality Integer that this RelationshipType should recieve
+    * @param rightMinCardinality The rightMinCardinality Integer that this RelationshipType should receive
     */
    public void setRightMinCardinality(Integer rightMinCardinality) {
        this.rightMinCardinality = rightMinCardinality;
@@ -256,7 +256,7 @@ public class RelationshipType implements ReloadableEntity<Integer> {
 
    /**
     * Standard setter for the rightMaxCardinality Integer for this RelationshipType
-    * @param rightMaxCardinality The rightMaxCardinality Integer that this RelationshipType should recieve
+    * @param rightMaxCardinality The rightMaxCardinality Integer that this RelationshipType should receive
     */
    public void setRightMaxCardinality(Integer rightMaxCardinality) {
        this.rightMaxCardinality = rightMaxCardinality;
@@ -318,6 +318,7 @@ public class RelationshipType implements ReloadableEntity<Integer> {
     * Standard getter for the ID of this RelationshipType
     * @return The ID of this RelationshipType
     */
+    @Override
    public Integer getID() {
        return id;
    }
@@ -12,6 +12,7 @@ import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.UUID;
 
 import org.apache.logging.log4j.Logger;
 import org.dspace.app.util.DCInputsReaderException;
@@ -25,7 +26,7 @@ import org.dspace.content.service.ItemService;
 import org.dspace.content.service.WorkspaceItemService;
 import org.dspace.core.Constants;
 import org.dspace.core.Context;
-import org.dspace.core.LogManager;
+import org.dspace.core.LogHelper;
 import org.dspace.eperson.EPerson;
 import org.dspace.event.Event;
 import org.dspace.workflow.WorkflowItem;
@@ -66,12 +67,12 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
 
        if (workspaceItem == null) {
            if (log.isDebugEnabled()) {
-               log.debug(LogManager.getHeader(context, "find_workspace_item",
+               log.debug(LogHelper.getHeader(context, "find_workspace_item",
                                               "not_found,workspace_item_id=" + id));
            }
        } else {
            if (log.isDebugEnabled()) {
-               log.debug(LogManager.getHeader(context, "find_workspace_item",
+               log.debug(LogHelper.getHeader(context, "find_workspace_item",
                                               "workspace_item_id=" + id));
            }
        }
@@ -81,6 +82,12 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
    @Override
    public WorkspaceItem create(Context context, Collection collection, boolean template)
        throws AuthorizeException, SQLException {
+       return create(context, collection, null, template);
+   }
+
+   @Override
+   public WorkspaceItem create(Context context, Collection collection, UUID uuid, boolean template)
+       throws AuthorizeException, SQLException {
        // Check the user has permission to ADD to the collection
        authorizeService.authorizeAction(context, collection, Constants.ADD);
 
@@ -89,7 +96,12 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
 
 
        // Create an item
-       Item item = itemService.create(context, workspaceItem);
+       Item item;
+       if (uuid != null) {
+           item = itemService.create(context, workspaceItem, uuid);
+       } else {
+           item = itemService.create(context, workspaceItem);
+       }
        item.setSubmitter(context.getCurrentUser());
 
        // Now create the policies for the submitter to modify item and contents
@@ -126,7 +138,7 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
        itemService.update(context, item);
        workspaceItem.setItem(item);
 
-       log.info(LogManager.getHeader(context, "create_workspace_item",
+       log.info(LogHelper.getHeader(context, "create_workspace_item",
                                      "workspace_item_id=" + workspaceItem.getID()
                                          + "item_id=" + item.getID() + "collection_id="
                                          + collection.getID()));
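
The two hunks above add a create overload that accepts a pre-determined UUID and route item creation through itemService.create(context, workspaceItem, uuid) when one is supplied. The following is a minimal caller-side sketch only; the lookup through ContentServiceFactory and the calling class are assumptions, not part of this changeset.

import java.util.UUID;

import org.dspace.content.Collection;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.WorkspaceItemService;
import org.dspace.core.Context;

// Hypothetical caller illustrating the new overload.
class PredefinedUuidSubmissionSketch {
    WorkspaceItem createWithKnownUuid(Context context, Collection collection, UUID uuid)
        throws Exception {
        WorkspaceItemService workspaceItemService =
            ContentServiceFactory.getInstance().getWorkspaceItemService();
        // A non-null uuid is passed through to itemService.create(context, workspaceItem, uuid);
        // a null uuid falls back to the original create(context, collection, template) behaviour.
        return workspaceItemService.create(context, collection, uuid, false);
    }
}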
@@ -191,7 +203,7 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
    public void update(Context context, WorkspaceItem workspaceItem) throws SQLException, AuthorizeException {
        // Authorisation is checked by the item.update() method below
 
-       log.info(LogManager.getHeader(context, "update_workspace_item",
+       log.info(LogHelper.getHeader(context, "update_workspace_item",
                                      "workspace_item_id=" + workspaceItem.getID()));
 
        // Update the item
@@ -219,7 +231,7 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
                                             + "original submitter to delete a workspace item");
        }
 
-       log.info(LogManager.getHeader(context, "delete_workspace_item",
+       log.info(LogHelper.getHeader(context, "delete_workspace_item",
                                      "workspace_item_id=" + workspaceItem.getID() + "item_id=" + item.getID()
                                          + "collection_id=" + workspaceItem.getCollection().getID()));
 
@@ -256,7 +268,7 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
        Item item = workspaceItem.getItem();
        authorizeService.authorizeAction(context, item, Constants.WRITE);
 
-       log.info(LogManager.getHeader(context, "delete_workspace_item",
+       log.info(LogHelper.getHeader(context, "delete_workspace_item",
                                      "workspace_item_id=" + workspaceItem.getID() + "item_id=" + item.getID()
                                          + "collection_id=" + workspaceItem.getCollection().getID()));
 
@@ -44,17 +44,16 @@ import org.jdom.xpath.XPath;
 /**
 * ORE ingestion crosswalk
 * <p>
-* Processes an Atom-encoded ORE resource map and attemps to interpret it as a DSpace item
+* Processes an Atom-encoded ORE resource map and attempts to interpret it as a DSpace item.
 *
 * @author Alexey Maslov
-* @version $Revision: 1 $
 */
 public class OREIngestionCrosswalk
    implements IngestionCrosswalk {
    /**
     * log4j category
     */
-   private static Logger log = org.apache.logging.log4j.LogManager.getLogger(OREDisseminationCrosswalk.class);
+   private static final Logger log = org.apache.logging.log4j.LogManager.getLogger();
 
    /* Namespaces */
    public static final Namespace ATOM_NS =
@@ -149,7 +148,7 @@ public class OREIngestionCrosswalk
            xpathDesc.addNamespace(RDF_NS);
            desc = (Element) xpathDesc.selectSingleNode(doc);
        } catch (JDOMException e) {
-           e.printStackTrace();
+           log.warn("Could not find description for {}", href, e);
        }
 
        if (desc != null && desc.getChild("type", RDF_NS).getAttributeValue("resource", RDF_NS)
@@ -122,11 +122,12 @@ public interface RelationshipTypeDAO extends GenericDAO<RelationshipType> {
        throws SQLException;
 
    /**
-    * Count all relationship types that matches provided EntityType object on any side of relationship
+    * Count all RelationshipType objects for which the given EntityType
+    * is equal to either the leftType or the rightType
     *
-    * @param context The relevant DSpace context
-    * @param entityType The EntityType object that will be used to check on
-    * @return
+    * @param context DSpace context object
+    * @param entityType The EntityType object used to check the leftType and rightType properties
+    * @return Total RelationshipType objects
     * @throws SQLException If database error
     */
    public int countByEntityType(Context context, EntityType entityType) throws SQLException;
@@ -23,7 +23,7 @@ import org.dspace.core.Context;
 
 /**
 * Hibernate implementation of the Database Access Object interface class for the BitstreamFormat object.
-* This class is responsible for all database calls for the BitstreamFormat object and is autowired by spring
+* This class is responsible for all database calls for the BitstreamFormat object and is autowired by Spring.
 * This class should never be accessed directly.
 *
 * @author kevinvandevelde at atmire.com
@@ -86,7 +86,7 @@ public class BitstreamFormatDAOImpl extends AbstractHibernateDAO<BitstreamFormat
        Root<BitstreamFormat> bitstreamFormatRoot = criteriaQuery.from(BitstreamFormat.class);
        criteriaQuery.select(bitstreamFormatRoot);
        criteriaQuery.where(criteriaBuilder.equal(bitstreamFormatRoot.get(BitstreamFormat_.shortDescription), desc));
-       return uniqueResult(context, criteriaQuery, false, BitstreamFormat.class, -1, -1);
+       return uniqueResult(context, criteriaQuery, false, BitstreamFormat.class);
    }
 
    @Override
@@ -9,7 +9,7 @@ package org.dspace.content.dao.impl;
 
 import java.sql.SQLException;
 import java.util.AbstractMap;
-import java.util.LinkedList;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import javax.persistence.Query;
@@ -34,7 +34,7 @@ import org.dspace.eperson.Group;
 
 /**
 * Hibernate implementation of the Database Access Object interface class for the Collection object.
-* This class is responsible for all database calls for the Collection object and is autowired by spring
+* This class is responsible for all database calls for the Collection object and is autowired by Spring.
 * This class should never be accessed directly.
 *
 * @author kevinvandevelde at atmire.com
@@ -95,7 +95,7 @@ public class CollectionDAOImpl extends AbstractHibernateDSODAO<Collection> imple
        Root<Collection> collectionRoot = criteriaQuery.from(Collection.class);
        criteriaQuery.select(collectionRoot);
        criteriaQuery.where(criteriaBuilder.equal(collectionRoot.get(Collection_.template), item));
-       return uniqueResult(context, criteriaQuery, false, Collection.class, -1, -1);
+       return uniqueResult(context, criteriaQuery, false, Collection.class);
    }
 
    @Override
@@ -119,7 +119,7 @@ public class CollectionDAOImpl extends AbstractHibernateDSODAO<Collection> imple
        CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Collection.class);
        Root<Collection> collectionRoot = criteriaQuery.from(Collection.class);
        Join<Collection, ResourcePolicy> join = collectionRoot.join("resourcePolicies");
-       List<Predicate> orPredicates = new LinkedList<Predicate>();
+       List<Predicate> orPredicates = new ArrayList<>(actions.size());
        for (Integer action : actions) {
            orPredicates.add(criteriaBuilder.equal(join.get(ResourcePolicy_.actionId), action));
        }
@@ -176,7 +176,7 @@ public class CollectionDAOImpl extends AbstractHibernateDSODAO<Collection> imple
        Query query = createQuery(context, q);
 
        List<Object[]> list = query.getResultList();
-       List<Map.Entry<Collection, Long>> returnList = new LinkedList<>();
+       List<Map.Entry<Collection, Long>> returnList = new ArrayList<>(list.size());
        for (Object[] o : list) {
            returnList.add(new AbstractMap.SimpleEntry<>((Collection) o[0], (Long) o[1]));
        }
@@ -18,6 +18,15 @@ import org.dspace.content.dao.EntityTypeDAO;
 import org.dspace.core.AbstractHibernateDAO;
 import org.dspace.core.Context;
 
+/**
+ * Hibernate implementation of the Database Access Object interface class for
+ * the EntityType object.
+ * This class is responsible for all database calls for the EntityType object
+ * and is autowired by Spring.
+ * This class should never be accessed directly.
+ *
+ * @author kevinvandevelde at atmire.com
+ */
 public class EntityTypeDAOImpl extends AbstractHibernateDAO<EntityType> implements EntityTypeDAO {
 
    @Override
@@ -28,6 +37,6 @@ public class EntityTypeDAOImpl extends AbstractHibernateDAO<EntityType> implemen
        criteriaQuery.select(entityTypeRoot);
        criteriaQuery.where(criteriaBuilder.equal(criteriaBuilder.upper(entityTypeRoot.get(EntityType_.label)),
                                                  entityType.toUpperCase()));
-       return uniqueResult(context, criteriaQuery, true, EntityType.class, -1, -1);
+       return uniqueResult(context, criteriaQuery, true, EntityType.class);
    }
 }
@@ -32,6 +32,7 @@ import org.dspace.eperson.EPerson;
 import org.hibernate.Criteria;
 import org.hibernate.criterion.Criterion;
 import org.hibernate.criterion.DetachedCriteria;
+import org.hibernate.criterion.Order;
 import org.hibernate.criterion.Projections;
 import org.hibernate.criterion.Property;
 import org.hibernate.criterion.Restrictions;
@@ -54,14 +55,14 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
 
    @Override
    public Iterator<Item> findAll(Context context, boolean archived) throws SQLException {
-       Query query = createQuery(context, "FROM Item WHERE inArchive= :in_archive");
+       Query query = createQuery(context, "FROM Item WHERE inArchive=:in_archive ORDER BY id");
        query.setParameter("in_archive", archived);
        return iterate(query);
    }
 
    @Override
    public Iterator<Item> findAll(Context context, boolean archived, int limit, int offset) throws SQLException {
-       Query query = createQuery(context, "FROM Item WHERE inArchive= :in_archive");
+       Query query = createQuery(context, "FROM Item WHERE inArchive=:in_archive ORDER BY id");
        query.setParameter("in_archive", archived);
        query.setFirstResult(offset);
        query.setMaxResults(limit);
@@ -71,7 +72,8 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
 
    @Override
    public Iterator<Item> findAll(Context context, boolean archived, boolean withdrawn) throws SQLException {
-       Query query = createQuery(context, "FROM Item WHERE inArchive= :in_archive or withdrawn = :withdrawn");
+       Query query = createQuery(context,
+                                 "FROM Item WHERE inArchive=:in_archive or withdrawn=:withdrawn ORDER BY id");
        query.setParameter("in_archive", archived);
        query.setParameter("withdrawn", withdrawn);
        return iterate(query);
@@ -89,6 +91,7 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
        if (lastModified != null) {
            queryStr.append(" AND last_modified > :last_modified");
        }
+       queryStr.append(" ORDER BY i.id");
 
        Query query = createQuery(context, queryStr.toString());
        query.setParameter("in_archive", archived);
@@ -102,7 +105,8 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
 
    @Override
    public Iterator<Item> findBySubmitter(Context context, EPerson eperson) throws SQLException {
-       Query query = createQuery(context, "FROM Item WHERE inArchive= :in_archive and submitter= :submitter");
+       Query query = createQuery(context,
+                                 "FROM Item WHERE inArchive=:in_archive and submitter=:submitter ORDER BY id");
        query.setParameter("in_archive", true);
        query.setParameter("submitter", eperson);
        return iterate(query);
@@ -114,7 +118,7 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
        if (!retrieveAllItems) {
            return findBySubmitter(context, eperson);
        }
-       Query query = createQuery(context, "FROM Item WHERE submitter= :submitter");
+       Query query = createQuery(context, "FROM Item WHERE submitter=:submitter ORDER BY id");
        query.setParameter("submitter", eperson);
        return iterate(query);
    }
@@ -146,7 +150,7 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
        if (value != null) {
            hqlQueryString += " AND STR(metadatavalue.value) = :text_value";
        }
-       Query query = createQuery(context, hqlQueryString);
+       Query query = createQuery(context, hqlQueryString + " ORDER BY item.id");
 
        query.setParameter("in_archive", inArchive);
        query.setParameter("metadata_field", metadataField);
@@ -262,6 +266,8 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
                criteria.add(Subqueries.notExists(subcriteria));
            }
        }
+       criteria.addOrder(Order.asc("item.id"));
+
        log.debug(String.format("Running custom query with %d filters", index));
 
        return ((List<Item>) criteria.list()).iterator();
@@ -274,7 +280,7 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
        Query query = createQuery(context,
                                  "SELECT item FROM Item as item join item.metadata metadatavalue " +
                                      "WHERE item.inArchive=:in_archive AND metadatavalue.metadataField = :metadata_field AND " +
-                                     "metadatavalue.authority = :authority");
+                                     "metadatavalue.authority = :authority ORDER BY item.id");
        query.setParameter("in_archive", inArchive);
        query.setParameter("metadata_field", metadataField);
        query.setParameter("authority", authority);
@@ -286,7 +292,7 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
                                             Integer offset) throws SQLException {
        Query query = createQuery(context,
                                  "select i from Item i join i.collections c " +
-                                     "WHERE :collection IN c AND i.inArchive=:in_archive");
+                                     "WHERE :collection IN c AND i.inArchive=:in_archive ORDER BY i.id");
        query.setParameter("collection", collection);
        query.setParameter("in_archive", true);
        if (offset != null) {
@@ -309,6 +315,8 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
            criteriaBuilder.notEqual(itemRoot.get(Item_.owningCollection), collection),
            criteriaBuilder.isMember(collection, itemRoot.get(Item_.collections)),
            criteriaBuilder.isTrue(itemRoot.get(Item_.inArchive))));
+       criteriaQuery.orderBy(criteriaBuilder.asc(itemRoot.get(Item_.id)));
+       criteriaQuery.groupBy(itemRoot.get(Item_.id));
        return list(context, criteriaQuery, false, Item.class, limit, offset).iterator();
    }
 
@@ -327,7 +335,8 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
 
    @Override
    public Iterator<Item> findAllByCollection(Context context, Collection collection) throws SQLException {
-       Query query = createQuery(context, "select i from Item i join i.collections c WHERE :collection IN c");
+       Query query = createQuery(context,
+                                 "select i from Item i join i.collections c WHERE :collection IN c ORDER BY i.id");
        query.setParameter("collection", collection);
 
        return iterate(query);
@@ -336,7 +345,8 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
    @Override
    public Iterator<Item> findAllByCollection(Context context, Collection collection, Integer limit, Integer offset)
        throws SQLException {
-       Query query = createQuery(context, "select i from Item i join i.collections c WHERE :collection IN c");
+       Query query = createQuery(context,
+                                 "select i from Item i join i.collections c WHERE :collection IN c ORDER BY i.id");
        query.setParameter("collection", collection);
 
        if (offset != null) {
@@ -381,7 +391,8 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
    @Override
    public Iterator<Item> findByLastModifiedSince(Context context, Date since)
        throws SQLException {
-       Query query = createQuery(context, "SELECT i FROM item i WHERE last_modified > :last_modified");
+       Query query = createQuery(context,
+                                 "SELECT i FROM item i WHERE last_modified > :last_modified ORDER BY id");
        query.setParameter("last_modified", since, TemporalType.TIMESTAMP);
        return iterate(query);
    }
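
The ItemDAOImpl hunks above all make the same change: each query gains an explicit ORDER BY on the item id. Without one the database may return rows in a different order on each execution, so paged iteration via setFirstResult/setMaxResults can skip or repeat items between pages. A hedged sketch of the pattern in plain JPA (not DSpace API; the entity name and manager are placeholders):

import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.Query;

// Illustration only: a deterministic ORDER BY makes offset/limit pages consistent.
class StablePagingSketch {
    @SuppressWarnings("unchecked")
    List<Object> fetchPage(EntityManager entityManager, int offset, int pageSize) {
        Query query = entityManager.createQuery("FROM Item WHERE inArchive = :in_archive ORDER BY id");
        query.setParameter("in_archive", true);
        query.setFirstResult(offset);   // rows to skip
        query.setMaxResults(pageSize);  // page size
        return query.getResultList();
    }
}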
@@ -8,7 +8,7 @@
 package org.dspace.content.dao.impl;
 
 import java.sql.SQLException;
-import java.util.LinkedList;
+import java.util.ArrayList;
 import java.util.List;
 import javax.persistence.Query;
 import javax.persistence.criteria.CriteriaBuilder;
@@ -63,7 +63,7 @@ public class MetadataSchemaDAOImpl extends AbstractHibernateDAO<MetadataSchema>
        Root<MetadataSchema> metadataSchemaRoot = criteriaQuery.from(MetadataSchema.class);
        criteriaQuery.select(metadataSchemaRoot);
 
-       List<javax.persistence.criteria.Order> orderList = new LinkedList<>();
+       List<javax.persistence.criteria.Order> orderList = new ArrayList<>();
        orderList.add(criteriaBuilder.asc(metadataSchemaRoot.get(MetadataSchema_.id)));
        criteriaQuery.orderBy(orderList);
 
@@ -8,6 +8,7 @@
 package org.dspace.content.dao.impl;
 
 import java.sql.SQLException;
+import java.util.LinkedList;
 import java.util.List;
 import javax.persistence.criteria.CriteriaBuilder;
 import javax.persistence.criteria.CriteriaQuery;
@@ -20,6 +21,15 @@ import org.dspace.content.dao.RelationshipTypeDAO;
 import org.dspace.core.AbstractHibernateDAO;
 import org.dspace.core.Context;
 
+/**
+ * Hibernate implementation of the Database Access Object interface class for
+ * the RelationshipType object.
+ * This class is responsible for all database calls for the RelationshipType
+ * object and is autowired by Spring.
+ * This class should never be accessed directly.
+ *
+ * @author kevinvandevelde at atmire.com
+ */
 public class RelationshipTypeDAOImpl extends AbstractHibernateDAO<RelationshipType> implements RelationshipTypeDAO {
 
    @Override
@@ -36,7 +46,7 @@ public class RelationshipTypeDAOImpl extends AbstractHibernateDAO<RelationshipTy
            criteriaBuilder.equal(relationshipTypeRoot.get(RelationshipType_.rightType), rightType),
            criteriaBuilder.equal(relationshipTypeRoot.get(RelationshipType_.leftwardType), leftwardType),
            criteriaBuilder.equal(relationshipTypeRoot.get(RelationshipType_.rightwardType), rightwardType)));
-       return uniqueResult(context, criteriaQuery, false, RelationshipType.class, -1, -1);
+       return uniqueResult(context, criteriaQuery, false, RelationshipType.class);
    }
 
    @Override
@@ -83,7 +93,9 @@ public class RelationshipTypeDAOImpl extends AbstractHibernateDAO<RelationshipTy
                    .equal(relationshipTypeRoot.get(RelationshipType_.rightType), entityType)
            )
        );
-       criteriaQuery.orderBy(criteriaBuilder.asc(relationshipTypeRoot.get(RelationshipType_.ID)));
+       List<javax.persistence.criteria.Order> orderList = new LinkedList<>();
+       orderList.add(criteriaBuilder.asc(relationshipTypeRoot.get(RelationshipType_.ID)));
+       criteriaQuery.orderBy(orderList);
        return list(context, criteriaQuery, false, RelationshipType.class, limit, offset);
    }
 
@@ -119,13 +131,11 @@ public class RelationshipTypeDAOImpl extends AbstractHibernateDAO<RelationshipTy
        CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, RelationshipType.class);
        Root<RelationshipType> relationshipTypeRoot = criteriaQuery.from(RelationshipType.class);
        criteriaQuery.select(relationshipTypeRoot);
-       criteriaQuery.where(
-           criteriaBuilder.or(criteriaBuilder.
-               equal(relationshipTypeRoot.get(RelationshipType_.leftType), entityType),
-               criteriaBuilder
-                   .equal(relationshipTypeRoot.get(RelationshipType_.rightType), entityType)
-           )
-       );
+       criteriaQuery.where(criteriaBuilder.or(
+           criteriaBuilder.equal(relationshipTypeRoot.get(RelationshipType_.leftType), entityType),
+           criteriaBuilder.equal(relationshipTypeRoot.get(RelationshipType_.rightType), entityType)
+       ));
        return count(context, criteriaQuery, criteriaBuilder, relationshipTypeRoot);
    }
 
 }
@@ -19,7 +19,7 @@ import org.dspace.core.Context;
 
 /**
 * Hibernate implementation of the Database Access Object interface class for the Site object.
-* This class is responsible for all database calls for the Site object and is autowired by spring
+* This class is responsible for all database calls for the Site object and is autowired by Spring.
 * This class should never be accessed directly.
 *
 * @author kevinvandevelde at atmire.com
@@ -35,6 +35,6 @@ public class SiteDAOImpl extends AbstractHibernateDAO<Site> implements SiteDAO {
        CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Site.class);
        Root<Site> siteRoot = criteriaQuery.from(Site.class);
        criteriaQuery.select(siteRoot);
-       return uniqueResult(context, criteriaQuery, true, Site.class, -1, -1);
+       return uniqueResult(context, criteriaQuery, true, Site.class);
    }
 }
@@ -31,7 +31,7 @@ import org.dspace.eperson.Group;
 
 /**
 * Hibernate implementation of the Database Access Object interface class for the WorkspaceItem object.
-* This class is responsible for all database calls for the WorkspaceItem object and is autowired by spring
+* This class is responsible for all database calls for the WorkspaceItem object and is autowired by Spring.
 * This class should never be accessed directly.
 *
 * @author kevinvandevelde at atmire.com
@@ -81,7 +81,7 @@ public class WorkspaceItemDAOImpl extends AbstractHibernateDAO<WorkspaceItem> im
        Root<WorkspaceItem> workspaceItemRoot = criteriaQuery.from(WorkspaceItem.class);
        criteriaQuery.select(workspaceItemRoot);
        criteriaQuery.where(criteriaBuilder.equal(workspaceItemRoot.get(WorkspaceItem_.item), i));
-       return uniqueResult(context, criteriaQuery, false, WorkspaceItem.class, -1, -1);
+       return uniqueResult(context, criteriaQuery, false, WorkspaceItem.class);
    }
 
    @Override
@@ -0,0 +1,46 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.content.logic;
+
+import org.apache.log4j.Logger;
+import org.dspace.content.Item;
+import org.dspace.core.Context;
+
+/**
+ * The default filter, a very simple implementation of Filter / LogicalStatement
+ * The idea is to have this as a wrapper / root class for all logical operations, so it takes a single
+ * statement as a property (unlike an operator) and takes no parameters (unlike a condition)
+ *
+ * @author Kim Shepherd
+ * @version $Revision$
+ */
+public class DefaultFilter implements Filter {
+    private LogicalStatement statement;
+    private static Logger log = Logger.getLogger(Filter.class);
+
+    /**
+     * Set statement from Spring configuration in item-filters.xml
+     * Be aware that this is singular not plural. A filter can have one sub-statement only.
+     *
+     * @param statement LogicalStatement of this filter (operator, condition, or another filter)
+     */
+    public void setStatement(LogicalStatement statement) {
+        this.statement = statement;
+    }
+
+    /**
+     * Get the result of logical evaluation for an item
+     * @param context DSpace context
+     * @param item    Item to evaluate
+     * @return boolean
+     * @throws LogicalStatementException
+     */
+    public boolean getResult(Context context, Item item) throws LogicalStatementException {
+        return this.statement.getResult(context, item);
+    }
+}
@@ -0,0 +1,35 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.content.logic;
+
+import org.dspace.content.Item;
+import org.dspace.core.Context;
+
+/**
+ * The interface for Filter currently doesn't add anything to LogicalStatement but inherits from it
+ * just to keep naming / reflection clean, and in case Filters should do anything additional in future.
+ * We need this as filters have to be specified in the spring configuration (item-filters.xml).
+ * Filters are the top level elements of the logic. Only logical statements that implement this interface
+ * are allowed to be the root element of a spring configuration (item-filters.xml) of this logic framework.
+ * A filter is just helping to differentiate between logical statement that can be used as root elements and
+ * logical statement that shouldn't be use as root element. A filter may contain only one substatement.
+ *
+ * @author Kim Shepherd
+ * @see org.dspace.content.logic.DefaultFilter
+ */
+public interface Filter extends LogicalStatement {
+    /**
+     * Get the result of logical evaluation for an item
+     * @param context DSpace context
+     * @param item    Item to evaluate
+     * @return boolean
+     * @throws LogicalStatementException
+     */
+    @Override
+    boolean getResult(Context context, Item item) throws LogicalStatementException;
+}
@@ -0,0 +1,31 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.content.logic;
+
+import org.dspace.content.Item;
+import org.dspace.core.Context;
+
+/**
+ * The base interface used by all logic classes: all operators and conditions are logical statements.
+ * All statements must accept an Item object and return a boolean result.
+ * The philosophy is that because Filter, Condition, Operator classes implement getResult(), they can all be
+ * used as sub-statements in other Filters and Operators.
+ *
+ * @author Kim Shepherd
+ * @version $Revision$
+ */
+public interface LogicalStatement {
+    /**
+     * Get the result of logical evaluation for an item
+     * @param context DSpace context
+     * @param item    Item to evaluate
+     * @return boolean result of evaluation
+     * @throws LogicalStatementException
+     */
+    boolean getResult(Context context, Item item) throws LogicalStatementException;
+}
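
The three new classes above (LogicalStatement, Filter, DefaultFilter) form the core of the item-filter logic framework: LogicalStatement is the single-method contract, Filter marks statements allowed at the root of an item-filters.xml definition, and DefaultFilter simply delegates to its one configured sub-statement. Filters are normally wired up in Spring configuration; the following is a minimal programmatic sketch only, and it assumes Item.isArchived() as the example predicate.

import org.dspace.content.Item;
import org.dspace.content.logic.DefaultFilter;
import org.dspace.content.logic.LogicalStatement;
import org.dspace.content.logic.LogicalStatementException;
import org.dspace.core.Context;

// Illustration of how statements compose; real filters come from item-filters.xml.
class ItemFilterSketch {
    static boolean isArchived(Context context, Item item) throws LogicalStatementException {
        // LogicalStatement has a single abstract method, so a lambda can stand in for a condition.
        LogicalStatement archivedStatement = (ctx, i) -> i.isArchived();
        DefaultFilter filter = new DefaultFilter();
        filter.setStatement(archivedStatement);
        return filter.getResult(context, item);
    }
}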
@@ -0,0 +1,35 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.content.logic;

/**
 * Exception for errors encountered while evaluating logical statements
 * defined as spring beans.
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public class LogicalStatementException extends RuntimeException {

    public LogicalStatementException() {
        super();
    }

    public LogicalStatementException(String s, Throwable t) {
        super(s, t);
    }

    public LogicalStatementException(String s) {
        super(s);
    }

    public LogicalStatementException(Throwable t) {
        super(t);
    }

}
@@ -0,0 +1,143 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.content.logic;

import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.logging.log4j.Logger;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.kernel.ServiceManager;
import org.dspace.services.factory.DSpaceServicesFactory;

/**
 * A command-line runner used for testing a logical filter against a single item, or against all items
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public class TestLogicRunner {

    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(TestLogicRunner.class);

    /**
     * Default constructor
     */
    private TestLogicRunner() { }

    /**
     * Main runner method for CLI usage
     * @param argv array of command-line arguments
     */
    public static void main(String[] argv) {
        System.out.println("Starting test of the Spring logic item filters");

        // initialize options
        Options options = new Options();

        options.addOption("h", "help", false, "Help");
        options.addOption("l", "list", false, "List filters");
        options.addOption("f", "filter", true, "Use filter <filter>");
        options.addOption("i", "item", true, "Run filter over item <handle>");
        options.addOption("a", "all", false, "Run filter over all items");

        // initialize parser
        CommandLineParser parser = new PosixParser();
        CommandLine line = null;
        HelpFormatter helpFormatter = new HelpFormatter();

        try {
            line = parser.parse(options, argv);
        } catch (ParseException ex) {
            System.out.println(ex.getMessage());
            System.exit(1);
        }

        if (line.hasOption("help")) {
            helpFormatter.printHelp("\nTest the DSpace logical item filters\n", options);
            System.exit(0);
        }

        // Create a context
        Context c = new Context(Context.Mode.READ_ONLY);
        //c.turnOffAuthorisationSystem();
        ServiceManager manager = DSpaceServicesFactory.getInstance().getServiceManager();

        if (line.hasOption("list")) {
            // List filters and exit
            List<Filter> filters = manager.getServicesByType(Filter.class);
            for (Filter filter : filters) {
                System.out.println(filter.getClass().toString());
            }
            System.out.println("See item-filters.xml spring config for filter names");
            System.exit(0);
        }

        Filter filter;

        if (line.hasOption("filter")) {
            String filterName = line.getOptionValue("filter");
            filter = manager.getServiceByName(filterName, Filter.class);
            if (filter == null) {
                System.out.println("Error loading filter: " + filterName);
                System.exit(1);
            }

            if (line.hasOption("item")) {
                String handle = line.getOptionValue("item");

                HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
                try {
                    DSpaceObject dso = handleService.resolveToObject(c, handle);
                    if (Constants.typeText[dso.getType()].equals("ITEM")) {
                        Item item = (Item) dso;
                        System.out.println(filter.getResult(c, item));
                    } else {
                        System.out.println(handle + " is not an ITEM");
                    }
                } catch (SQLException | LogicalStatementException e) {
                    System.out.println("Error encountered processing item " + handle + ": " + e.getMessage());
                }

            } else if (line.hasOption("all")) {
                ItemService itemService = ContentServiceFactory.getInstance().getItemService();
                try {
                    Iterator<Item> itemIterator = itemService.findAll(c);
                    while (itemIterator.hasNext()) {
                        Item i = itemIterator.next();
                        System.out.println(
                            "Testing '" + filter + "' on item " + i.getHandle() + " ('" + i.getName() + "')"
                        );
                        System.out.println(filter.getResult(c, i));
                    }
                } catch (SQLException | LogicalStatementException e) {
                    System.out.println("Error encountered processing items: " + e.getMessage());
                }
            } else {
                helpFormatter.printHelp("\nTest the DSpace logical item filters\n", options);
            }
        }

    }

}
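Usage note (an assumption, not part of the diff): as a plain main() class this runner would typically be launched through DSpace's standard dsrun launcher, for example "[dspace]/bin/dspace dsrun org.dspace.content.logic.TestLogicRunner -l" to list the configured filter beans, or "-f <filterBeanName> -i <handle>" to evaluate a single filter against a single item, where <filterBeanName> is whatever bean id is defined in item-filters.xml.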
@@ -0,0 +1,91 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.content.logic.condition;

import java.util.Map;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.logic.LogicalStatementException;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.handle.service.HandleService;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * Abstract base class for conditions, implementing the basic parameter getter and setter
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public abstract class AbstractCondition implements Condition {

    // Parameters map (injected, required -- see setter annotation)
    private Map<String, Object> parameters;

    // Declare and instantiate spring services
    //@Autowired(required = true)
    protected ItemService itemService = ContentServiceFactory.getInstance().getItemService();
    //@Autowired(required = true)
    protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
    @Autowired(required = true)
    protected HandleService handleService;

    // Logging
    Logger log = LogManager.getLogger(AbstractCondition.class);

    /**
     * Get parameters set by spring configuration in item-filters.xml
     * These could be any kind of map that the extending condition class needs for evaluation
     * @return map of parameters
     * @throws LogicalStatementException
     */
    @Override
    public Map<String, Object> getParameters() throws LogicalStatementException {
        return this.parameters;
    }

    /**
     * Set parameters - used by Spring when creating beans from item-filters.xml
     * These could be any kind of map that the extending condition class needs for evaluation
     * @param parameters map of parameters
     * @throws LogicalStatementException
     */
    @Autowired(required = true)
    @Override
    public void setParameters(Map<String, Object> parameters) throws LogicalStatementException {
        this.parameters = parameters;
    }

    /**
     * Get the result of logical evaluation for an item
     * @param context   DSpace context
     * @param item      Item to evaluate
     * @return boolean result of evaluation
     * @throws LogicalStatementException
     */
    @Override
    public boolean getResult(Context context, Item item) throws LogicalStatementException {
        if (item == null) {
            log.error("Error evaluating item. Passed item is null, returning false");
            return false;
        }
        if (context == null) {
            throw new IllegalStateException("Context is null");
        }
        return true;
    }

    @Override
    public void setItemService(ItemService itemService) {
        this.itemService = itemService;
    }
}
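Note (not part of this changeset): a concrete condition only needs to extend AbstractCondition and override getResult(); the super call reuses the null checks above, and getParameters() exposes whatever map was injected from item-filters.xml. A minimal sketch follows, in which the class name HasHandleCondition and the "required" parameter are invented for illustration.

package org.dspace.content.logic.condition;

import org.dspace.content.Item;
import org.dspace.content.logic.LogicalStatementException;
import org.dspace.core.Context;

// Illustrative sketch only: a minimal condition built on AbstractCondition.
public class HasHandleCondition extends AbstractCondition {

    @Override
    public boolean getResult(Context context, Item item) throws LogicalStatementException {
        // The base implementation returns false for a null item and throws for a null context
        if (!super.getResult(context, item)) {
            return false;
        }

        boolean handlePresent = (item.getHandle() != null);
        Object required = getParameters().get("required");
        if ("false".equals(required)) {
            // hypothetical parameter: when "required" is "false", invert the test
            return !handlePresent;
        }
        return handlePresent;
    }
}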
@@ -0,0 +1,72 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.content.logic.condition;

import java.util.List;

import org.dspace.content.Bundle;
import org.dspace.content.Item;
import org.dspace.content.logic.LogicalStatementException;
import org.dspace.core.Context;

/**
 * A condition to evaluate an item based on how many bitstreams it has in a particular bundle
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public class BitstreamCountCondition extends AbstractCondition {
    /**
     * Return true if bitstream count is within bounds of min and/or max parameters
     * Return false if out of bounds
     * @param context   DSpace context
     * @param item      Item to evaluate
     * @return boolean result of evaluation
     * @throws LogicalStatementException
     */
    @Override
    public boolean getResult(Context context, Item item) throws LogicalStatementException {

        // This super call just throws some useful exceptions if required objects are null
        super.getResult(context, item);

        int min = -1;
        if (getParameters().get("min") != null) {
            min = Integer.parseInt((String)getParameters().get("min"));
        }
        int max = -1;
        if (getParameters().get("max") != null) {
            max = Integer.parseInt((String)getParameters().get("max"));
        }
        String bundleName = (String)getParameters().get("bundle");
        if (min < 0 && max < 0) {
            throw new LogicalStatementException("Either min or max parameter must be 0 or bigger.");
        }

        List<Bundle> bundles;
        int count = 0;

        if (bundleName != null) {
            bundles = item.getBundles(bundleName);
        } else {
            bundles = item.getBundles();
        }

        for (Bundle bundle : bundles) {
            count += bundle.getBitstreams().size();
        }

        if (min < 0) {
            return (count <= max);
        }
        if (max < 0) {
            return (count >= min);
        }
        return (count <= max && count >= min);
    }
}
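Note (not part of this changeset): the "min", "max" and "bundle" parameters arrive as strings in the parameters map, normally from a Spring bean definition in item-filters.xml. Below is a minimal sketch wiring them by hand; the wrapper class and the example bounds are assumptions, while ORIGINAL is the usual DSpace content bundle name, and a running DSpace kernel is assumed since AbstractCondition resolves services on construction.

package org.dspace.content.logic.condition;

import java.util.HashMap;
import java.util.Map;

import org.dspace.content.Item;
import org.dspace.content.logic.LogicalStatementException;
import org.dspace.core.Context;

// Illustrative sketch only: wiring BitstreamCountCondition parameters programmatically.
public class BitstreamCountExample {

    public static boolean hasOneToFiveOriginalBitstreams(Context context, Item item)
        throws LogicalStatementException {
        Map<String, Object> parameters = new HashMap<>();
        parameters.put("min", "1");            // example lower bound
        parameters.put("max", "5");            // example upper bound
        parameters.put("bundle", "ORIGINAL");  // count bitstreams in the ORIGINAL bundle only

        BitstreamCountCondition condition = new BitstreamCountCondition();
        condition.setParameters(parameters);
        return condition.getResult(context, item);
    }
}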
@@ -0,0 +1,56 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.content.logic.condition;

import java.util.Map;

import org.dspace.content.Item;
import org.dspace.content.logic.LogicalStatement;
import org.dspace.content.logic.LogicalStatementException;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;

/**
 * The Condition interface
 *
 * A condition is a single logical statement that tests an item for some property. Every condition is a
 * logical statement; an operator is also a logical statement, but is not a condition.
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public interface Condition extends LogicalStatement {

    /**
     * Set parameters - used by Spring
     * @param parameters map of parameters
     * @throws LogicalStatementException
     */
    void setParameters(Map<String, Object> parameters) throws LogicalStatementException;

    /**
     * Get parameters set by Spring in item-filters.xml
     * These could be any kind of map that the extending condition class needs for evaluation
     * @return map of parameters
     * @throws LogicalStatementException
     */
    Map<String, Object> getParameters() throws LogicalStatementException;

    /**
     * Get the result of logical evaluation for an item
     * @param context   DSpace context
     * @param item      Item to evaluate
     * @return boolean result of evaluation
     * @throws LogicalStatementException
     */
    @Override
    boolean getResult(Context context, Item item) throws LogicalStatementException;

    public void setItemService(ItemService itemService);

}
@@ -0,0 +1,80 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.content.logic.condition;

import java.sql.SQLException;
import java.util.List;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.logic.LogicalStatementException;
import org.dspace.core.Context;

/**
 * A condition that accepts a list of collection handles and returns true
 * if the item belongs to any of them.
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public class InCollectionCondition extends AbstractCondition {
    private static Logger log = LogManager.getLogger(InCollectionCondition.class);

    /**
     * Return true if item is in one of the specified collections
     * Return false if not
     * @param context   DSpace context
     * @param item      Item to evaluate
     * @return boolean result of evaluation
     * @throws LogicalStatementException
     */
    @Override
    public boolean getResult(Context context, Item item) throws LogicalStatementException {

        List<String> collectionHandles = (List<String>)getParameters().get("collections");

        // Look for the handle among an archived item's collections - this test will only work after submission
        // and archival is complete
        List<Collection> itemCollections = item.getCollections();
        for (Collection collection : itemCollections) {
            if (collectionHandles.contains(collection.getHandle())) {
                log.debug("item " + item.getHandle() + " is in collection "
                    + collection.getHandle() + ", returning true");
                return true;
            }
        }

        // Look for the parent object of the item. This is important as the item.getOwningCollection method
        // may return null, even though the item itself does have a parent object, at the point of archival
        try {
            DSpaceObject parent = itemService.getParentObject(context, item);
            if (parent != null) {
                log.debug("Got parent DSO for item: " + parent.getID().toString());
                log.debug("Parent DSO handle: " + parent.getHandle());
                if (collectionHandles.contains(parent.getHandle())) {
                    log.debug("item " + item.getHandle() + " is in collection "
                        + parent.getHandle() + ", returning true");
                    return true;
                }
            } else {
                log.debug("Parent DSO is null...");
            }
        } catch (SQLException e) {
            log.error("Error obtaining parent DSO", e);
            throw new LogicalStatementException(e);
        }

        // If we reach this statement, the item did not appear in any of the collections from the parameters
        log.debug("item " + item.getHandle() + " not found in the passed collection handle list");

        return false;
    }
}
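Note (not part of this changeset): unlike the string parameters above, the "collections" parameter is a List of collection handle strings. A minimal sketch wiring it by hand; the wrapper class and the handle values below are placeholders only.

package org.dspace.content.logic.condition;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

import org.dspace.content.Item;
import org.dspace.content.logic.LogicalStatementException;
import org.dspace.core.Context;

// Illustrative sketch only: wiring InCollectionCondition with a list of handles.
public class InCollectionExample {

    public static boolean isInKnownCollections(Context context, Item item)
        throws LogicalStatementException {
        Map<String, Object> parameters = new HashMap<>();
        // placeholder handles; in item-filters.xml this would be a <list> of <value> entries
        parameters.put("collections", Arrays.asList("123456789/2", "123456789/3"));

        InCollectionCondition condition = new InCollectionCondition();
        condition.setParameters(parameters);
        return condition.getResult(context, item);
    }
}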
@@ -0,0 +1,90 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.content.logic.condition;

import java.sql.SQLException;
import java.util.List;

import org.apache.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.logic.LogicalStatementException;
import org.dspace.core.Context;

/**
 * A condition that accepts a list of community handles and returns true
 * if the item belongs to any of them.
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public class InCommunityCondition extends AbstractCondition {
    private static Logger log = Logger.getLogger(InCommunityCondition.class);

    /**
     * Return true if item is in one of the specified communities
     * Return false if not
     * @param context   DSpace context
     * @param item      Item to evaluate
     * @return boolean result of evaluation
     * @throws LogicalStatementException
     */
    @Override
    public boolean getResult(Context context, Item item) throws LogicalStatementException {

        List<String> communityHandles = (List<String>)getParameters().get("communities");
        List<Collection> itemCollections = item.getCollections();

        // Check communities of item.getCollections() - this will only see collections if the item is archived
        for (Collection collection : itemCollections) {
            try {
                List<Community> communities = collection.getCommunities();
                for (Community community : communities) {
                    if (communityHandles.contains(community.getHandle())) {
                        return true;
                    }
                }
            } catch (SQLException e) {
                log.error(e.getMessage());
                throw new LogicalStatementException(e);
            }
        }

        // Look for the parent object of the item. This is important as the item.getOwningCollection method
        // may return null, even though the item itself does have a parent object, at the point of archival
        try {
            DSpaceObject parent = itemService.getParentObject(context, item);
            if (parent instanceof Collection) {
                log.debug("Got parent DSO for item: " + parent.getID().toString());
                log.debug("Parent DSO handle: " + parent.getHandle());
                try {
                    // Now iterate communities of this parent collection
                    Collection collection = (Collection)parent;
                    List<Community> communities = collection.getCommunities();
                    for (Community community : communities) {
                        if (communityHandles.contains(community.getHandle())) {
                            return true;
                        }
                    }
                } catch (SQLException e) {
                    log.error(e.getMessage());
                    throw new LogicalStatementException(e);
                }
            } else {
                log.debug("Parent DSO is null or is not a Collection...");
            }
        } catch (SQLException e) {
            log.error("Error obtaining parent DSO", e);
            throw new LogicalStatementException(e);
        }

        return false;
    }
}
@@ -0,0 +1,37 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.content.logic.condition;

import org.apache.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.content.logic.LogicalStatementException;
import org.dspace.core.Context;

/**
 * A condition that returns true if the item is withdrawn
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public class IsWithdrawnCondition extends AbstractCondition {
    private static Logger log = Logger.getLogger(IsWithdrawnCondition.class);

    /**
     * Return true if item is withdrawn
     * Return false if not
     * @param context   DSpace context
     * @param item      Item to evaluate
     * @return boolean result of evaluation
     * @throws LogicalStatementException
     */
    @Override
    public boolean getResult(Context context, Item item) throws LogicalStatementException {
        log.debug("Result of isWithdrawn is " + item.isWithdrawn());
        return item.isWithdrawn();
    }
}
@@ -0,0 +1,66 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.content.logic.condition;

import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.logic.LogicalStatementException;
import org.dspace.core.Context;

/**
 * A condition that returns true if a pattern (regex) matches any value
 * in a given metadata field
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public class MetadataValueMatchCondition extends AbstractCondition {

    private static Logger log = Logger.getLogger(MetadataValueMatchCondition.class);

    /**
     * Return true if any value for a specified field in the item matches a specified regex pattern
     * Return false if not
     * @param context   DSpace context
     * @param item      Item to evaluate
     * @return boolean result of evaluation
     * @throws LogicalStatementException
     */
    @Override
    public boolean getResult(Context context, Item item) throws LogicalStatementException {
        String field = (String)getParameters().get("field");
        if (field == null) {
            return false;
        }

        String[] fieldParts = field.split("\\.");
        String schema = (fieldParts.length > 0 ? fieldParts[0] : null);
        String element = (fieldParts.length > 1 ? fieldParts[1] : null);
        String qualifier = (fieldParts.length > 2 ? fieldParts[2] : null);

        List<MetadataValue> values = itemService.getMetadata(item, schema, element, qualifier, Item.ANY);
        for (MetadataValue value : values) {
            if (getParameters().get("pattern") instanceof String) {
                String pattern = (String)getParameters().get("pattern");
                log.debug("logic for " + item.getHandle() + ": pattern passed is " + pattern
                    + ", checking value " + value.getValue());
                Pattern p = Pattern.compile(pattern);
                Matcher m = p.matcher(value.getValue());
                if (m.find()) {
                    return true;
                }
            }
        }
        return false;
    }
}
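Note (not part of this changeset): the "field" parameter is a schema.element[.qualifier] string that getResult() splits on '.', and "pattern" is a regular expression tested against each metadata value. A minimal sketch follows; the wrapper class, field and pattern are example values only.

package org.dspace.content.logic.condition;

import java.util.HashMap;
import java.util.Map;

import org.dspace.content.Item;
import org.dspace.content.logic.LogicalStatementException;
import org.dspace.core.Context;

// Illustrative sketch only: wiring MetadataValueMatchCondition with a field and a regex.
public class MetadataValueMatchExample {

    public static boolean isTypeArticle(Context context, Item item)
        throws LogicalStatementException {
        Map<String, Object> parameters = new HashMap<>();
        parameters.put("field", "dc.type");      // split into schema "dc" and element "type"
        parameters.put("pattern", "^Article$");  // regex matched against each dc.type value

        MetadataValueMatchCondition condition = new MetadataValueMatchCondition();
        condition.setParameters(parameters);
        return condition.getResult(context, item);
    }
}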
@@ -0,0 +1,73 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.content.logic.condition;

import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.logic.LogicalStatementException;
import org.dspace.core.Context;

/**
 * A condition that returns true if any pattern in a list of patterns matches any value
 * in a given metadata field
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public class MetadataValuesMatchCondition extends AbstractCondition {

    private static Logger log = Logger.getLogger(MetadataValuesMatchCondition.class);

    /**
     * Return true if any value for a specified field in the item matches any of the specified regex patterns
     * Return false if not
     * @param context   DSpace context
     * @param item      Item to evaluate
     * @return boolean result of evaluation
     * @throws LogicalStatementException
     */
    @Override
    public boolean getResult(Context context, Item item) throws LogicalStatementException {
        String field = (String)getParameters().get("field");
        if (field == null) {
            return false;
        }

        String[] fieldParts = field.split("\\.");
        String schema = (fieldParts.length > 0 ? fieldParts[0] : null);
        String element = (fieldParts.length > 1 ? fieldParts[1] : null);
        String qualifier = (fieldParts.length > 2 ? fieldParts[2] : null);

        List<MetadataValue> values = itemService.getMetadata(item, schema, element, qualifier, Item.ANY);
        for (MetadataValue value : values) {
            if (getParameters().get("patterns") instanceof List) {
                List<String> patternList = (List<String>)getParameters().get("patterns");
                // If no patterns were passed, log an error and treat the condition as true
                if (patternList == null || patternList.isEmpty()) {
                    log.error("No patterns were passed for metadata value matching, defaulting to 'true'");
                    return true;
                }
                for (String pattern : patternList) {
                    log.debug("logic for " + item.getHandle() + ": pattern passed is " + pattern
                        + ", checking value " + value.getValue());
                    Pattern p = Pattern.compile(pattern);
                    Matcher m = p.matcher(value.getValue());
                    if (m.find()) {
                        return true;
                    }
                }
            }
        }
        return false;
    }
}
@@ -0,0 +1,64 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.content.logic.condition;

import java.sql.SQLException;
import java.util.List;

import org.apache.log4j.Logger;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.Item;
import org.dspace.content.logic.LogicalStatementException;
import org.dspace.core.Constants;
import org.dspace.core.Context;

/**
 * A condition that accepts a group and action parameter and returns true if the group
 * can perform the action on a given item
 *
 * @author Kim Shepherd
 * @version $Revision$
 */
public class ReadableByGroupCondition extends AbstractCondition {
    private static Logger log = Logger.getLogger(ReadableByGroupCondition.class);

    // Authorize service
    AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();

    /**
     * Return true if this item allows a specified action (e.g. READ, WRITE, ADD) by a specified group
     * @param context   DSpace context
     * @param item      Item to evaluate
     * @return boolean result of evaluation
     * @throws LogicalStatementException
     */
    @Override
    public boolean getResult(Context context, Item item) throws LogicalStatementException {

        String group = (String)getParameters().get("group");
        String action = (String)getParameters().get("action");

        try {
            List<ResourcePolicy> policies = authorizeService
                .getPoliciesActionFilter(context, item, Constants.getActionID(action));
            for (ResourcePolicy policy : policies) {
                if (policy.getGroup() != null && policy.getGroup().getName().equals(group)) {
                    return true;
                }
            }
        } catch (SQLException e) {
            log.error("Error trying to read policies for " + item.getHandle() + ": " + e.getMessage());
            throw new LogicalStatementException(e);
        }
        log.debug("item " + item.getHandle() + " cannot be accessed by group " + group + " for action " + action);

        return false;
    }
}
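Note (not part of this changeset): the "group" parameter is a DSpace group name and "action" is an action name resolved through Constants.getActionID(). A minimal sketch checking whether the Anonymous group can READ an item; the wrapper class is invented for illustration.

package org.dspace.content.logic.condition;

import java.util.HashMap;
import java.util.Map;

import org.dspace.content.Item;
import org.dspace.content.logic.LogicalStatementException;
import org.dspace.core.Context;

// Illustrative sketch only: the usual check for public readability.
public class ReadableByGroupExample {

    public static boolean isPubliclyReadable(Context context, Item item)
        throws LogicalStatementException {
        Map<String, Object> parameters = new HashMap<>();
        parameters.put("group", "Anonymous");  // built-in DSpace group for unauthenticated users
        parameters.put("action", "READ");      // resolved to an action id via Constants.getActionID()

        ReadableByGroupCondition condition = new ReadableByGroupCondition();
        condition.setParameters(parameters);
        return condition.getResult(context, item);
    }
}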
Some files were not shown because too many files have changed in this diff.