Merge branch 'main' of https://github.com/DSpace/DSpace into feature-issue2816-openaire-funding-external-lookup

This commit is contained in:
Paulo Graça
2021-10-14 11:06:28 +01:00
60 changed files with 1804 additions and 562 deletions

View File

@@ -743,7 +743,7 @@
<dependency> <dependency>
<groupId>org.flywaydb</groupId> <groupId>org.flywaydb</groupId>
<artifactId>flyway-core</artifactId> <artifactId>flyway-core</artifactId>
<version>6.5.5</version> <version>6.5.7</version>
</dependency> </dependency>
<!-- Google Analytics --> <!-- Google Analytics -->

View File

@@ -25,6 +25,7 @@ import javax.annotation.Nullable;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.app.util.RelationshipUtils;
import org.dspace.authority.AuthorityValue; import org.dspace.authority.AuthorityValue;
import org.dspace.authority.factory.AuthorityServiceFactory; import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityValueService; import org.dspace.authority.service.AuthorityValueService;
@@ -1793,36 +1794,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
*/ */
private RelationshipType matchRelationshipType(List<RelationshipType> relTypes, private RelationshipType matchRelationshipType(List<RelationshipType> relTypes,
String targetType, String originType, String originTypeName) { String targetType, String originType, String originTypeName) {
RelationshipType foundRelationshipType = null; return RelationshipUtils.matchRelationshipType(relTypes, targetType, originType, originTypeName);
if (originTypeName.split("\\.").length > 1) {
originTypeName = originTypeName.split("\\.")[1];
}
for (RelationshipType relationshipType : relTypes) {
// Is origin type leftward or righward
boolean isLeft = false;
if (relationshipType.getLeftType().getLabel().equalsIgnoreCase(originType)) {
isLeft = true;
}
if (isLeft) {
// Validate typeName reference
if (!relationshipType.getLeftwardType().equalsIgnoreCase(originTypeName)) {
continue;
}
if (relationshipType.getLeftType().getLabel().equalsIgnoreCase(originType) &&
relationshipType.getRightType().getLabel().equalsIgnoreCase(targetType)) {
foundRelationshipType = relationshipType;
}
} else {
if (!relationshipType.getRightwardType().equalsIgnoreCase(originTypeName)) {
continue;
}
if (relationshipType.getLeftType().getLabel().equalsIgnoreCase(targetType) &&
relationshipType.getRightType().getLabel().equalsIgnoreCase(originType)) {
foundRelationshipType = relationshipType;
}
}
}
return foundRelationshipType;
} }
} }

View File

@@ -13,11 +13,8 @@ import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.UUID; import java.util.UUID;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.CommandLineParser; import org.apache.commons.lang3.StringUtils;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject; import org.dspace.content.DSpaceObject;
@@ -36,221 +33,223 @@ import org.dspace.harvest.HarvestingException;
import org.dspace.harvest.OAIHarvester; import org.dspace.harvest.OAIHarvester;
import org.dspace.harvest.factory.HarvestServiceFactory; import org.dspace.harvest.factory.HarvestServiceFactory;
import org.dspace.harvest.service.HarvestedCollectionService; import org.dspace.harvest.service.HarvestedCollectionService;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.utils.DSpace;
/** /**
* Test class for harvested collections. * Test class for harvested collections.
* *
* @author Alexey Maslov * @author Alexey Maslov
*/ */
public class Harvest { public class Harvest extends DSpaceRunnable<HarvestScriptConfiguration> {
private static Context context;
private static final HarvestedCollectionService harvestedCollectionService = private HarvestedCollectionService harvestedCollectionService;
HarvestServiceFactory.getInstance().getHarvestedCollectionService(); protected EPersonService ePersonService;
private static final EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); private CollectionService collectionService;
private static final CollectionService collectionService =
ContentServiceFactory.getInstance().getCollectionService();
public static void main(String[] argv) throws Exception { private boolean help;
// create an options object and populate it private String command = null;
CommandLineParser parser = new DefaultParser(); private String collection = null;
private String oaiSource = null;
private String oaiSetID = null;
private String metadataKey = null;
private int harvestType = 0;
Options options = new Options(); protected Context context;
options.addOption("p", "purge", false, "delete all items in the collection");
options.addOption("r", "run", false, "run the standard harvest procedure");
options.addOption("g", "ping", false, "test the OAI server and set");
options.addOption("s", "setup", false, "Set the collection up for harvesting");
options.addOption("S", "start", false, "start the harvest loop");
options.addOption("R", "reset", false, "reset harvest status on all collections");
options.addOption("P", "purge", false, "purge all harvestable collections");
options.addOption("e", "eperson", true, public HarvestScriptConfiguration getScriptConfiguration() {
"eperson"); return new DSpace().getServiceManager()
options.addOption("c", "collection", true, .getServiceByName("harvest", HarvestScriptConfiguration.class);
"harvesting collection (handle or id)"); }
options.addOption("t", "type", true,
"type of harvesting (0 for none)");
options.addOption("a", "address", true,
"address of the OAI-PMH server");
options.addOption("i", "oai_set_id", true,
"id of the PMH set representing the harvested collection");
options.addOption("m", "metadata_format", true,
"the name of the desired metadata format for harvesting, resolved to namespace and " +
"crosswalk in dspace.cfg");
options.addOption("h", "help", false, "help"); public void setup() throws ParseException {
harvestedCollectionService =
HarvestServiceFactory.getInstance().getHarvestedCollectionService();
ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
collectionService =
ContentServiceFactory.getInstance().getCollectionService();
CommandLine line = parser.parse(options, argv); assignCurrentUserInContext();
String command = null; help = commandLine.hasOption('h');
String eperson = null;
String collection = null;
String oaiSource = null;
String oaiSetID = null;
String metadataKey = null;
int harvestType = 0;
if (line.hasOption('h')) {
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("Harvest\n", options);
System.out.println("\nPING OAI server: Harvest -g -a oai_source -i oai_set_id");
System.out.println(
"SETUP a collection for harvesting: Harvest -s -c collection -t harvest_type -a oai_source -i " +
"oai_set_id -m metadata_format");
System.out.println("RUN harvest once: Harvest -r -e eperson -c collection");
System.out.println("START harvest scheduler: Harvest -S");
System.out.println("RESET all harvest status: Harvest -R");
System.out.println("PURGE a collection of items and settings: Harvest -p -e eperson -c collection");
System.out.println("PURGE all harvestable collections: Harvest -P -e eperson");
System.exit(0); if (commandLine.hasOption('s')) {
}
if (line.hasOption('s')) {
command = "config"; command = "config";
} }
if (line.hasOption('p')) { if (commandLine.hasOption('p')) {
command = "purge"; command = "purge";
} }
if (line.hasOption('r')) { if (commandLine.hasOption('r')) {
command = "run"; command = "run";
} }
if (line.hasOption('g')) { if (commandLine.hasOption('g')) {
command = "ping"; command = "ping";
} }
if (line.hasOption('S')) { if (commandLine.hasOption('S')) {
command = "start"; command = "start";
} }
if (line.hasOption('R')) { if (commandLine.hasOption('R')) {
command = "reset"; command = "reset";
} }
if (line.hasOption('P')) { if (commandLine.hasOption('P')) {
command = "purgeAll"; command = "purgeAll";
} }
if (commandLine.hasOption('o')) {
command = "reimport";
if (line.hasOption('e')) {
eperson = line.getOptionValue('e');
} }
if (line.hasOption('c')) { if (commandLine.hasOption('c')) {
collection = line.getOptionValue('c'); collection = commandLine.getOptionValue('c');
} }
if (line.hasOption('t')) { if (commandLine.hasOption('t')) {
harvestType = Integer.parseInt(line.getOptionValue('t')); harvestType = Integer.parseInt(commandLine.getOptionValue('t'));
} else { } else {
harvestType = 0; harvestType = 0;
} }
if (line.hasOption('a')) { if (commandLine.hasOption('a')) {
oaiSource = line.getOptionValue('a'); oaiSource = commandLine.getOptionValue('a');
} }
if (line.hasOption('i')) { if (commandLine.hasOption('i')) {
oaiSetID = line.getOptionValue('i'); oaiSetID = commandLine.getOptionValue('i');
} }
if (line.hasOption('m')) { if (commandLine.hasOption('m')) {
metadataKey = line.getOptionValue('m'); metadataKey = commandLine.getOptionValue('m');
}
}
/**
* This method will assign the currentUser to the {@link Context} variable which is also created in this method.
* The instance of the method in this class will fetch the EPersonIdentifier from this class, this identifier
* was given to this class upon instantiation, it'll then be used to find the {@link EPerson} associated with it
* and this {@link EPerson} will be set as the currentUser of the created {@link Context}
* @throws ParseException If something went wrong with the retrieval of the EPerson Identifier
*/
protected void assignCurrentUserInContext() throws ParseException {
UUID currentUserUuid = this.getEpersonIdentifier();
try {
this.context = new Context(Context.Mode.BATCH_EDIT);
EPerson eperson = ePersonService.find(context, currentUserUuid);
if (eperson == null) {
super.handler.logError("EPerson not found: " + currentUserUuid);
throw new IllegalArgumentException("Unable to find a user with uuid: " + currentUserUuid);
}
this.context.setCurrentUser(eperson);
} catch (SQLException e) {
handler.handleException("Something went wrong trying to fetch eperson for uuid: " + currentUserUuid, e);
}
}
public void internalRun() throws Exception {
if (help) {
printHelp();
handler.logInfo("PING OAI server: Harvest -g -a oai_source -i oai_set_id");
handler.logInfo(
"SETUP a collection for harvesting: Harvest -s -c collection -t harvest_type -a oai_source -i " +
"oai_set_id -m metadata_format");
handler.logInfo("RUN harvest once: Harvest -r -e eperson -c collection");
handler.logInfo("START harvest scheduler: Harvest -S");
handler.logInfo("RESET all harvest status: Harvest -R");
handler.logInfo("PURGE a collection of items and settings: Harvest -p -e eperson -c collection");
handler.logInfo("PURGE all harvestable collections: Harvest -P -e eperson");
return;
} }
if (StringUtils.isBlank(command)) {
// Instantiate our class handler.logError("No parameters specified (run with -h flag for details)");
Harvest harvester = new Harvest(); throw new UnsupportedOperationException("No command specified");
harvester.context = new Context(Context.Mode.BATCH_EDIT);
// Check our options
if (command == null) {
System.out
.println("Error - no parameters specified (run with -h flag for details)");
System.exit(1);
} else if ("run".equals(command)) { } else if ("run".equals(command)) {
// Run a single harvest cycle on a collection using saved settings. // Run a single harvest cycle on a collection using saved settings.
if (collection == null || eperson == null) { if (collection == null || context.getCurrentUser() == null) {
System.out handler.logError("A target collection and eperson must be provided (run with -h flag for details)");
.println("Error - a target collection and eperson must be provided"); throw new UnsupportedOperationException("A target collection and eperson must be provided");
System.out.println(" (run with -h flag for details)");
System.exit(1);
} }
runHarvest(context, collection);
harvester.runHarvest(collection, eperson);
} else if ("start".equals(command)) { } else if ("start".equals(command)) {
// start the harvest loop // start the harvest loop
startHarvester(); startHarvester();
} else if ("reset".equals(command)) { } else if ("reset".equals(command)) {
// reset harvesting status // reset harvesting status
resetHarvesting(); resetHarvesting(context);
} else if ("purgeAll".equals(command)) { } else if ("purgeAll".equals(command)) {
// purge all collections that are set up for harvesting (obviously for testing purposes only) // purge all collections that are set up for harvesting (obviously for testing purposes only)
if (eperson == null) { if (context.getCurrentUser() == null) {
System.out handler.logError("An eperson must be provided (run with -h flag for details)");
.println("Error - an eperson must be provided"); throw new UnsupportedOperationException("An eperson must be provided");
System.out.println(" (run with -h flag for details)");
System.exit(1);
} }
List<HarvestedCollection> harvestedCollections = harvestedCollectionService.findAll(context); List<HarvestedCollection> harvestedCollections = harvestedCollectionService.findAll(context);
for (HarvestedCollection harvestedCollection : harvestedCollections) { for (HarvestedCollection harvestedCollection : harvestedCollections) {
System.out.println( handler.logInfo(
"Purging the following collections (deleting items and resetting harvest status): " + "Purging the following collections (deleting items and resetting harvest status): " +
harvestedCollection harvestedCollection
.getCollection().getID().toString()); .getCollection().getID().toString());
harvester.purgeCollection(harvestedCollection.getCollection().getID().toString(), eperson); purgeCollection(context, harvestedCollection.getCollection().getID().toString());
} }
context.complete(); context.complete();
} else if ("purge".equals(command)) { } else if ("purge".equals(command)) {
// Delete all items in a collection. Useful for testing fresh harvests. // Delete all items in a collection. Useful for testing fresh harvests.
if (collection == null || eperson == null) { if (collection == null || context.getCurrentUser() == null) {
System.out handler.logError("A target collection and eperson must be provided (run with -h flag for details)");
.println("Error - a target collection and eperson must be provided"); throw new UnsupportedOperationException("A target collection and eperson must be provided");
System.out.println(" (run with -h flag for details)");
System.exit(1);
} }
harvester.purgeCollection(collection, eperson); purgeCollection(context, collection);
context.complete();
} else if ("reimport".equals(command)) {
// Delete all items in a collection. Useful for testing fresh harvests.
if (collection == null || context.getCurrentUser() == null) {
handler.logError("A target collection and eperson must be provided (run with -h flag for details)");
throw new UnsupportedOperationException("A target collection and eperson must be provided");
}
purgeCollection(context, collection);
runHarvest(context, collection);
context.complete(); context.complete();
//TODO: implement this... remove all items and remember to unset "last-harvested" settings
} else if ("config".equals(command)) { } else if ("config".equals(command)) {
// Configure a collection with the three main settings // Configure a collection with the three main settings
if (collection == null) { if (collection == null) {
System.out.println("Error - a target collection must be provided"); handler.logError("A target collection must be provided (run with -h flag for details)");
System.out.println(" (run with -h flag for details)"); throw new UnsupportedOperationException("A target collection must be provided");
System.exit(1);
} }
if (oaiSource == null || oaiSetID == null) { if (oaiSource == null || oaiSetID == null) {
System.out.println("Error - both the OAI server address and OAI set id must be specified"); handler.logError(
System.out.println(" (run with -h flag for details)"); "Both the OAI server address and OAI set id must be specified (run with -h flag for details)");
System.exit(1); throw new UnsupportedOperationException("Both the OAI server address and OAI set id must be specified");
} }
if (metadataKey == null) { if (metadataKey == null) {
System.out handler.logError(
.println("Error - a metadata key (commonly the prefix) must be specified for this collection"); "A metadata key (commonly the prefix) must be specified for this collection (run with -h flag" +
System.out.println(" (run with -h flag for details)"); " for details)");
System.exit(1); throw new UnsupportedOperationException(
"A metadata key (commonly the prefix) must be specified for this collection");
} }
harvester.configureCollection(collection, harvestType, oaiSource, oaiSetID, metadataKey); configureCollection(context, collection, harvestType, oaiSource, oaiSetID, metadataKey);
} else if ("ping".equals(command)) { } else if ("ping".equals(command)) {
if (oaiSource == null || oaiSetID == null) { if (oaiSource == null || oaiSetID == null) {
System.out.println("Error - both the OAI server address and OAI set id must be specified"); handler.logError(
System.out.println(" (run with -h flag for details)"); "Both the OAI server address and OAI set id must be specified (run with -h flag for details)");
System.exit(1); throw new UnsupportedOperationException("Both the OAI server address and OAI set id must be specified");
} }
pingResponder(oaiSource, oaiSetID, metadataKey); pingResponder(oaiSource, oaiSetID, metadataKey);
} else { } else {
System.out.println("Error - your command '" + command + "' was not recoginzed properly"); handler.logError(
System.out.println(" (run with -h flag for details)"); "Your command '" + command + "' was not recognized properly (run with -h flag for details)");
System.exit(1); throw new UnsupportedOperationException("Your command '" + command + "' was not recognized properly");
} }
} }
/* /*
* Resolve the ID into a collection and check to see if its harvesting options are set. If so, return * Resolve the ID into a collection and check to see if its harvesting options are set. If so, return
* the collection, if not, bail out. * the collection, if not, bail out.
*/ */
private Collection resolveCollection(String collectionID) { private Collection resolveCollection(Context context, String collectionID) {
DSpaceObject dso; DSpaceObject dso;
Collection targetCollection = null; Collection targetCollection = null;
@@ -270,14 +269,14 @@ public class Harvest {
} }
} else { } else {
// not a handle, try and treat it as an collection database UUID // not a handle, try and treat it as an collection database UUID
System.out.println("Looking up by UUID: " + collectionID + ", " + "in context: " + context); handler.logInfo("Looking up by UUID: " + collectionID + ", " + "in context: " + context);
targetCollection = collectionService.find(context, UUID.fromString(collectionID)); targetCollection = collectionService.find(context, UUID.fromString(collectionID));
} }
} }
// was the collection valid? // was the collection valid?
if (targetCollection == null) { if (targetCollection == null) {
System.out.println("Cannot resolve " + collectionID + " to collection"); handler.logError("Cannot resolve " + collectionID + " to collection");
System.exit(1); throw new UnsupportedOperationException("Cannot resolve " + collectionID + " to collection");
} }
} catch (SQLException se) { } catch (SQLException se) {
se.printStackTrace(); se.printStackTrace();
@@ -287,12 +286,12 @@ public class Harvest {
} }
private void configureCollection(String collectionID, int type, String oaiSource, String oaiSetId, private void configureCollection(Context context, String collectionID, int type, String oaiSource, String oaiSetId,
String mdConfigId) { String mdConfigId) {
System.out.println("Running: configure collection"); handler.logInfo("Running: configure collection");
Collection collection = resolveCollection(collectionID); Collection collection = resolveCollection(context, collectionID);
System.out.println(collection.getID()); handler.logInfo(String.valueOf(collection.getID()));
try { try {
HarvestedCollection hc = harvestedCollectionService.find(context, collection); HarvestedCollection hc = harvestedCollectionService.find(context, collection);
@@ -307,9 +306,8 @@ public class Harvest {
context.restoreAuthSystemState(); context.restoreAuthSystemState();
context.complete(); context.complete();
} catch (Exception e) { } catch (Exception e) {
System.out.println("Changes could not be committed"); handler.logError("Changes could not be committed");
e.printStackTrace(); handler.handleException(e);
System.exit(1);
} finally { } finally {
if (context != null) { if (context != null) {
context.restoreAuthSystemState(); context.restoreAuthSystemState();
@@ -320,18 +318,15 @@ public class Harvest {
/** /**
* Purges a collection of all harvest-related data and settings. All items in the collection will be deleted. * Purges a collection of all harvest-related data and settings. All items in the collection will be deleted.
* @param collectionID
* *
* @param collectionID
* @param email
*/ */
private void purgeCollection(String collectionID, String email) { private void purgeCollection(Context context, String collectionID) {
System.out.println( handler.logInfo(
"Purging collection of all items and resetting last_harvested and harvest_message: " + collectionID); "Purging collection of all items and resetting last_harvested and harvest_message: " + collectionID);
Collection collection = resolveCollection(collectionID); Collection collection = resolveCollection(context, collectionID);
try { try {
EPerson eperson = ePersonService.findByEmail(context, email);
context.setCurrentUser(eperson);
context.turnOffAuthorisationSystem(); context.turnOffAuthorisationSystem();
ItemService itemService = ContentServiceFactory.getInstance().getItemService(); ItemService itemService = ContentServiceFactory.getInstance().getItemService();
@@ -340,7 +335,7 @@ public class Harvest {
while (it.hasNext()) { while (it.hasNext()) {
i++; i++;
Item item = it.next(); Item item = it.next();
System.out.println("Deleting: " + item.getHandle()); handler.logInfo("Deleting: " + item.getHandle());
collectionService.removeItem(context, collection, item); collectionService.removeItem(context, collection, item);
context.uncacheEntity(item);// Dispatch events every 50 items context.uncacheEntity(item);// Dispatch events every 50 items
if (i % 50 == 0) { if (i % 50 == 0) {
@@ -360,9 +355,8 @@ public class Harvest {
context.restoreAuthSystemState(); context.restoreAuthSystemState();
context.dispatchEvents(); context.dispatchEvents();
} catch (Exception e) { } catch (Exception e) {
System.out.println("Changes could not be committed"); handler.logError("Changes could not be committed");
e.printStackTrace(); handler.handleException(e);
System.exit(1);
} finally { } finally {
context.restoreAuthSystemState(); context.restoreAuthSystemState();
} }
@@ -372,46 +366,42 @@ public class Harvest {
/** /**
* Run a single harvest cycle on the specified collection under the authorization of the supplied EPerson * Run a single harvest cycle on the specified collection under the authorization of the supplied EPerson
*/ */
private void runHarvest(String collectionID, String email) { private void runHarvest(Context context, String collectionID) {
System.out.println("Running: a harvest cycle on " + collectionID); handler.logInfo("Running: a harvest cycle on " + collectionID);
System.out.print("Initializing the harvester... "); handler.logInfo("Initializing the harvester... ");
OAIHarvester harvester = null; OAIHarvester harvester = null;
try { try {
Collection collection = resolveCollection(collectionID); Collection collection = resolveCollection(context, collectionID);
HarvestedCollection hc = harvestedCollectionService.find(context, collection); HarvestedCollection hc = harvestedCollectionService.find(context, collection);
harvester = new OAIHarvester(context, collection, hc); harvester = new OAIHarvester(context, collection, hc);
System.out.println("success. "); handler.logInfo("Initialized the harvester successfully");
} catch (HarvestingException hex) { } catch (HarvestingException hex) {
System.out.print("failed. "); handler.logError("Initializing the harvester failed.");
System.out.println(hex.getMessage());
throw new IllegalStateException("Unable to harvest", hex); throw new IllegalStateException("Unable to harvest", hex);
} catch (SQLException se) { } catch (SQLException se) {
System.out.print("failed. "); handler.logError("Initializing the harvester failed.");
System.out.println(se.getMessage());
throw new IllegalStateException("Unable to access database", se); throw new IllegalStateException("Unable to access database", se);
} }
try { try {
// Harvest will not work for an anonymous user // Harvest will not work for an anonymous user
EPerson eperson = ePersonService.findByEmail(context, email); handler.logInfo("Harvest started... ");
System.out.println("Harvest started... ");
context.setCurrentUser(eperson);
harvester.runHarvest(); harvester.runHarvest();
context.complete(); context.complete();
} catch (SQLException | AuthorizeException | IOException e) { } catch (SQLException | AuthorizeException | IOException e) {
throw new IllegalStateException("Failed to run harvester", e); throw new IllegalStateException("Failed to run harvester", e);
} }
System.out.println("Harvest complete. "); handler.logInfo("Harvest complete. ");
} }
/** /**
* Resets harvest_status and harvest_start_time flags for all collections that have a row in the * Resets harvest_status and harvest_start_time flags for all collections that have a row in the
* harvested_collections table * harvested_collections table
*/ */
private static void resetHarvesting() { private void resetHarvesting(Context context) {
System.out.print("Resetting harvest status flag on all collections... "); handler.logInfo("Resetting harvest status flag on all collections... ");
try { try {
List<HarvestedCollection> harvestedCollections = harvestedCollectionService.findAll(context); List<HarvestedCollection> harvestedCollections = harvestedCollectionService.findAll(context);
@@ -421,21 +411,21 @@ public class Harvest {
harvestedCollection.setHarvestStatus(HarvestedCollection.STATUS_READY); harvestedCollection.setHarvestStatus(HarvestedCollection.STATUS_READY);
harvestedCollectionService.update(context, harvestedCollection); harvestedCollectionService.update(context, harvestedCollection);
} }
System.out.println("success. "); handler.logInfo("Reset harvest status flag successfully");
} catch (Exception ex) { } catch (Exception ex) {
System.out.println("failed. "); handler.logError("Resetting harvest status flag failed");
ex.printStackTrace(); handler.handleException(ex);
} }
} }
/** /**
* Starts up the harvest scheduler. Terminating this process will stop the scheduler. * Starts up the harvest scheduler. Terminating this process will stop the scheduler.
*/ */
private static void startHarvester() { private void startHarvester() {
try { try {
System.out.print("Starting harvest loop... "); handler.logInfo("Starting harvest loop... ");
HarvestServiceFactory.getInstance().getHarvestSchedulingService().startNewScheduler(); HarvestServiceFactory.getInstance().getHarvestSchedulingService().startNewScheduler();
System.out.println("running. "); handler.logInfo("running. ");
} catch (Exception ex) { } catch (Exception ex) {
ex.printStackTrace(); ex.printStackTrace();
} }
@@ -448,29 +438,31 @@ public class Harvest {
* @param set name of an item set. * @param set name of an item set.
* @param metadataFormat local prefix name, or null for "dc". * @param metadataFormat local prefix name, or null for "dc".
*/ */
private static void pingResponder(String server, String set, String metadataFormat) { private void pingResponder(String server, String set, String metadataFormat) {
List<String> errors; List<String> errors;
System.out.print("Testing basic PMH access: "); handler.logInfo("Testing basic PMH access: ");
errors = harvestedCollectionService.verifyOAIharvester(server, set, errors = harvestedCollectionService.verifyOAIharvester(server, set,
(null != metadataFormat) ? metadataFormat : "dc", false); (null != metadataFormat) ? metadataFormat : "dc", false);
if (errors.isEmpty()) { if (errors.isEmpty()) {
System.out.println("OK"); handler.logInfo("OK");
} else { } else {
for (String error : errors) { for (String error : errors) {
System.err.println(error); handler.logError(error);
} }
} }
System.out.print("Testing ORE support: "); handler.logInfo("Testing ORE support: ");
errors = harvestedCollectionService.verifyOAIharvester(server, set, errors = harvestedCollectionService.verifyOAIharvester(server, set,
(null != metadataFormat) ? metadataFormat : "dc", true); (null != metadataFormat) ? metadataFormat : "dc", true);
if (errors.isEmpty()) { if (errors.isEmpty()) {
System.out.println("OK"); handler.logInfo("OK");
} else { } else {
for (String error : errors) { for (String error : errors) {
System.err.println(error); handler.logError(error);
} }
} }
} }
} }

View File

@@ -0,0 +1,45 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.harvest;
import java.sql.SQLException;
import org.apache.commons.cli.ParseException;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
public class HarvestCli extends Harvest {
/**
* This is the overridden instance of the {@link Harvest#assignCurrentUserInContext()} method in the parent class
* {@link Harvest}.
* This is done so that the CLI version of the Script is able to retrieve its currentUser from the -e flag given
* with the parameters of the Script.
*
* @throws ParseException If the e flag was not given to the parameters when calling the script
*/
@Override
protected void assignCurrentUserInContext() throws ParseException {
if (this.commandLine.hasOption('e')) {
String ePersonEmail = this.commandLine.getOptionValue('e');
this.context = new Context(Context.Mode.BATCH_EDIT);
try {
EPerson ePerson = ePersonService.findByEmail(this.context, ePersonEmail);
if (ePerson == null) {
super.handler.logError("EPerson not found: " + ePersonEmail);
throw new IllegalArgumentException("Unable to find a user with email: " + ePersonEmail);
}
this.context.setCurrentUser(ePerson);
} catch (SQLException e) {
throw new IllegalArgumentException("SQLException trying to find user with email: " + ePersonEmail);
}
}
}
}

View File

@@ -0,0 +1,22 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.harvest;
import org.apache.commons.cli.Options;
public class HarvestCliScriptConfiguration extends HarvestScriptConfiguration {
public Options getOptions() {
Options options = super.getOptions();
options.addOption("e", "eperson", true,
"eperson");
return options;
}
}

View File

@@ -0,0 +1,79 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.harvest;
import java.sql.SQLException;
import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Script configuration for the {@link Harvest} script: declares who is allowed
 * to run it and which command line options it accepts.
 *
 * @param <T> the concrete Harvest script class this configuration describes
 */
public class HarvestScriptConfiguration<T extends Harvest> extends ScriptConfiguration<T> {

    @Autowired
    private AuthorizeService authorizeService;

    private Class<T> dspaceRunnableClass;

    @Override
    public Class<T> getDspaceRunnableClass() {
        return this.dspaceRunnableClass;
    }

    @Override
    public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
        this.dspaceRunnableClass = dspaceRunnableClass;
    }

    /**
     * Only site administrators may execute the harvest script.
     */
    public boolean isAllowedToExecute(final Context context) {
        try {
            return authorizeService.isAdmin(context);
        } catch (SQLException e) {
            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
        }
    }

    public Options getOptions() {
        Options harvestOptions = new Options();
        // Boolean flags selecting the action to perform.
        addFlag(harvestOptions, "p", "purge", "delete all items in the collection");
        addFlag(harvestOptions, "r", "run", "run the standard harvest procedure");
        addFlag(harvestOptions, "g", "ping", "test the OAI server and set");
        addFlag(harvestOptions, "s", "setup", "Set the collection up for harvesting");
        addFlag(harvestOptions, "S", "start", "start the harvest loop");
        addFlag(harvestOptions, "R", "reset", "reset harvest status on all collections");
        addFlag(harvestOptions, "P", "purgeCollections", "purge all harvestable collections");
        addFlag(harvestOptions, "o", "reimport", "reimport all items in the collection, " +
                "this is equivalent to -p -r, purging all items in a collection and reimporting them");
        // Options that take an argument.
        harvestOptions.addOption("c", "collection", true,
                "harvesting collection (handle or id)");
        harvestOptions.addOption("t", "type", true,
                "type of harvesting (0 for none)");
        harvestOptions.addOption("a", "address", true,
                "address of the OAI-PMH server");
        harvestOptions.addOption("i", "oai_set_id", true,
                "id of the PMH set representing the harvested collection");
        harvestOptions.addOption("m", "metadata_format", true,
                "the name of the desired metadata format for harvesting, resolved to namespace and " +
                "crosswalk in dspace.cfg");
        addFlag(harvestOptions, "h", "help", "help");
        return harvestOptions;
    }

    /** Register a no-argument option and mark its value type as boolean. */
    private static void addFlag(Options opts, String shortName, String longName, String description) {
        opts.addOption(shortName, longName, false, description);
        opts.getOption(shortName).setType(boolean.class);
    }
}

View File

@@ -296,29 +296,36 @@ public class ItemImportCLITool {
// validate each collection arg to see if it's a real collection // validate each collection arg to see if it's a real collection
for (int i = 0; i < collections.length; i++) { for (int i = 0; i < collections.length; i++) {
// is the ID a handle?
if (collections[i].indexOf('/') != -1) {
// string has a / so it must be a handle - try and resolve
// it
mycollections.add((Collection) handleService
.resolveToObject(c, collections[i]));
// resolved, now make sure it's a collection Collection resolved = null;
if ((mycollections.get(i) == null)
|| (mycollections.get(i).getType() != Constants.COLLECTION)) { if (collections[i] != null) {
mycollections.set(i, null);
// is the ID a handle?
if (collections[i].indexOf('/') != -1) {
// string has a / so it must be a handle - try and resolve
// it
resolved = ((Collection) handleService
.resolveToObject(c, collections[i]));
} else {
// not a handle, try and treat it as an integer collection database ID
resolved = collectionService.find(c, UUID.fromString(collections[i]));
} }
} else if (collections[i] != null) {
// not a handle, try and treat it as an integer collection database ID
mycollections.set(i, collectionService.find(c, UUID.fromString(collections[i])));
} }
// was the collection valid? // was the collection valid?
if (mycollections.get(i) == null) { if ((resolved == null)
|| (resolved.getType() != Constants.COLLECTION)) {
throw new IllegalArgumentException("Cannot resolve " throw new IllegalArgumentException("Cannot resolve "
+ collections[i] + " to collection"); + collections[i] + " to collection");
} }
// add resolved collection to list
mycollections.add(resolved);
// print progress info // print progress info
String owningPrefix = ""; String owningPrefix = "";
@@ -327,7 +334,7 @@ public class ItemImportCLITool {
} }
System.out.println(owningPrefix + " Collection: " System.out.println(owningPrefix + " Collection: "
+ mycollections.get(i).getName()); + resolved.getName());
} }
} // end of validating collections } // end of validating collections

View File

@@ -55,6 +55,7 @@ import org.apache.logging.log4j.Logger;
import org.apache.xpath.XPathAPI; import org.apache.xpath.XPathAPI;
import org.dspace.app.itemimport.service.ItemImportService; import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.app.util.LocalSchemaFilenameFilter; import org.dspace.app.util.LocalSchemaFilenameFilter;
import org.dspace.app.util.RelationshipUtils;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.service.AuthorizeService; import org.dspace.authorize.service.AuthorizeService;
@@ -68,6 +69,9 @@ import org.dspace.content.Item;
import org.dspace.content.MetadataField; import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema; import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.MetadataValue;
import org.dspace.content.Relationship;
import org.dspace.content.RelationshipType;
import org.dspace.content.WorkspaceItem; import org.dspace.content.WorkspaceItem;
import org.dspace.content.service.BitstreamFormatService; import org.dspace.content.service.BitstreamFormatService;
import org.dspace.content.service.BitstreamService; import org.dspace.content.service.BitstreamService;
@@ -77,6 +81,9 @@ import org.dspace.content.service.InstallItemService;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
import org.dspace.content.service.MetadataFieldService; import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService; import org.dspace.content.service.MetadataSchemaService;
import org.dspace.content.service.MetadataValueService;
import org.dspace.content.service.RelationshipService;
import org.dspace.content.service.RelationshipTypeService;
import org.dspace.content.service.WorkspaceItemService; import org.dspace.content.service.WorkspaceItemService;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
@@ -151,6 +158,12 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
protected WorkflowService workflowService; protected WorkflowService workflowService;
@Autowired(required = true) @Autowired(required = true)
protected ConfigurationService configurationService; protected ConfigurationService configurationService;
@Autowired(required = true)
protected RelationshipService relationshipService;
@Autowired(required = true)
protected RelationshipTypeService relationshipTypeService;
@Autowired(required = true)
protected MetadataValueService metadataValueService;
protected String tempWorkDir; protected String tempWorkDir;
@@ -160,6 +173,9 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
protected boolean useWorkflowSendEmail = false; protected boolean useWorkflowSendEmail = false;
protected boolean isQuiet = false; protected boolean isQuiet = false;
//remember which folder item was imported from
Map<String, Item> itemFolderMap = null;
@Override @Override
public void afterPropertiesSet() throws Exception { public void afterPropertiesSet() throws Exception {
tempWorkDir = configurationService.getProperty("org.dspace.app.batchitemimport.work.dir"); tempWorkDir = configurationService.getProperty("org.dspace.app.batchitemimport.work.dir");
@@ -211,10 +227,13 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
// create the mapfile // create the mapfile
File outFile = null; File outFile = null;
PrintWriter mapOut = null; PrintWriter mapOut = null;
try { try {
Map<String, String> skipItems = new HashMap<>(); // set of items to skip if in 'resume' Map<String, String> skipItems = new HashMap<>(); // set of items to skip if in 'resume'
// mode // mode
itemFolderMap = new HashMap<>();
System.out.println("Adding items from directory: " + sourceDir); System.out.println("Adding items from directory: " + sourceDir);
log.debug("Adding items from directory: " + sourceDir); log.debug("Adding items from directory: " + sourceDir);
System.out.println("Generating mapfile: " + mapFile); System.out.println("Generating mapfile: " + mapFile);
@@ -255,6 +274,12 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
for (int i = 0; i < dircontents.length; i++) { for (int i = 0; i < dircontents.length; i++) {
if (skipItems.containsKey(dircontents[i])) { if (skipItems.containsKey(dircontents[i])) {
System.out.println("Skipping import of " + dircontents[i]); System.out.println("Skipping import of " + dircontents[i]);
//we still need the item in the map for relationship linking
String skippedHandle = skipItems.get(dircontents[i]);
Item skippedItem = (Item) handleService.resolveToObject(c, skippedHandle);
itemFolderMap.put(dircontents[i], skippedItem);
} else { } else {
List<Collection> clist; List<Collection> clist;
if (directoryFileCollections) { if (directoryFileCollections) {
@@ -274,12 +299,19 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
} else { } else {
clist = mycollections; clist = mycollections;
} }
Item item = addItem(c, clist, sourceDir, dircontents[i], mapOut, template); Item item = addItem(c, clist, sourceDir, dircontents[i], mapOut, template);
itemFolderMap.put(dircontents[i], item);
c.uncacheEntity(item); c.uncacheEntity(item);
System.out.println(i + " " + dircontents[i]); System.out.println(i + " " + dircontents[i]);
} }
} }
//now that all items are imported, iterate again to link relationships
addRelationships(c, sourceDir);
} finally { } finally {
if (mapOut != null) { if (mapOut != null) {
mapOut.flush(); mapOut.flush();
@@ -288,6 +320,276 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
} }
} }
/**
 * Add relationships from a 'relationships' manifest file found in each imported item folder.
 *
 * Called after all items in the batch have been imported, so that a relationship line can
 * reference another item of the same batch (by folder name) as well as a pre-existing item
 * (by handle, UUID or a unique metadata value) — see {@link #resolveRelatedItem}.
 *
 * @param c Context
 * @param sourceDir The parent import source directory
 * @throws Exception if a referenced item cannot be resolved or no matching relationship
 *                   type is configured between the two entity types
 */
protected void addRelationships(Context c, String sourceDir) throws Exception {
    for (Map.Entry<String, Item> itemEntry : itemFolderMap.entrySet()) {
        String folderName = itemEntry.getKey();
        String path = sourceDir + File.separatorChar + folderName;
        Item item = itemEntry.getValue();
        //look for a 'relationships' manifest in this item's folder
        Map<String, List<String>> relationships = processRelationshipFile(path, "relationships");
        if (!relationships.isEmpty()) {
            for (Map.Entry<String, List<String>> relEntry : relationships.entrySet()) {
                String relationshipType = relEntry.getKey();
                List<String> identifierList = relEntry.getValue();
                for (String itemIdentifier : identifierList) {
                    if (isTest) {
                        // test mode: report what would be done, but do not create anything
                        System.out.println("\tAdding relationship (type: " + relationshipType +
                            ") from " + folderName + " to " + itemIdentifier);
                        continue;
                    }
                    //find referenced item
                    Item relationItem = resolveRelatedItem(c, itemIdentifier);
                    if (null == relationItem) {
                        throw new Exception("Could not find item for " + itemIdentifier);
                    }
                    //get entity type of both ends of the relationship
                    String itemEntityType = getEntityType(item);
                    String relatedEntityType = getEntityType(relationItem);
                    //find matching relationship type among those configured for this type name
                    List<RelationshipType> relTypes = relationshipTypeService.findByLeftwardOrRightwardTypeName(
                        c, relationshipType);
                    RelationshipType foundRelationshipType = RelationshipUtils.matchRelationshipType(
                        relTypes, relatedEntityType, itemEntityType, relationshipType);
                    if (foundRelationshipType == null) {
                        throw new Exception("No Relationship type found for:\n" +
                            "Target type: " + relatedEntityType + "\n" +
                            "Origin referer type: " + itemEntityType + "\n" +
                            "with typeName: " + relationshipType
                        );
                    }
                    // Does the manifest's type name match the leftward name? Then the
                    // current item is the left end, otherwise the right end.
                    boolean left = false;
                    if (foundRelationshipType.getLeftwardType().equalsIgnoreCase(relationshipType)) {
                        left = true;
                    }
                    // Placeholder items for relation placing
                    Item leftItem = null;
                    Item rightItem = null;
                    if (left) {
                        leftItem = item;
                        rightItem = relationItem;
                    } else {
                        leftItem = relationItem;
                        rightItem = item;
                    }
                    // Create (and persist) the relationship at the next free place on each side
                    int leftPlace = relationshipService.findNextLeftPlaceByLeftItem(c, leftItem);
                    int rightPlace = relationshipService.findNextRightPlaceByRightItem(c, rightItem);
                    Relationship persistedRelationship = relationshipService.create(
                        c, leftItem, rightItem, foundRelationshipType, leftPlace, rightPlace);
                    System.out.println("\tAdded relationship (type: " + relationshipType + ") from " +
                        leftItem.getHandle() + " to " + rightItem.getHandle());
                }
            }
        }
    }
}
/**
 * Get the item's entity type, i.e. the value of its dspace.entity.type metadata field.
 *
 * @param item the item whose entity type is wanted
 * @return the first dspace.entity.type metadata value
 * @throws Exception if the item carries no dspace.entity.type metadata
 */
protected String getEntityType(Item item) throws Exception {
    List<MetadataValue> entityTypes = itemService.getMetadata(item, "dspace", "entity", "type", Item.ANY);
    if (entityTypes.isEmpty()) {
        // Fail with a clear message instead of an opaque IndexOutOfBoundsException.
        throw new Exception("No dspace.entity.type metadata found on item " + item.getID());
    }
    return entityTypes.get(0).getValue();
}
/**
 * Read the relationship manifest file.
 *
 * Each line in the file contains a relationship type id and an item identifier in the following format:
 *
 * relation.<relation_key> <handle|uuid|folderName:import_item_folder|schema.element[.qualifier]:value>
 *
 * The import_item_folder should refer to the folder name of another item in this import batch.
 *
 * @param path The main import folder path.
 * @param filename The name of the manifest file to check ('relationships')
 * @return Map from relationship type name to the list of item identifiers referenced under it;
 *         empty if the manifest file does not exist
 * @throws Exception if a manifest line cannot be parsed into the two expected tokens
 */
protected Map<String, List<String>> processRelationshipFile(String path, String filename) throws Exception {
    File file = new File(path + File.separatorChar + filename);
    Map<String, List<String>> result = new HashMap<>();
    if (file.exists()) {
        System.out.println("\tProcessing relationships file: " + filename);
        BufferedReader br = null;
        try {
            br = new BufferedReader(new FileReader(file));
            String line = null;
            while ((line = br.readLine()) != null) {
                // skip blank lines
                line = line.trim();
                if ("".equals(line)) {
                    continue;
                }
                String relationshipType = null;
                String itemIdentifier = null;
                StringTokenizer st = new StringTokenizer(line);
                // first token is the relationship key, e.g. "relation.isAuthorOfPublication";
                // a dotted name is reduced to the part after the first dot
                if (st.hasMoreTokens()) {
                    relationshipType = st.nextToken();
                    if (relationshipType.split("\\.").length > 1) {
                        relationshipType = relationshipType.split("\\.")[1];
                    }
                } else {
                    throw new Exception("Bad mapfile line:\n" + line);
                }
                // nextToken("") clears the delimiter set, so this consumes the whole
                // remainder of the line as the item identifier
                if (st.hasMoreTokens()) {
                    itemIdentifier = st.nextToken("").trim();
                } else {
                    throw new Exception("Bad mapfile line:\n" + line);
                }
                // group identifiers by relationship type
                if (!result.containsKey(relationshipType)) {
                    result.put(relationshipType, new ArrayList<>());
                }
                result.get(relationshipType).add(itemIdentifier);
            }
        } catch (FileNotFoundException e) {
            // file.exists() was checked above, so this is unlikely; treat as "no manifest"
            System.out.println("\tNo relationships file found.");
        } finally {
            if (br != null) {
                try {
                    br.close();
                } catch (IOException e) {
                    // best-effort close; a failure here does not affect the parsed result
                    System.out.println("Non-critical problem releasing resources.");
                }
            }
        }
    }
    return result;
}
/**
 * Resolve an item identifier referred to in the relationships manifest file.
 *
 * Resolution order: a "folderName:" / "rowName:" prefix is looked up in the map of items
 * imported in this batch; any other "key:value" form is treated as a unique metadata
 * lookup; an identifier containing '/' is resolved as a handle; anything else is tried
 * as a UUID or legacy id.
 *
 * @param c Context
 * @param itemIdentifier The identifier string found in the import manifest (handle, uuid, or import subfolder)
 * @return Item if found, or null.
 * @throws Exception
 */
protected Item resolveRelatedItem(Context c, String itemIdentifier) throws Exception {
    int colonPos = itemIdentifier.indexOf(":");
    if (colonPos >= 0) {
        String prefix = itemIdentifier.substring(0, colonPos);
        String remainder = itemIdentifier.substring(colonPos + 1);
        if ("folderName".equals(prefix) || "rowName".equals(prefix)) {
            // identifier refers to a folder name in this import; null if unknown
            return itemFolderMap.get(remainder);
        }
        // lookup by a unique metadata value, prefix being the field key
        return findItemByMetaValue(c, prefix, remainder);
    }
    if (itemIdentifier.indexOf('/') >= 0) {
        // resolve by handle
        return (Item) handleService.resolveToObject(c, itemIdentifier);
    }
    // try to resolve by UUID (or legacy id)
    return itemService.findByIdOrLegacyId(c, itemIdentifier);
}
/**
 * Look up a single item by a (unique) metadata value.
 *
 * @param c Context
 * @param metaKey metadata field reference in schema.element[.qualifier] form
 * @param metaValue value the field must hold
 * @return the matching Item (never null)
 * @throws Exception if the key is malformed, no item matches, more than one item
 *                   matches, or a database error occurs
 */
protected Item findItemByMetaValue(Context c, String metaKey, String metaValue) throws Exception {
    String[] fieldParts = metaKey.split("\\.");
    if (fieldParts.length < 2) {
        throw new Exception("Bad metadata field in reference: '" + metaKey +
                "' (expected syntax is schema.element[.qualifier])");
    }
    String schema = fieldParts[0];
    String element = fieldParts[1];
    String qualifier = fieldParts.length == 2 ? null : fieldParts[2];
    Item found = null;
    try {
        MetadataField field = metadataFieldService.findByElement(c, schema, element, qualifier);
        Iterator<MetadataValue> matches = metadataValueService.findByFieldAndValue(c, field, metaValue);
        if (matches.hasNext()) {
            UUID matchedId = matches.next().getDSpaceObject().getID();
            if (matches.hasNext()) {
                // more than one object carries this value: the reference is not unique
                throw new Exception("Ambiguous reference; multiple matches in db: " + metaKey);
            }
            found = itemService.find(c, matchedId);
        }
    } catch (SQLException e) {
        throw new Exception("Error looking up item by metadata reference: " + metaKey, e);
    }
    if (found == null) {
        throw new Exception("Item not found by metadata reference: " + metaKey);
    }
    return found;
}
@Override @Override
public void replaceItems(Context c, List<Collection> mycollections, public void replaceItems(Context c, List<Collection> mycollections,
String sourceDir, String mapFile, boolean template) throws Exception { String sourceDir, String mapFile, boolean template) throws Exception {
@@ -1823,4 +2125,5 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
public void setQuiet(boolean isQuiet) { public void setQuiet(boolean isQuiet) {
this.isQuiet = isQuiet; this.isQuiet = isQuiet;
} }
} }

View File

@@ -0,0 +1,69 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.util;
import java.util.List;
import org.dspace.content.RelationshipType;
/**
 * Utility methods for working with {@link RelationshipType} configurations.
 */
public class RelationshipUtils {

    private RelationshipUtils() {
    }

    /**
     * Matches two Entity types to a Relationship Type from a set of Relationship Types.
     *
     * Given a list of Relationship Types, this method will find a Relationship Type that
     * is configured between the originType and the targetType, with the matching originTypeName.
     * It will match a relationship between these two entities in either direction (eg leftward
     * or rightward). When several candidates match, the last one in the list wins.
     *
     * Example: originType = Author, targetType = Publication, originTypeName = isAuthorOfPublication.
     *
     * @param relTypes set of Relationship Types in which to find a match.
     * @param targetType entity type of target (eg. Publication).
     * @param originType entity type of origin referer (eg. Author).
     * @param originTypeName the name of the relationship (eg. isAuthorOfPublication)
     * @return null or matched Relationship Type.
     */
    public static RelationshipType matchRelationshipType(List<RelationshipType> relTypes, String targetType,
            String originType, String originTypeName) {
        // A qualified name such as "relation.isAuthorOfPublication" is reduced to its local part.
        String typeName = originTypeName;
        String[] nameParts = typeName.split("\\.");
        if (nameParts.length > 1) {
            typeName = nameParts[1];
        }
        RelationshipType match = null;
        for (RelationshipType candidate : relTypes) {
            String leftLabel = candidate.getLeftType().getLabel();
            String rightLabel = candidate.getRightType().getLabel();
            if (leftLabel.equalsIgnoreCase(originType)) {
                // Origin sits on the left: leftward name and right-hand entity must match.
                if (candidate.getLeftwardType().equalsIgnoreCase(typeName)
                        && rightLabel.equalsIgnoreCase(targetType)) {
                    match = candidate;
                }
            } else {
                // Origin sits on the right: rightward name and both entity labels must match.
                if (candidate.getRightwardType().equalsIgnoreCase(typeName)
                        && leftLabel.equalsIgnoreCase(targetType)
                        && rightLabel.equalsIgnoreCase(originType)) {
                    match = candidate;
                }
            }
        }
        return match;
    }
}

View File

@@ -14,6 +14,7 @@ import java.util.Arrays;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import java.util.UUID;
import javax.annotation.Nonnull; import javax.annotation.Nonnull;
import javax.persistence.Cacheable; import javax.persistence.Cacheable;
import javax.persistence.CascadeType; import javax.persistence.CascadeType;
@@ -104,6 +105,16 @@ public class Collection extends DSpaceObject implements DSpaceObjectLegacySuppor
} }
/**
* Takes a pre-determined UUID to be passed to the object to allow for the
* restoration of previously defined UUID's.
*
* @param uuid Takes a uuid to be passed to the Pre-Defined UUID Generator
*/
protected Collection(UUID uuid) {
this.predefinedUUID = uuid;
}
@Override @Override
public String getName() { public String getName() {
String value = getCollectionService() String value = getCollectionService()

View File

@@ -129,12 +129,23 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
@Override @Override
public Collection create(Context context, Community community, String handle) public Collection create(Context context, Community community, String handle)
throws SQLException, AuthorizeException { throws SQLException, AuthorizeException {
return create(context, community, handle, null);
}
@Override
public Collection create(Context context, Community community,
String handle, UUID uuid) throws SQLException, AuthorizeException {
if (community == null) { if (community == null) {
throw new IllegalArgumentException("Community cannot be null when creating a new collection."); throw new IllegalArgumentException("Community cannot be null when creating a new collection.");
} }
Collection newCollection = collectionDAO.create(context, new Collection()); Collection newCollection;
if (uuid != null) {
newCollection = collectionDAO.create(context, new Collection(uuid));
} else {
newCollection = collectionDAO.create(context, new Collection());
}
//Add our newly created collection to our community, authorization checks occur in THIS method //Add our newly created collection to our community, authorization checks occur in THIS method
communityService.addCollection(context, community, newCollection); communityService.addCollection(context, community, newCollection);
@@ -146,9 +157,10 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
authorizeService.createResourcePolicy(context, newCollection, anonymousGroup, null, Constants.READ, null); authorizeService.createResourcePolicy(context, newCollection, anonymousGroup, null, Constants.READ, null);
// now create the default policies for submitted items // now create the default policies for submitted items
authorizeService authorizeService
.createResourcePolicy(context, newCollection, anonymousGroup, null, Constants.DEFAULT_ITEM_READ, null); .createResourcePolicy(context, newCollection, anonymousGroup, null, Constants.DEFAULT_ITEM_READ, null);
authorizeService authorizeService
.createResourcePolicy(context, newCollection, anonymousGroup, null, Constants.DEFAULT_BITSTREAM_READ, null); .createResourcePolicy(context, newCollection, anonymousGroup, null,
Constants.DEFAULT_BITSTREAM_READ, null);
collectionDAO.save(context, newCollection); collectionDAO.save(context, newCollection);
@@ -164,12 +176,12 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
} }
context.addEvent(new Event(Event.CREATE, Constants.COLLECTION, context.addEvent(new Event(Event.CREATE, Constants.COLLECTION,
newCollection.getID(), newCollection.getHandle(), newCollection.getID(), newCollection.getHandle(),
getIdentifiers(context, newCollection))); getIdentifiers(context, newCollection)));
log.info(LogHelper.getHeader(context, "create_collection", log.info(LogHelper.getHeader(context, "create_collection",
"collection_id=" + newCollection.getID()) "collection_id=" + newCollection.getID())
+ ",handle=" + newCollection.getHandle()); + ",handle=" + newCollection.getHandle());
return newCollection; return newCollection;
} }
@@ -951,7 +963,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
* Finds all Indexed Collections where the current user has submit rights. If the user is an Admin, * Finds all Indexed Collections where the current user has submit rights. If the user is an Admin,
* this is all Indexed Collections. Otherwise, it includes those collections where * this is all Indexed Collections. Otherwise, it includes those collections where
* an indexed "submit" policy lists either the eperson or one of the eperson's groups * an indexed "submit" policy lists either the eperson or one of the eperson's groups
* *
* @param context DSpace context * @param context DSpace context
* @param discoverQuery * @param discoverQuery
* @param community parent community, could be null * @param community parent community, could be null

View File

@@ -11,6 +11,7 @@ import java.util.Arrays;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import java.util.UUID;
import javax.persistence.Cacheable; import javax.persistence.Cacheable;
import javax.persistence.CascadeType; import javax.persistence.CascadeType;
import javax.persistence.Column; import javax.persistence.Column;
@@ -89,6 +90,16 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
} }
/**
* Takes a pre-determined UUID to be passed to the object to allow for the
* restoration of previously defined UUID's.
*
* @param uuid Takes a uuid to be passed to the Pre-Defined UUID Generator
*/
protected Community(UUID uuid) {
this.predefinedUUID = uuid;
}
void addSubCommunity(Community subCommunity) { void addSubCommunity(Community subCommunity) {
subCommunities.add(subCommunity); subCommunities.add(subCommunity);
setModified(); setModified();

View File

@@ -86,13 +86,24 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
@Override @Override
public Community create(Community parent, Context context, String handle) throws SQLException, AuthorizeException { public Community create(Community parent, Context context, String handle) throws SQLException, AuthorizeException {
return create(parent, context, handle, null);
}
@Override
public Community create(Community parent, Context context, String handle,
UUID uuid) throws SQLException, AuthorizeException {
if (!(authorizeService.isAdmin(context) || if (!(authorizeService.isAdmin(context) ||
(parent != null && authorizeService.authorizeActionBoolean(context, parent, Constants.ADD)))) { (parent != null && authorizeService.authorizeActionBoolean(context, parent, Constants.ADD)))) {
throw new AuthorizeException( throw new AuthorizeException(
"Only administrators can create communities"); "Only administrators can create communities");
} }
Community newCommunity = communityDAO.create(context, new Community()); Community newCommunity;
if (uuid != null) {
newCommunity = communityDAO.create(context, new Community(uuid));
} else {
newCommunity = communityDAO.create(context, new Community());
}
if (parent != null) { if (parent != null) {
parent.addSubCommunity(newCommunity); parent.addSubCommunity(newCommunity);
@@ -129,8 +140,8 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
} }
log.info(LogHelper.getHeader(context, "create_community", log.info(LogHelper.getHeader(context, "create_community",
"community_id=" + newCommunity.getID()) "community_id=" + newCommunity.getID())
+ ",handle=" + newCommunity.getHandle()); + ",handle=" + newCommunity.getHandle());
return newCommunity; return newCommunity;
} }
@@ -383,17 +394,26 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
@Override @Override
public Community createSubcommunity(Context context, Community parentCommunity) public Community createSubcommunity(Context context, Community parentCommunity)
throws SQLException, AuthorizeException { throws SQLException, AuthorizeException {
return createSubcommunity(context, parentCommunity, null); return createSubcommunity(context, parentCommunity, null);
} }
@Override @Override
public Community createSubcommunity(Context context, Community parentCommunity, String handle) public Community createSubcommunity(Context context, Community parentCommunity, String handle)
throws SQLException, AuthorizeException { throws SQLException, AuthorizeException {
return createSubcommunity(context, parentCommunity, handle, null);
}
@Override
public Community createSubcommunity(Context context, Community parentCommunity, String handle,
UUID uuid) throws SQLException, AuthorizeException {
// Check authorisation // Check authorisation
authorizeService.authorizeAction(context, parentCommunity, Constants.ADD); authorizeService.authorizeAction(context, parentCommunity, Constants.ADD);
Community c = create(parentCommunity, context, handle); Community c;
c = create(parentCommunity, context, handle, uuid);
addSubcommunity(context, parentCommunity, c); addSubcommunity(context, parentCommunity, c);
return c; return c;

View File

@@ -38,8 +38,8 @@ import org.hibernate.annotations.GenericGenerator;
@Table(name = "dspaceobject") @Table(name = "dspaceobject")
public abstract class DSpaceObject implements Serializable, ReloadableEntity<java.util.UUID> { public abstract class DSpaceObject implements Serializable, ReloadableEntity<java.util.UUID> {
@Id @Id
@GeneratedValue(generator = "system-uuid") @GeneratedValue(generator = "predefined-uuid")
@GenericGenerator(name = "system-uuid", strategy = "uuid2") @GenericGenerator(name = "predefined-uuid", strategy = "org.dspace.content.PredefinedUUIDGenerator")
@Column(name = "uuid", unique = true, nullable = false, insertable = true, updatable = false) @Column(name = "uuid", unique = true, nullable = false, insertable = true, updatable = false)
protected java.util.UUID id; protected java.util.UUID id;
@@ -76,6 +76,15 @@ public abstract class DSpaceObject implements Serializable, ReloadableEntity<jav
@Transient @Transient
private boolean modified = false; private boolean modified = false;
/**
* This will read our predefinedUUID property to pass it along to the UUID generator
*/
@Transient
protected UUID predefinedUUID;
public UUID getPredefinedUUID() {
return predefinedUUID;
}
protected DSpaceObject() { protected DSpaceObject() {
} }

View File

@@ -13,6 +13,7 @@ import java.util.Date;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import java.util.UUID;
import javax.persistence.CascadeType; import javax.persistence.CascadeType;
import javax.persistence.Column; import javax.persistence.Column;
import javax.persistence.Entity; import javax.persistence.Entity;
@@ -122,6 +123,16 @@ public class Item extends DSpaceObject implements DSpaceObjectLegacySupport {
} }
/**
* Takes a pre-determined UUID to be passed to the object to allow for the
* restoration of previously defined UUID's.
*
* @param uuid Takes a uuid to be passed to the Pre-Defined UUID Generator
*/
protected Item(UUID uuid) {
this.predefinedUUID = uuid;
}
/** /**
* Find out if the item is part of the main archive * Find out if the item is part of the main archive
* *

View File

@@ -176,16 +176,29 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
@Override @Override
public Item create(Context context, WorkspaceItem workspaceItem) throws SQLException, AuthorizeException { public Item create(Context context, WorkspaceItem workspaceItem) throws SQLException, AuthorizeException {
return create(context, workspaceItem, null);
}
@Override
public Item create(Context context, WorkspaceItem workspaceItem,
UUID uuid) throws SQLException, AuthorizeException {
Collection collection = workspaceItem.getCollection();
authorizeService.authorizeAction(context, collection, Constants.ADD);
if (workspaceItem.getItem() != null) { if (workspaceItem.getItem() != null) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"Attempting to create an item for a workspace item that already contains an item"); "Attempting to create an item for a workspace item that already contains an item");
}
Item item = null;
if (uuid != null) {
item = createItem(context, uuid);
} else {
item = createItem(context);
} }
Item item = createItem(context);
workspaceItem.setItem(item); workspaceItem.setItem(item);
log.info(LogHelper.getHeader(context, "create_item", "item_id=" log.info(LogHelper.getHeader(context, "create_item", "item_id="
+ item.getID())); + item.getID()));
return item; return item;
} }
@@ -418,6 +431,30 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
return bitstreamList; return bitstreamList;
} }
protected Item createItem(Context context, UUID uuid) throws SQLException, AuthorizeException {
Item item;
if (uuid != null) {
item = itemDAO.create(context, new Item(uuid));
} else {
item = itemDAO.create(context, new Item());
}
// set discoverable to true (default)
item.setDiscoverable(true);
// Call update to give the item a last modified date. OK this isn't
// amazingly efficient but creates don't happen that often.
context.turnOffAuthorisationSystem();
update(context, item);
context.restoreAuthSystemState();
context.addEvent(new Event(Event.CREATE, Constants.ITEM, item.getID(),
null, getIdentifiers(context, item)));
log.info(LogHelper.getHeader(context, "create_item", "item_id=" + item.getID()));
return item;
}
protected Item createItem(Context context) throws SQLException, AuthorizeException { protected Item createItem(Context context) throws SQLException, AuthorizeException {
Item item = itemDAO.create(context, new Item()); Item item = itemDAO.create(context, new Item());
// set discoverable to true (default) // set discoverable to true (default)

View File

@@ -0,0 +1,33 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content;
import java.io.Serializable;
import java.util.UUID;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.id.UUIDGenerator;
/**
* Allows DSpaceObjects to provide a pre-determined UUID
*
* @author April Herron
*/
public class PredefinedUUIDGenerator extends UUIDGenerator {
@Override
public Serializable generate(SharedSessionContractImplementor session, Object object) {
if (object instanceof DSpaceObject) {
UUID uuid = ((DSpaceObject) object).getPredefinedUUID();
if (uuid != null) {
return uuid;
}
}
return super.generate(session, object);
}
}

View File

@@ -174,4 +174,9 @@ public class RelationshipTypeServiceImpl implements RelationshipTypeService {
} }
relationshipTypeDAO.delete(context, relationshipType); relationshipTypeDAO.delete(context, relationshipType);
} }
@Override
public int countByEntityType(Context context, EntityType entityType) throws SQLException {
return relationshipTypeDAO.countByEntityType(context, entityType);
}
} }

View File

@@ -12,6 +12,7 @@ import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.UUID;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.app.util.DCInputsReaderException; import org.dspace.app.util.DCInputsReaderException;
@@ -80,6 +81,12 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
@Override @Override
public WorkspaceItem create(Context context, Collection collection, boolean template) public WorkspaceItem create(Context context, Collection collection, boolean template)
throws AuthorizeException, SQLException {
return create(context, collection, null, template);
}
@Override
public WorkspaceItem create(Context context, Collection collection, UUID uuid, boolean template)
throws AuthorizeException, SQLException { throws AuthorizeException, SQLException {
// Check the user has permission to ADD to the collection // Check the user has permission to ADD to the collection
authorizeService.authorizeAction(context, collection, Constants.ADD); authorizeService.authorizeAction(context, collection, Constants.ADD);
@@ -89,7 +96,12 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
// Create an item // Create an item
Item item = itemService.create(context, workspaceItem); Item item;
if (uuid != null) {
item = itemService.create(context, workspaceItem, uuid);
} else {
item = itemService.create(context, workspaceItem);
}
item.setSubmitter(context.getCurrentUser()); item.setSubmitter(context.getCurrentUser());
// Now create the policies for the submitter to modify item and contents // Now create the policies for the submitter to modify item and contents

View File

@@ -120,4 +120,16 @@ public interface RelationshipTypeDAO extends GenericDAO<RelationshipType> {
List<RelationshipType> findByEntityType(Context context, EntityType entityType, Boolean isLeft, List<RelationshipType> findByEntityType(Context context, EntityType entityType, Boolean isLeft,
Integer limit, Integer offset) Integer limit, Integer offset)
throws SQLException; throws SQLException;
/**
* Count all RelationshipType objects for which the given EntityType
* is equal to either the leftType or the rightType
*
* @param context DSpace context object
* @param entityType The EntityType object used to check the leftType and rightType properties
* @return Total RelationshipType objects
* @throws SQLException If database error
*/
public int countByEntityType(Context context, EntityType entityType) throws SQLException;
} }

View File

@@ -32,6 +32,7 @@ import org.dspace.eperson.EPerson;
import org.hibernate.Criteria; import org.hibernate.Criteria;
import org.hibernate.criterion.Criterion; import org.hibernate.criterion.Criterion;
import org.hibernate.criterion.DetachedCriteria; import org.hibernate.criterion.DetachedCriteria;
import org.hibernate.criterion.Order;
import org.hibernate.criterion.Projections; import org.hibernate.criterion.Projections;
import org.hibernate.criterion.Property; import org.hibernate.criterion.Property;
import org.hibernate.criterion.Restrictions; import org.hibernate.criterion.Restrictions;
@@ -54,14 +55,14 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
@Override @Override
public Iterator<Item> findAll(Context context, boolean archived) throws SQLException { public Iterator<Item> findAll(Context context, boolean archived) throws SQLException {
Query query = createQuery(context, "FROM Item WHERE inArchive= :in_archive"); Query query = createQuery(context, "FROM Item WHERE inArchive=:in_archive ORDER BY id");
query.setParameter("in_archive", archived); query.setParameter("in_archive", archived);
return iterate(query); return iterate(query);
} }
@Override @Override
public Iterator<Item> findAll(Context context, boolean archived, int limit, int offset) throws SQLException { public Iterator<Item> findAll(Context context, boolean archived, int limit, int offset) throws SQLException {
Query query = createQuery(context, "FROM Item WHERE inArchive= :in_archive"); Query query = createQuery(context, "FROM Item WHERE inArchive=:in_archive ORDER BY id");
query.setParameter("in_archive", archived); query.setParameter("in_archive", archived);
query.setFirstResult(offset); query.setFirstResult(offset);
query.setMaxResults(limit); query.setMaxResults(limit);
@@ -71,7 +72,8 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
@Override @Override
public Iterator<Item> findAll(Context context, boolean archived, boolean withdrawn) throws SQLException { public Iterator<Item> findAll(Context context, boolean archived, boolean withdrawn) throws SQLException {
Query query = createQuery(context, "FROM Item WHERE inArchive= :in_archive or withdrawn = :withdrawn"); Query query = createQuery(context,
"FROM Item WHERE inArchive=:in_archive or withdrawn=:withdrawn ORDER BY id");
query.setParameter("in_archive", archived); query.setParameter("in_archive", archived);
query.setParameter("withdrawn", withdrawn); query.setParameter("withdrawn", withdrawn);
return iterate(query); return iterate(query);
@@ -89,6 +91,7 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
if (lastModified != null) { if (lastModified != null) {
queryStr.append(" AND last_modified > :last_modified"); queryStr.append(" AND last_modified > :last_modified");
} }
queryStr.append(" ORDER BY i.id");
Query query = createQuery(context, queryStr.toString()); Query query = createQuery(context, queryStr.toString());
query.setParameter("in_archive", archived); query.setParameter("in_archive", archived);
@@ -102,7 +105,8 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
@Override @Override
public Iterator<Item> findBySubmitter(Context context, EPerson eperson) throws SQLException { public Iterator<Item> findBySubmitter(Context context, EPerson eperson) throws SQLException {
Query query = createQuery(context, "FROM Item WHERE inArchive= :in_archive and submitter= :submitter"); Query query = createQuery(context,
"FROM Item WHERE inArchive=:in_archive and submitter=:submitter ORDER BY id");
query.setParameter("in_archive", true); query.setParameter("in_archive", true);
query.setParameter("submitter", eperson); query.setParameter("submitter", eperson);
return iterate(query); return iterate(query);
@@ -114,7 +118,7 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
if (!retrieveAllItems) { if (!retrieveAllItems) {
return findBySubmitter(context, eperson); return findBySubmitter(context, eperson);
} }
Query query = createQuery(context, "FROM Item WHERE submitter= :submitter"); Query query = createQuery(context, "FROM Item WHERE submitter=:submitter ORDER BY id");
query.setParameter("submitter", eperson); query.setParameter("submitter", eperson);
return iterate(query); return iterate(query);
} }
@@ -146,7 +150,7 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
if (value != null) { if (value != null) {
hqlQueryString += " AND STR(metadatavalue.value) = :text_value"; hqlQueryString += " AND STR(metadatavalue.value) = :text_value";
} }
Query query = createQuery(context, hqlQueryString); Query query = createQuery(context, hqlQueryString + " ORDER BY item.id");
query.setParameter("in_archive", inArchive); query.setParameter("in_archive", inArchive);
query.setParameter("metadata_field", metadataField); query.setParameter("metadata_field", metadataField);
@@ -262,6 +266,8 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
criteria.add(Subqueries.notExists(subcriteria)); criteria.add(Subqueries.notExists(subcriteria));
} }
} }
criteria.addOrder(Order.asc("item.id"));
log.debug(String.format("Running custom query with %d filters", index)); log.debug(String.format("Running custom query with %d filters", index));
return ((List<Item>) criteria.list()).iterator(); return ((List<Item>) criteria.list()).iterator();
@@ -274,7 +280,7 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
Query query = createQuery(context, Query query = createQuery(context,
"SELECT item FROM Item as item join item.metadata metadatavalue " + "SELECT item FROM Item as item join item.metadata metadatavalue " +
"WHERE item.inArchive=:in_archive AND metadatavalue.metadataField = :metadata_field AND " + "WHERE item.inArchive=:in_archive AND metadatavalue.metadataField = :metadata_field AND " +
"metadatavalue.authority = :authority"); "metadatavalue.authority = :authority ORDER BY item.id");
query.setParameter("in_archive", inArchive); query.setParameter("in_archive", inArchive);
query.setParameter("metadata_field", metadataField); query.setParameter("metadata_field", metadataField);
query.setParameter("authority", authority); query.setParameter("authority", authority);
@@ -286,7 +292,7 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
Integer offset) throws SQLException { Integer offset) throws SQLException {
Query query = createQuery(context, Query query = createQuery(context,
"select i from Item i join i.collections c " + "select i from Item i join i.collections c " +
"WHERE :collection IN c AND i.inArchive=:in_archive"); "WHERE :collection IN c AND i.inArchive=:in_archive ORDER BY i.id");
query.setParameter("collection", collection); query.setParameter("collection", collection);
query.setParameter("in_archive", true); query.setParameter("in_archive", true);
if (offset != null) { if (offset != null) {
@@ -309,6 +315,8 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
criteriaBuilder.notEqual(itemRoot.get(Item_.owningCollection), collection), criteriaBuilder.notEqual(itemRoot.get(Item_.owningCollection), collection),
criteriaBuilder.isMember(collection, itemRoot.get(Item_.collections)), criteriaBuilder.isMember(collection, itemRoot.get(Item_.collections)),
criteriaBuilder.isTrue(itemRoot.get(Item_.inArchive)))); criteriaBuilder.isTrue(itemRoot.get(Item_.inArchive))));
criteriaQuery.orderBy(criteriaBuilder.asc(itemRoot.get(Item_.id)));
criteriaQuery.groupBy(itemRoot.get(Item_.id));
return list(context, criteriaQuery, false, Item.class, limit, offset).iterator(); return list(context, criteriaQuery, false, Item.class, limit, offset).iterator();
} }
@@ -327,7 +335,8 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
@Override @Override
public Iterator<Item> findAllByCollection(Context context, Collection collection) throws SQLException { public Iterator<Item> findAllByCollection(Context context, Collection collection) throws SQLException {
Query query = createQuery(context, "select i from Item i join i.collections c WHERE :collection IN c"); Query query = createQuery(context,
"select i from Item i join i.collections c WHERE :collection IN c ORDER BY i.id");
query.setParameter("collection", collection); query.setParameter("collection", collection);
return iterate(query); return iterate(query);
@@ -336,7 +345,8 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
@Override @Override
public Iterator<Item> findAllByCollection(Context context, Collection collection, Integer limit, Integer offset) public Iterator<Item> findAllByCollection(Context context, Collection collection, Integer limit, Integer offset)
throws SQLException { throws SQLException {
Query query = createQuery(context, "select i from Item i join i.collections c WHERE :collection IN c"); Query query = createQuery(context,
"select i from Item i join i.collections c WHERE :collection IN c ORDER BY i.id");
query.setParameter("collection", collection); query.setParameter("collection", collection);
if (offset != null) { if (offset != null) {
@@ -381,7 +391,8 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
@Override @Override
public Iterator<Item> findByLastModifiedSince(Context context, Date since) public Iterator<Item> findByLastModifiedSince(Context context, Date since)
throws SQLException { throws SQLException {
Query query = createQuery(context, "SELECT i FROM item i WHERE last_modified > :last_modified"); Query query = createQuery(context,
"SELECT i FROM item i WHERE last_modified > :last_modified ORDER BY id");
query.setParameter("last_modified", since, TemporalType.TIMESTAMP); query.setParameter("last_modified", since, TemporalType.TIMESTAMP);
return iterate(query); return iterate(query);
} }

View File

@@ -8,6 +8,7 @@
package org.dspace.content.dao.impl; package org.dspace.content.dao.impl;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.LinkedList;
import java.util.List; import java.util.List;
import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.CriteriaQuery;
@@ -92,6 +93,9 @@ public class RelationshipTypeDAOImpl extends AbstractHibernateDAO<RelationshipTy
.equal(relationshipTypeRoot.get(RelationshipType_.rightType), entityType) .equal(relationshipTypeRoot.get(RelationshipType_.rightType), entityType)
) )
); );
List<javax.persistence.criteria.Order> orderList = new LinkedList<>();
orderList.add(criteriaBuilder.asc(relationshipTypeRoot.get(RelationshipType_.ID)));
criteriaQuery.orderBy(orderList);
return list(context, criteriaQuery, false, RelationshipType.class, limit, offset); return list(context, criteriaQuery, false, RelationshipType.class, limit, offset);
} }
@@ -120,4 +124,18 @@ public class RelationshipTypeDAOImpl extends AbstractHibernateDAO<RelationshipTy
} }
return list(context, criteriaQuery, false, RelationshipType.class, limit, offset); return list(context, criteriaQuery, false, RelationshipType.class, limit, offset);
} }
@Override
public int countByEntityType(Context context, EntityType entityType) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, RelationshipType.class);
Root<RelationshipType> relationshipTypeRoot = criteriaQuery.from(RelationshipType.class);
criteriaQuery.select(relationshipTypeRoot);
criteriaQuery.where(criteriaBuilder.or(
criteriaBuilder.equal(relationshipTypeRoot.get(RelationshipType_.leftType), entityType),
criteriaBuilder.equal(relationshipTypeRoot.get(RelationshipType_.rightType), entityType)
));
return count(context, criteriaQuery, criteriaBuilder, relationshipTypeRoot);
}
} }

View File

@@ -777,9 +777,6 @@ public abstract class AbstractMETSDisseminator
Mets mets = new Mets(); Mets mets = new Mets();
String identifier = "DB-ID-" + dso.getID(); String identifier = "DB-ID-" + dso.getID();
if (dso.getHandle() != null) {
identifier = dso.getHandle().replace('/', '-');
}
// this ID should be globally unique (format: DSpace_[objType]_[handle with slash replaced with a dash]) // this ID should be globally unique (format: DSpace_[objType]_[handle with slash replaced with a dash])
mets.setID("DSpace_" + Constants.typeText[dso.getType()] + "_" + identifier); mets.setID("DSpace_" + Constants.typeText[dso.getType()] + "_" + identifier);

View File

@@ -16,6 +16,7 @@ import java.net.URLConnection;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.UUID;
import java.util.zip.ZipEntry; import java.util.zip.ZipEntry;
import java.util.zip.ZipFile; import java.util.zip.ZipFile;
@@ -409,6 +410,7 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester {
// get handle from manifest // get handle from manifest
handle = getObjectHandle(manifest); handle = getObjectHandle(manifest);
} }
UUID uuid = getObjectID(manifest);
// -- Step 2 -- // -- Step 2 --
// Create our DSpace Object based on info parsed from manifest, and // Create our DSpace Object based on info parsed from manifest, and
@@ -416,7 +418,7 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester {
DSpaceObject dso; DSpaceObject dso;
try { try {
dso = PackageUtils.createDSpaceObject(context, parent, dso = PackageUtils.createDSpaceObject(context, parent,
type, handle, params); type, handle, uuid, params);
} catch (SQLException sqle) { } catch (SQLException sqle) {
throw new PackageValidationException("Exception while ingesting " throw new PackageValidationException("Exception while ingesting "
+ pkgFile.getPath(), sqle); + pkgFile.getPath(), sqle);
@@ -727,7 +729,6 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester {
// retrieve path/name of file in manifest // retrieve path/name of file in manifest
String path = METSManifest.getFileName(mfile); String path = METSManifest.getFileName(mfile);
// extract the file input stream from package (or retrieve // extract the file input stream from package (or retrieve
// externally, if it is an externally referenced file) // externally, if it is an externally referenced file)
InputStream fileStream = getFileInputStream(pkgFile, params, path); InputStream fileStream = getFileInputStream(pkgFile, params, path);
@@ -1506,4 +1507,22 @@ public abstract class AbstractMETSIngester extends AbstractPackageIngester {
*/ */
public abstract String getConfigurationName(); public abstract String getConfigurationName();
public UUID getObjectID(METSManifest manifest)
throws PackageValidationException {
Element mets = manifest.getMets();
String idStr = mets.getAttributeValue("ID");
if (idStr == null || idStr.length() == 0) {
throw new PackageValidationException("Manifest is missing the required mets@ID attribute.");
}
if (idStr.contains("DB-ID-")) {
idStr = idStr.substring(idStr.lastIndexOf("DB-ID-") + 6, idStr.length());
}
try {
return UUID.fromString(idStr);
} catch (IllegalArgumentException ignored) {
//do nothing
}
return null;
}
} }

View File

@@ -17,6 +17,7 @@ import java.util.HashMap;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.UUID;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
@@ -447,6 +448,7 @@ public class PackageUtils {
* @param parent Parent Object * @param parent Parent Object
* @param type Type of new Object * @param type Type of new Object
* @param handle Handle of new Object (may be null) * @param handle Handle of new Object (may be null)
* @param uuid
* @param params Properties-style list of options (interpreted by each packager). * @param params Properties-style list of options (interpreted by each packager).
* @return newly created DSpace Object (or null) * @return newly created DSpace Object (or null)
* @throws AuthorizeException if authorization error * @throws AuthorizeException if authorization error
@@ -454,29 +456,55 @@ public class PackageUtils {
* @throws IOException if IO error * @throws IOException if IO error
*/ */
public static DSpaceObject createDSpaceObject(Context context, DSpaceObject parent, int type, String handle, public static DSpaceObject createDSpaceObject(Context context, DSpaceObject parent, int type, String handle,
PackageParameters params) UUID uuid, PackageParameters params)
throws AuthorizeException, SQLException, IOException { throws AuthorizeException, SQLException, IOException {
DSpaceObject dso = null; DSpaceObject dso = null;
switch (type) { switch (type) {
case Constants.COLLECTION: case Constants.COLLECTION:
dso = collectionService.create(context, (Community) parent, handle); Collection collection = collectionService.find(context, uuid);
if (collection != null) {
dso = collectionService.create(context, (Community) parent, handle);
} else {
dso = collectionService.create(context, (Community) parent, handle, uuid);
}
return dso; return dso;
case Constants.COMMUNITY: case Constants.COMMUNITY:
// top-level community? // top-level community?
if (parent == null || parent.getType() == Constants.SITE) { if (parent == null || parent.getType() == Constants.SITE) {
dso = communityService.create(null, context, handle); Community community = communityService.find(context, uuid);
if (community != null) {
dso = communityService.create(null, context, handle);
} else {
dso = communityService.create(null, context, handle, uuid);
}
} else { } else {
dso = communityService.createSubcommunity(context, ((Community) parent), handle); Community community = communityService.find(context, uuid);
if (community != null) {
dso = communityService.createSubcommunity(context, ((Community) parent), handle);
} else {
dso = communityService.createSubcommunity(context, ((Community) parent), handle, uuid);
}
} }
return dso; return dso;
case Constants.ITEM: case Constants.ITEM:
//Initialize a WorkspaceItem //Initialize a WorkspaceItem
//(Note: Handle is not set until item is finished) //(Note: Handle is not set until item is finished)
WorkspaceItem wsi = workspaceItemService Item item = itemService.find(context, uuid);
.create(context, (Collection) parent, params.useCollectionTemplate()); if (item != null) {
return item;
}
WorkspaceItem wsi = null;
if (!params.replaceModeEnabled()) {
wsi = workspaceItemService.create(context, (Collection)parent, params.useCollectionTemplate());
} else {
wsi = workspaceItemService.create(context, (Collection)parent,
uuid, params.useCollectionTemplate());
}
// Please note that we are returning an Item which is *NOT* yet in the Archive, // Please note that we are returning an Item which is *NOT* yet in the Archive,
// and doesn't yet have a handle assigned. // and doesn't yet have a handle assigned.

View File

@@ -12,6 +12,7 @@ import java.io.InputStream;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.UUID;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream; import org.dspace.content.Bitstream;
@@ -60,6 +61,21 @@ public interface CollectionService
public Collection create(Context context, Community community, String handle) throws SQLException, public Collection create(Context context, Community community, String handle) throws SQLException,
AuthorizeException; AuthorizeException;
/**
* Create a new collection with the supplied handle and ID.
* Once created the collection is added to the given community
*
* @param context DSpace context object
* @param community DSpace Community (parent)
* @param handle the pre-determined Handle to assign to the new collection
* @param uuid the pre-determined UUID to assign to the new collection
* @return the newly created collection
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
*/
public Collection create(Context context, Community community, String handle, UUID uuid) throws SQLException,
AuthorizeException;
/** /**
* Get all collections in the system. These are alphabetically sorted by * Get all collections in the system. These are alphabetically sorted by
* collection name. * collection name.
@@ -308,7 +324,7 @@ public interface CollectionService
throws java.sql.SQLException; throws java.sql.SQLException;
/** /**
* *
* @param context DSpace Context * @param context DSpace Context
* @param group EPerson Group * @param group EPerson Group
* @return the collection, if any, that has the specified group as administrators or submitters * @return the collection, if any, that has the specified group as administrators or submitters
@@ -349,7 +365,7 @@ public interface CollectionService
* NOTE: for better performance, this method retrieves its results from an * NOTE: for better performance, this method retrieves its results from an
* index (cache) and does not query the database directly. * index (cache) and does not query the database directly.
* This means that results may be stale or outdated until DS-4524 is resolved" * This means that results may be stale or outdated until DS-4524 is resolved"
* *
* @param q limit the returned collection to those with metadata values matching the query terms. * @param q limit the returned collection to those with metadata values matching the query terms.
* The terms are used to make also a prefix query on SOLR so it can be used to implement * The terms are used to make also a prefix query on SOLR so it can be used to implement
* an autosuggest feature over the collection name * an autosuggest feature over the collection name
@@ -369,7 +385,7 @@ public interface CollectionService
* NOTE: for better performance, this method retrieves its results from an index (cache) * NOTE: for better performance, this method retrieves its results from an index (cache)
* and does not query the database directly. * and does not query the database directly.
* This means that results may be stale or outdated until DS-4524 is resolved." * This means that results may be stale or outdated until DS-4524 is resolved."
* *
* @param q limit the returned collection to those with metadata values matching the query terms. * @param q limit the returned collection to those with metadata values matching the query terms.
* The terms are used to make also a prefix query on SOLR so it can be used to implement * The terms are used to make also a prefix query on SOLR so it can be used to implement
* an autosuggest feature over the collection name * an autosuggest feature over the collection name

View File

@@ -11,6 +11,7 @@ import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.List; import java.util.List;
import java.util.UUID;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream; import org.dspace.content.Bitstream;
@@ -53,6 +54,20 @@ public interface CommunityService extends DSpaceObjectService<Community>, DSpace
public Community create(Community parent, Context context, String handle) public Community create(Community parent, Context context, String handle)
throws SQLException, AuthorizeException; throws SQLException, AuthorizeException;
/**
* Create a new top-level community, with a new ID.
*
* @param parent parent community
* @param context DSpace context object
* @param handle the pre-determined Handle to assign to the new community
* @param uuid the pre-determined uuid to assign to the new community
* @return the newly created community
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
*/
public Community create(Community parent, Context context,
String handle, UUID uuid) throws SQLException, AuthorizeException;
/** /**
* Get a list of all communities in the system. These are alphabetically * Get a list of all communities in the system. These are alphabetically
@@ -202,6 +217,20 @@ public interface CommunityService extends DSpaceObjectService<Community>, DSpace
public Community createSubcommunity(Context context, Community parentCommunity, String handle) public Community createSubcommunity(Context context, Community parentCommunity, String handle)
throws SQLException, AuthorizeException; throws SQLException, AuthorizeException;
/**
* Create a new sub-community within this community.
*
* @param context context
* @param handle the pre-determined Handle to assign to the new community
* @param parentCommunity parent community
* @param uuid the pre-determined UUID to assign to the new community
* @return the new community
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
*/
public Community createSubcommunity(Context context, Community parentCommunity, String handle, UUID uuid)
throws SQLException, AuthorizeException;
/** /**
* Add an existing community as a subcommunity to the community * Add an existing community as a subcommunity to the community
* *

View File

@@ -43,9 +43,8 @@ public interface ItemService
public Thumbnail getThumbnail(Context context, Item item, boolean requireOriginal) throws SQLException; public Thumbnail getThumbnail(Context context, Item item, boolean requireOriginal) throws SQLException;
/** /**
* Create a new item, with a new internal ID. This method is not public, * Create a new item, with a new internal ID. Authorization is done
* since items need to be created as workspace items. Authorisation is the * inside of this method.
* responsibility of the caller.
* *
* @param context DSpace context object * @param context DSpace context object
* @param workspaceItem in progress workspace item * @param workspaceItem in progress workspace item
@@ -55,6 +54,19 @@ public interface ItemService
*/ */
public Item create(Context context, WorkspaceItem workspaceItem) throws SQLException, AuthorizeException; public Item create(Context context, WorkspaceItem workspaceItem) throws SQLException, AuthorizeException;
/**
* Create a new item, with a provided ID. Authorisation is done
* inside of this method.
*
* @param context DSpace context object
* @param workspaceItem in progress workspace item
* @param uuid the pre-determined UUID to assign to the new item
* @return the newly created item
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
*/
public Item create(Context context, WorkspaceItem workspaceItem, UUID uuid) throws SQLException, AuthorizeException;
/** /**
* Create an empty template item for this collection. If one already exists, * Create an empty template item for this collection. If one already exists,
* no action is taken. Caution: Make sure you call <code>update</code> on * no action is taken. Caution: Make sure you call <code>update</code> on

View File

@@ -103,6 +103,16 @@ public interface RelationshipTypeService extends DSpaceCRUDService<RelationshipT
List<RelationshipType> findByEntityType(Context context, EntityType entityType, Integer limit, Integer offset) List<RelationshipType> findByEntityType(Context context, EntityType entityType, Integer limit, Integer offset)
throws SQLException; throws SQLException;
/**
* Count all RelationshipType objects for which the given EntityType
* is equal to either the leftType or the rightType
*
* @param context DSpace context object
* @param entityType The EntityType object used to check the leftType and rightType properties
* @return Total RelationshipType objects
* @throws SQLException If database error
*/
public int countByEntityType(Context context, EntityType entityType) throws SQLException;
/** /**
* This method will return a list of RelationshipType objects for which the given EntityType object is equal * This method will return a list of RelationshipType objects for which the given EntityType object is equal

View File

@@ -11,6 +11,7 @@ import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.UUID;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection; import org.dspace.content.Collection;
@@ -55,6 +56,22 @@ public interface WorkspaceItemService extends InProgressSubmissionService<Worksp
public WorkspaceItem create(Context context, Collection collection, boolean template) public WorkspaceItem create(Context context, Collection collection, boolean template)
throws AuthorizeException, SQLException; throws AuthorizeException, SQLException;
/**
* Create a new workspace item, with a new ID. An Item is also created. The
* submitter is the current user in the context.
*
* @param context DSpace context object
* @param collection Collection being submitted to
* @param uuid the preferred uuid of the new item (used if restoring an item and retaining old uuid)
* @param template if <code>true</code>, the workspace item starts as a copy
* of the collection's template item
* @return the newly created workspace item
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
*/
public WorkspaceItem create(Context context, Collection collection, UUID uuid, boolean template)
throws AuthorizeException, SQLException;
public WorkspaceItem create(Context c, WorkflowItem wfi) throws SQLException, AuthorizeException; public WorkspaceItem create(Context c, WorkflowItem wfi) throws SQLException, AuthorizeException;

View File

@@ -174,8 +174,13 @@ public class SolrServiceMetadataBrowseIndexingPlugin implements SolrServiceIndex
Boolean.FALSE), Boolean.FALSE),
true); true);
if (!ignorePrefered) { if (!ignorePrefered) {
preferedLabel = choiceAuthorityService try {
.getLabel(values.get(x), collection, values.get(x).getLanguage()); preferedLabel = choiceAuthorityService
.getLabel(values.get(x), collection, values.get(x).getLanguage());
} catch (Exception e) {
log.warn("Failed to get preferred label for "
+ values.get(x).getMetadataField().toString('.'), e);
}
} }
List<String> variants = null; List<String> variants = null;
@@ -193,9 +198,13 @@ public class SolrServiceMetadataBrowseIndexingPlugin implements SolrServiceIndex
Boolean.FALSE), Boolean.FALSE),
true); true);
if (!ignoreVariants) { if (!ignoreVariants) {
variants = choiceAuthorityService try {
.getVariants( variants = choiceAuthorityService
values.get(x), collection); .getVariants(values.get(x), collection);
} catch (Exception e) {
log.warn("Failed to get variants for "
+ values.get(x).getMetadataField().toString(), e);
}
} }
if (StringUtils if (StringUtils

View File

@@ -359,10 +359,13 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
"discovery.index.authority.ignore-prefered", "discovery.index.authority.ignore-prefered",
Boolean.FALSE), Boolean.FALSE),
true); true);
if (!ignorePrefered) {
preferedLabel = choiceAuthorityService if (!ignorePrefered) {
.getLabel(meta, collection, meta.getLanguage()); try {
preferedLabel = choiceAuthorityService.getLabel(meta, collection, meta.getLanguage());
} catch (Exception e) {
log.warn("Failed to get preferred label for " + field, e);
}
} }
boolean ignoreVariants = boolean ignoreVariants =
@@ -377,8 +380,12 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
Boolean.FALSE), Boolean.FALSE),
true); true);
if (!ignoreVariants) { if (!ignoreVariants) {
variants = choiceAuthorityService try {
variants = choiceAuthorityService
.getVariants(meta, collection); .getVariants(meta, collection);
} catch (Exception e) {
log.warn("Failed to get variants for " + field, e);
}
} }
} }

View File

@@ -77,6 +77,13 @@ public interface DSpaceRunnableHandler {
*/ */
public void logError(String message); public void logError(String message);
/**
* This method will perform the error logging of the message given along with a stack trace
* @param message The message to be logged as an error
* @param throwable The original exception
*/
public void logError(String message, Throwable throwable);
/** /**
* This method will print the help for the options and name * This method will print the help for the options and name
* @param options The options for the script * @param options The options for the script

View File

@@ -84,6 +84,12 @@ public class CommandLineDSpaceRunnableHandler implements DSpaceRunnableHandler {
log.error(message); log.error(message);
} }
@Override
public void logError(String message, Throwable throwable) {
System.err.println(message);
log.error(message, throwable);
}
@Override @Override
public void printHelp(Options options, String name) { public void printHelp(Options options, String name) {
if (options != null) { if (options != null) {

View File

@@ -0,0 +1,15 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-------------------------------------------------------------------------------------
---- ALTER table collection
-------------------------------------------------------------------------------------
-- Remove the legacy per-collection workflow approver group columns; workflow role
-- groups are now resolved through the configurable workflow instead of these columns.
ALTER TABLE collection DROP COLUMN workflow_step_1;
ALTER TABLE collection DROP COLUMN workflow_step_2;
ALTER TABLE collection DROP COLUMN workflow_step_3;

View File

@@ -0,0 +1,15 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-------------------------------------------------------------------------------------
---- ALTER table collection
-------------------------------------------------------------------------------------
-- Remove the legacy per-collection workflow approver group columns; workflow role
-- groups are now resolved through the configurable workflow instead of these columns.
ALTER TABLE collection DROP COLUMN workflow_step_1;
ALTER TABLE collection DROP COLUMN workflow_step_2;
ALTER TABLE collection DROP COLUMN workflow_step_3;

View File

@@ -0,0 +1,15 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-------------------------------------------------------------------------------------
---- ALTER table collection
-------------------------------------------------------------------------------------
-- Remove the legacy per-collection workflow approver group columns; workflow role
-- groups are now resolved through the configurable workflow instead of these columns.
ALTER TABLE collection DROP COLUMN workflow_step_1;
ALTER TABLE collection DROP COLUMN workflow_step_2;
ALTER TABLE collection DROP COLUMN workflow_step_3;

View File

@@ -0,0 +1,205 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.packager;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.io.File;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.UUID;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

import com.google.common.collect.Iterators;
import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.crosswalk.MetadataValidationException;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.packager.METSManifest;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.InstallItemService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.WorkspaceItemService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.jdom.Element;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

/**
 * Basic integration testing for the Packager restore feature.
 * <p>
 * Each test exports a DSpace object (Item, Collection or Community) to an AIP
 * zip via the {@code packager} script, then re-imports it and verifies that the
 * original UUID (read back from the METS manifest inside the zip) is preserved.
 *
 * @author Nathan Buckingham
 */
public class PackagerIT extends AbstractIntegrationTestWithDatabase {

    private ItemService itemService = ContentServiceFactory.getInstance().getItemService();
    private CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
    private CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
    private WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService();
    protected static final InstallItemService installItemService = ContentServiceFactory.getInstance()
            .getInstallItemService();
    protected ConfigurationService configService = DSpaceServicesFactory.getInstance().getConfigurationService();

    // Test fixture: a community tree with one collection holding one Publication item.
    protected Community child1;
    protected Collection col1;
    protected Item article;

    // Scratch zip file that each test exports into; deleted in destroy().
    File tempFile;

    @Before
    public void setup() throws IOException {
        context.turnOffAuthorisationSystem();
        parentCommunity = CommunityBuilder.createCommunity(context)
                .withName("Parent Community")
                .build();
        child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
                .withName("Sub Community")
                .build();
        col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build();

        // Create a new Publication (which is an Article)
        article = ItemBuilder.createItem(context, col1)
                .withTitle("Article")
                .withIssueDate("2017-10-17")
                .withEntityType("Publication")
                .build();

        tempFile = File.createTempFile("packagerExportTest", ".zip");
        context.restoreAuthSystemState();
    }

    @After
    public void destroy() throws SQLException, IOException {
        tempFile.delete();
    }

    @Test
    public void packagerExportUUIDTest() throws Exception {
        context.turnOffAuthorisationSystem();
        // Exporting an item must produce a non-empty AIP whose manifest carries the item's UUID.
        performExportScript(article.getHandle(), tempFile);
        assertTrue(tempFile.length() > 0);
        String idStr = getID();
        assertEquals(idStr, article.getID().toString());
    }

    @Test
    public void packagerImportUUIDTest() throws Exception {
        context.turnOffAuthorisationSystem();
        // Restoring a deleted item must recreate it under its original UUID.
        performExportScript(article.getHandle(), tempFile);
        String idStr = getID();
        itemService.delete(context, article);
        performImportScript(tempFile);
        Item item = itemService.find(context, UUID.fromString(idStr));
        assertNotNull(item);
    }

    @Test
    public void packagerImportColUUIDTest() throws Exception {
        context.turnOffAuthorisationSystem();
        configService.setProperty("upload.temp.dir", tempFile.getParent());

        // Restoring a deleted collection must recreate it under its original UUID.
        performExportScript(col1.getHandle(), tempFile);
        String idStr = getID();
        collectionService.delete(context, col1);
        performImportScript(tempFile);
        Collection collection = collectionService.find(context, UUID.fromString(idStr));
        assertNotNull(collection);
    }

    @Test
    public void packagerImportComUUIDTest() throws Exception {
        context.turnOffAuthorisationSystem();
        configService.setProperty("upload.temp.dir", tempFile.getParent());

        // Restoring a deleted community must recreate it under its original UUID.
        performExportScript(child1.getHandle(), tempFile);
        String idStr = getID();
        communityService.delete(context, child1);
        performImportScript(tempFile);
        Community community = communityService.find(context, UUID.fromString(idStr));
        assertNotNull(community);
    }

    @Test
    public void packagerUUIDAlreadyExistTest() throws Exception {
        context.turnOffAuthorisationSystem();
        // Item should be overwritten (not duplicated) if its UUID already exists and -f is used.
        performExportScript(article.getHandle(), tempFile);
        performImportScript(tempFile);
        Iterator<Item> items = itemService.findByCollection(context, col1);
        assertEquals(1, Iterators.size(items));
    }

    @Test
    public void packagerUUIDAlreadyExistWithoutForceTest() throws Exception {
        context.turnOffAuthorisationSystem();
        // Without -f the restore must fail because the UUID already exists, leaving
        // the pre-existing item (recognizable by its handle) untouched.
        performExportScript(article.getHandle(), tempFile);
        UUID id = article.getID();
        itemService.delete(context, article);
        WorkspaceItem workspaceItem = workspaceItemService.create(context, col1, id, false);
        installItemService.installItem(context, workspaceItem, "123456789/0100");
        performImportNoForceScript(tempFile);
        Iterator<Item> items = itemService.findByCollection(context, col1);
        Item testItem = items.next();
        assertFalse(items.hasNext()); //check to make sure there is only 1 item
        assertEquals("123456789/0100", testItem.getHandle()); //check to make sure the item wasn't overwritten as
        // it would have the old handle.
        itemService.delete(context, testItem);
    }

    /**
     * Extract the exported object's UUID from the METS manifest stored inside
     * the AIP zip (the manifest's root "ID" attribute, minus the "DB-ID-" prefix).
     *
     * @return the UUID string recorded in the manifest
     * @throws IOException if the zip cannot be read
     * @throws MetadataValidationException if the zip contains no METS manifest
     *         or the manifest fails to parse
     */
    private String getID() throws IOException, MetadataValidationException {
        // try-with-resources: the ZipFile must be closed or the temp file stays locked/leaked
        try (ZipFile zip = new ZipFile(tempFile)) {
            ZipEntry manifestEntry = zip.getEntry(METSManifest.MANIFEST_FILE);
            if (manifestEntry == null) {
                // Previously this fell through to a NullPointerException; fail explicitly instead.
                throw new MetadataValidationException(
                        "No METS manifest found in package " + tempFile.getPath());
            }
            // parse the manifest and sanity-check it.
            METSManifest manifest = METSManifest.create(zip.getInputStream(manifestEntry),
                    false, "AIP");
            Element mets = manifest.getMets();
            String idStr = mets.getAttributeValue("ID");
            if (idStr.contains("DB-ID-")) {
                idStr = idStr.substring(idStr.lastIndexOf("DB-ID-") + 6);
            }
            return idStr;
        }
    }

    // Runs "packager -d" to export the object at the given handle as an AIP zip.
    private void performExportScript(String handle, File outputFile) throws Exception {
        runDSpaceScript("packager", "-d", "-e", "admin@email.com", "-i", handle, "-t",
                "AIP", outputFile.getPath());
    }

    // Runs "packager -r -u" (restore, keep UUIDs, no force) on the given AIP zip.
    private void performImportNoForceScript(File outputFile) throws Exception {
        runDSpaceScript("packager", "-r", "-u", "-e", "admin@email.com", "-t",
                "AIP", outputFile.getPath());
    }

    // Runs "packager -r -f -u" (restore, force-overwrite, keep UUIDs) on the given AIP zip.
    private void performImportScript(File outputFile) throws Exception {
        runDSpaceScript("packager", "-r", "-f", "-u", "-e", "admin@email.com", "-t",
                "AIP", outputFile.getPath());
    }
}

View File

@@ -14,6 +14,8 @@ import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.spy; import static org.mockito.Mockito.spy;
@@ -33,12 +35,15 @@ import org.dspace.content.service.CommunityService;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
import org.dspace.content.service.WorkspaceItemService; import org.dspace.content.service.WorkspaceItemService;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.EPersonService;
import org.junit.After; import org.junit.After;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.junit.MockitoJUnitRunner;
import org.springframework.test.util.ReflectionTestUtils; import org.springframework.test.util.ReflectionTestUtils;
/** /**
@@ -46,6 +51,7 @@ import org.springframework.test.util.ReflectionTestUtils;
* *
* @author pvillega * @author pvillega
*/ */
@RunWith(MockitoJUnitRunner.class)
public class WorkspaceItemTest extends AbstractUnitTest { public class WorkspaceItemTest extends AbstractUnitTest {
/** /**
@@ -98,6 +104,7 @@ public class WorkspaceItemTest extends AbstractUnitTest {
// "Wire" our spy to be used by the current loaded object services // "Wire" our spy to be used by the current loaded object services
// (To ensure these services use the spy instead of the real service) // (To ensure these services use the spy instead of the real service)
ReflectionTestUtils.setField(workspaceItemService, "authorizeService", authorizeServiceSpy); ReflectionTestUtils.setField(workspaceItemService, "authorizeService", authorizeServiceSpy);
ReflectionTestUtils.setField(itemService, "authorizeService", authorizeServiceSpy);
ReflectionTestUtils.setField(collectionService, "authorizeService", authorizeServiceSpy); ReflectionTestUtils.setField(collectionService, "authorizeService", authorizeServiceSpy);
ReflectionTestUtils.setField(communityService, "authorizeService", authorizeServiceSpy); ReflectionTestUtils.setField(communityService, "authorizeService", authorizeServiceSpy);
} catch (AuthorizeException ex) { } catch (AuthorizeException ex) {
@@ -158,7 +165,8 @@ public class WorkspaceItemTest extends AbstractUnitTest {
@Test @Test
public void testCreateAuth() throws Exception { public void testCreateAuth() throws Exception {
// Allow Collection ADD perms // Allow Collection ADD perms
doNothing().when(authorizeServiceSpy).authorizeAction(context, collection, Constants.ADD); doNothing().when(authorizeServiceSpy).authorizeAction(any(Context.class),
any(Collection.class), eq(Constants.ADD));
boolean template; boolean template;
WorkspaceItem created; WorkspaceItem created;

View File

@@ -1,58 +0,0 @@
#
# The contents of this file are subject to the license and copyright
# detailed in the LICENSE and NOTICE files at the root of the source
# tree and available online at
#
# http://www.dspace.org/license/
#
###########################################################################
#
# log4j.properties
#
#
###########################################################################
# This is a copy of the log4j configuration file for DSpace, to avoid
# getting errors when running tests.
# Set root category priority to INFO and its only appender to A1.
log4j.rootCategory=INFO, A1
# A1 is set to be a ConsoleAppender.
log4j.appender.A1=org.apache.log4j.ConsoleAppender
# A1 uses PatternLayout.
log4j.appender.A1.layout=org.apache.log4j.PatternLayout
log4j.appender.A1.layout.ConversionPattern=%d %-5p %c @ %m%n
###########################################################################
# Other settings
###########################################################################
# Block passwords from being exposed in Axis logs.
# (DEBUG exposes passwords in Basic Auth)
log4j.logger.org.apache.axis.handlers.http.HTTPAuthHandler=INFO
# Block services logging except on exceptions
log4j.logger.org.dspace.kernel=ERROR
log4j.logger.org.dspace.services=ERROR
log4j.logger.org.dspace.servicemanager=ERROR
log4j.logger.org.dspace.providers=ERROR
log4j.logger.org.dspace.utils=ERROR
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.Target=System.out
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d{ABSOLUTE} %5p %c{1}:%L - %m%n
#
# Root logger option
log4j.rootLogger=INFO, stdout
# Hibernate logging options (INFO only shows startup messages)
log4j.logger.org.hibernate=INFO
# For detailed Hibernate logging in Unit Tests, you can enable the following
# setting which logs all JDBC bind parameter runtime arguments.
# This will drastically increase the size of Unit Test logs though.
#log4j.logger.org.hibernate.SQL=DEBUG, A1
#log4j.logger.org.hibernate.type=TRACE, A1

View File

@@ -0,0 +1,56 @@
<?xml version="1.0" encoding="UTF-8"?>
<Configuration strict='true'
xmlns='http://logging.apache.org/log4j/2.0/config'>
<Properties>
<!-- Log level for all DSpace-specific code (org.dspace.*)
Possible values (from most to least info):
DEBUG, INFO, WARN, ERROR, FATAL -->
<Property name='loglevel.dspace'>DEBUG</Property>
<!-- Log level for other third-party tools/APIs used by DSpace
Possible values (from most to least info):
DEBUG, INFO, WARN, ERROR, FATAL -->
<Property name='loglevel.other'>INFO</Property>
</Properties>
<Appenders>
<!-- A1 is for most DSpace activity -->
<Appender name='A1'
type='Console'>
<Layout type='PatternLayout'
pattern='%d %-5p %c @ %m%n'/>
</Appender>
</Appenders>
<Loggers>
<!-- Most parts of DSpace -->
<Logger name='org.dspace'
level='${loglevel.dspace}'
additivity='false'>
<AppenderRef ref='A1'/>
</Logger>
<!-- Block services logging except on exceptions -->
<Logger name='org.dspace.kernel'
level='ERROR'/>
<Logger name='org.dspace.services'
level='ERROR'/>
<Logger name='org.dspace.servicemanager'
level='WARN'/>
<Logger name='org.dspace.providers'
level='ERROR'/>
<Logger name='org.dspace.utils'
level='ERROR'/>
<!-- Block passwords from being exposed in Axis logs.
(DEBUG exposes passwords in Basic Auth) -->
<Logger name='org.apache.axis.handlers.http.HTTPAuthHandler'
level='INFO'/>
<!-- Anything not a part of DSpace -->
<Root level='${loglevel.other}'>
<AppenderRef ref='A1'/>
</Root>
</Loggers>
</Configuration>

View File

@@ -1,94 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.rest;

import java.sql.SQLException;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.dspace.app.rest.converter.ConverterService;
import org.dspace.app.rest.link.HalLinkService;
import org.dspace.app.rest.model.RelationshipTypeRest;
import org.dspace.app.rest.model.hateoas.RelationshipTypeResource;
import org.dspace.app.rest.utils.ContextUtil;
import org.dspace.app.rest.utils.Utils;
import org.dspace.content.EntityType;
import org.dspace.content.RelationshipType;
import org.dspace.content.service.EntityTypeService;
import org.dspace.content.service.RelationshipTypeService;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.web.PagedResourcesAssembler;
import org.springframework.hateoas.PagedModel;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;

/**
 * Handles all incoming calls on the api/core/entitytypes/{id}/relationshiptypes
 * endpoint: given the id of a specific EntityType, it returns every
 * RelationshipType defined for that EntityType.
 */
@RestController
@RequestMapping("/api/core/entitytypes/{id}/relationshiptypes")
public class RelationshipTypeRestController {

    @Autowired
    private RelationshipTypeService relationshipTypeService;

    @Autowired
    private EntityTypeService entityTypeService;

    @Autowired
    private ConverterService converter;

    @Autowired
    private Utils utils;

    @Autowired
    private HalLinkService halLinkService;

    /**
     * Retrieves every RelationshipType that conforms to the EntityType with the
     * given ID and returns them as a paged, link-decorated resource model.
     *
     * @param id        The ID of the EntityType whose RelationshipTypes are requested
     * @param response  The response object
     * @param request   The request object
     * @param pageable  The pagination object
     * @param assembler The assembler object
     * @return The paged model containing the RelationshipType resources
     * @throws SQLException If something goes wrong
     */
    @RequestMapping(method = RequestMethod.GET)
    public PagedModel<RelationshipTypeResource> retrieve(@PathVariable Integer id,
                                                         HttpServletResponse response,
                                                         HttpServletRequest request,
                                                         Pageable pageable,
                                                         PagedResourcesAssembler assembler) throws SQLException {
        Context context = ContextUtil.obtainContext(request);
        EntityType entityType = entityTypeService.find(context, id);
        List<RelationshipType> relationshipTypes =
                relationshipTypeService.findByEntityType(context, entityType, -1, -1);

        // Convert the domain objects to REST resources, then decorate each with HAL links.
        Page<RelationshipTypeRest> restPage = converter
                .toRestPage(relationshipTypes, pageable, utils.obtainProjection());
        Page<RelationshipTypeResource> resourcePage = restPage
                .map(rest -> new RelationshipTypeResource(rest, utils));
        resourcePage.forEach(halLinkService::addLinks);

        return assembler.toModel(resourcePage);
    }
}

View File

@@ -1,43 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.rest.link.relation;

import java.util.LinkedList;

import org.dspace.app.rest.RelationshipTypeRestController;
import org.dspace.app.rest.link.HalLinkFactory;
import org.dspace.app.rest.model.hateoas.EntityTypeResource;
import org.springframework.data.domain.Pageable;
import org.springframework.hateoas.Link;
import org.springframework.stereotype.Component;

/**
 * Adds HAL links to an {@link EntityTypeResource}. Invoked by the HalLinkService
 * addLinks machinery, which iterates over all registered factories and asks each
 * whether it can produce links for the resource at hand.
 */
@Component
public class EntityTypeHalLinkFactory extends HalLinkFactory<EntityTypeResource, RelationshipTypeRestController> {

    @Override
    protected void addLinks(EntityTypeResource halResource, Pageable pageable, LinkedList<Link> list) throws Exception {
        // Link to the relationshiptypes sub-endpoint for this EntityType's id.
        Integer entityTypeId = halResource.getContent().getId();
        Link relationshipTypesLink = buildLink("relationshiptypes",
                getMethodOn().retrieve(entityTypeId, null, null, null, null));
        list.add(relationshipTypesLink);
    }

    @Override
    protected Class<RelationshipTypeRestController> getControllerClass() {
        return RelationshipTypeRestController.class;
    }

    @Override
    protected Class<EntityTypeResource> getResourceClass() {
        return EntityTypeResource.class;
    }
}

View File

@@ -14,11 +14,20 @@ import org.dspace.app.rest.RestResourceController;
* for the EntityTypeResource class. * for the EntityTypeResource class.
* Refer to {@link org.dspace.content.EntityType} for explanation of the properties * Refer to {@link org.dspace.content.EntityType} for explanation of the properties
*/ */
@LinksRest(links = {
@LinkRest(
name = EntityTypeRest.RELATION_SHIP_TYPES,
method = "getEntityTypeRelationship"
)
})
public class EntityTypeRest extends BaseObjectRest<Integer> { public class EntityTypeRest extends BaseObjectRest<Integer> {
private static final long serialVersionUID = 8166078961459192770L;
public static final String NAME = "entitytype"; public static final String NAME = "entitytype";
public static final String NAME_PLURAL = "entitytypes"; public static final String NAME_PLURAL = "entitytypes";
public static final String CATEGORY = "core"; public static final String CATEGORY = "core";
public static final String RELATION_SHIP_TYPES = "relationshiptypes";
public String getCategory() { public String getCategory() {
return CATEGORY; return CATEGORY;

View File

@@ -0,0 +1,73 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.rest.repository;

import java.sql.SQLException;
import java.util.List;
import java.util.Objects;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;

import org.dspace.app.rest.model.EntityTypeRest;
import org.dspace.app.rest.model.RelationshipTypeRest;
import org.dspace.app.rest.projection.Projection;
import org.dspace.content.EntityType;
import org.dspace.content.RelationshipType;
import org.dspace.content.service.EntityTypeService;
import org.dspace.content.service.RelationshipTypeService;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.rest.webmvc.ResourceNotFoundException;
import org.springframework.stereotype.Component;

/**
 * Link repository for "relationships" subresource of an individual EntityType
 *
 * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.it)
 */
@Component(EntityTypeRest.CATEGORY + "." + EntityTypeRest.NAME + "." + EntityTypeRest.RELATION_SHIP_TYPES)
public class EntityTypeRelationshipLinkRepository extends AbstractDSpaceRestRepository
        implements LinkRestRepository {

    @Autowired
    private EntityTypeService entityTypeService;

    @Autowired
    private RelationshipTypeService relationshipTypeService;

    /**
     * Retrieves all RelationshipTypes that conform to the EntityType identified
     * by the given ID, wrapped as a paged REST resource.
     *
     * @param request          The request object
     * @param id               The ID of the EntityType whose RelationshipTypes are requested
     * @param optionalPageable The pagination object
     * @param projection       The current Projection
     * @return Page of RelationshipType objects as defined above
     */
    public Page<RelationshipTypeRest> getEntityTypeRelationship(@Nullable HttpServletRequest request,
                                                                Integer id,
                                                                @Nullable Pageable optionalPageable,
                                                                Projection projection) {
        try {
            Context context = obtainContext();
            Pageable pageable = utils.getPageable(optionalPageable);

            EntityType entityType = entityTypeService.find(context, id);
            if (Objects.isNull(entityType)) {
                throw new ResourceNotFoundException("No such EntityType: " + id);
            }

            // Fetch the requested slice plus the overall count for correct paging metadata.
            int totalCount = relationshipTypeService.countByEntityType(context, entityType);
            List<RelationshipType> relationshipTypes = relationshipTypeService.findByEntityType(
                    context, entityType, pageable.getPageSize(), Math.toIntExact(pageable.getOffset()));

            return converter.toRestPage(relationshipTypes, pageable, totalCount, projection);
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    }
}

View File

@@ -152,10 +152,14 @@ public class ScriptRestRepository extends DSpaceRestRepository<ScriptRest, Strin
restDSpaceRunnableHandler.schedule(dSpaceRunnable); restDSpaceRunnableHandler.schedule(dSpaceRunnable);
} catch (ParseException e) { } catch (ParseException e) {
dSpaceRunnable.printHelp(); dSpaceRunnable.printHelp();
restDSpaceRunnableHandler try {
.handleException( restDSpaceRunnableHandler.handleException(
"Failed to parse the arguments given to the script with name: " + scriptToExecute.getName() "Failed to parse the arguments given to the script with name: "
+ " and args: " + args, e); + scriptToExecute.getName() + " and args: " + args, e
);
} catch (Exception re) {
// ignore re-thrown exception
}
} }
} }

View File

@@ -134,19 +134,13 @@ public class RestDSpaceRunnableHandler implements DSpaceRunnableHandler {
@Override @Override
public void handleException(String message, Exception e) { public void handleException(String message, Exception e) {
if (message != null) { logError(message, e);
logError(message);
}
if (e != null) {
logError(ExceptionUtils.getStackTrace(e));
}
Context context = new Context(); Context context = new Context();
try { try {
Process process = processService.find(context, processId); Process process = processService.find(context, processId);
processService.fail(context, process); processService.fail(context, process);
addLogBitstreamToProcess(context); addLogBitstreamToProcess(context);
context.complete(); context.complete();
} catch (SQLException sqlException) { } catch (SQLException sqlException) {
@@ -161,6 +155,9 @@ public class RestDSpaceRunnableHandler implements DSpaceRunnableHandler {
context.abort(); context.abort();
} }
} }
// Make sure execution actually ends after we handle the exception
throw new RuntimeException(e);
} }
@Override @Override
@@ -180,7 +177,6 @@ public class RestDSpaceRunnableHandler implements DSpaceRunnableHandler {
log.info(logMessage); log.info(logMessage);
appendLogToProcess(message, ProcessLogLevel.INFO); appendLogToProcess(message, ProcessLogLevel.INFO);
} }
@Override @Override
@@ -189,7 +185,6 @@ public class RestDSpaceRunnableHandler implements DSpaceRunnableHandler {
log.warn(logMessage); log.warn(logMessage);
appendLogToProcess(message, ProcessLogLevel.WARNING); appendLogToProcess(message, ProcessLogLevel.WARNING);
} }
@Override @Override
@@ -198,7 +193,17 @@ public class RestDSpaceRunnableHandler implements DSpaceRunnableHandler {
log.error(logMessage); log.error(logMessage);
appendLogToProcess(message, ProcessLogLevel.ERROR); appendLogToProcess(message, ProcessLogLevel.ERROR);
}
@Override
public void logError(String message, Throwable throwable) {
String logMessage = getLogMessage(message);
log.error(logMessage, throwable);
appendLogToProcess(message, ProcessLogLevel.ERROR);
if (throwable != null) {
appendLogToProcess(ExceptionUtils.getStackTrace(throwable), ProcessLogLevel.ERROR);
}
} }
@Override @Override
@@ -207,8 +212,8 @@ public class RestDSpaceRunnableHandler implements DSpaceRunnableHandler {
HelpFormatter formatter = new HelpFormatter(); HelpFormatter formatter = new HelpFormatter();
StringWriter out = new StringWriter(); StringWriter out = new StringWriter();
PrintWriter pw = new PrintWriter(out); PrintWriter pw = new PrintWriter(out);
formatter.printHelp(pw, 1000, name, null, options, formatter.getLeftPadding(), formatter.getDescPadding(),
formatter.printUsage(pw, 1000, name, options); null, false);
pw.flush(); pw.flush();
String helpString = out.toString(); String helpString = out.toString();

View File

@@ -29,6 +29,7 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@@ -2414,16 +2415,34 @@ public class CollectionRestRepositoryIT extends AbstractControllerIntegrationTes
.withName("Mapped Collection") .withName("Mapped Collection")
.build(); .build();
List<Item> items = new ArrayList();
// This comparator is used to sort our test Items by java.util.UUID (which sorts them based on the RFC
// and not based on String comparison, see also https://stackoverflow.com/a/51031298/3750035 )
Comparator<Item> compareByUUID = Comparator.comparing(i -> i.getID());
Item item0 = ItemBuilder.createItem(context, collection).withTitle("Item 0").build(); Item item0 = ItemBuilder.createItem(context, collection).withTitle("Item 0").build();
items.add(item0);
Item item1 = ItemBuilder.createItem(context, collection).withTitle("Item 1").build(); Item item1 = ItemBuilder.createItem(context, collection).withTitle("Item 1").build();
items.add(item1);
Item item2 = ItemBuilder.createItem(context, collection).withTitle("Item 2").build(); Item item2 = ItemBuilder.createItem(context, collection).withTitle("Item 2").build();
items.add(item2);
Item item3 = ItemBuilder.createItem(context, collection).withTitle("Item 3").build(); Item item3 = ItemBuilder.createItem(context, collection).withTitle("Item 3").build();
items.add(item3);
Item item4 = ItemBuilder.createItem(context, collection).withTitle("Item 4").build(); Item item4 = ItemBuilder.createItem(context, collection).withTitle("Item 4").build();
items.add(item4);
Item item5 = ItemBuilder.createItem(context, collection).withTitle("Item 5").build(); Item item5 = ItemBuilder.createItem(context, collection).withTitle("Item 5").build();
items.add(item5);
Item item6 = ItemBuilder.createItem(context, collection).withTitle("Item 6").build(); Item item6 = ItemBuilder.createItem(context, collection).withTitle("Item 6").build();
items.add(item6);
Item item7 = ItemBuilder.createItem(context, collection).withTitle("Item 7").build(); Item item7 = ItemBuilder.createItem(context, collection).withTitle("Item 7").build();
items.add(item7);
Item item8 = ItemBuilder.createItem(context, collection).withTitle("Item 8").build(); Item item8 = ItemBuilder.createItem(context, collection).withTitle("Item 8").build();
items.add(item8);
Item item9 = ItemBuilder.createItem(context, collection).withTitle("Item 9").build(); Item item9 = ItemBuilder.createItem(context, collection).withTitle("Item 9").build();
items.add(item9);
// sort items list by UUID (as Items will come back ordered by UUID)
items.sort(compareByUUID);
collectionService.addItem(context, mappedCollection, item0); collectionService.addItem(context, mappedCollection, item0);
collectionService.addItem(context, mappedCollection, item1); collectionService.addItem(context, mappedCollection, item1);
@@ -2445,12 +2464,13 @@ public class CollectionRestRepositoryIT extends AbstractControllerIntegrationTes
.param("embed.size", "mappedItems=5")) .param("embed.size", "mappedItems=5"))
.andExpect(status().isOk()) .andExpect(status().isOk())
.andExpect(jsonPath("$", CollectionMatcher.matchCollection(mappedCollection))) .andExpect(jsonPath("$", CollectionMatcher.matchCollection(mappedCollection)))
.andExpect(jsonPath("$._embedded.mappedItems._embedded.mappedItems", Matchers.containsInAnyOrder( .andExpect(jsonPath("$._embedded.mappedItems._embedded.mappedItems",
ItemMatcher.matchItemProperties(item0), Matchers.containsInRelativeOrder(
ItemMatcher.matchItemProperties(item1), ItemMatcher.matchItemProperties(items.get(0)),
ItemMatcher.matchItemProperties(item2), ItemMatcher.matchItemProperties(items.get(1)),
ItemMatcher.matchItemProperties(item3), ItemMatcher.matchItemProperties(items.get(2)),
ItemMatcher.matchItemProperties(item4) ItemMatcher.matchItemProperties(items.get(3)),
ItemMatcher.matchItemProperties(items.get(4))
))) )))
.andExpect(jsonPath("$._links.self.href", .andExpect(jsonPath("$._links.self.href",
Matchers.containsString("/api/core/collections/" + mappedCollection.getID()))) Matchers.containsString("/api/core/collections/" + mappedCollection.getID())))

View File

@@ -196,15 +196,8 @@ public class DiscoveryRestControllerIT extends AbstractControllerIntegrationTest
@Test @Test
public void discoverFacetsAuthorWithAuthorityWithSizeParameter() throws Exception { public void discoverFacetsAuthorWithAuthorityWithSizeParameter() throws Exception {
configurationService.setProperty("choices.plugin.dc.contributor.author", configurationService.setProperty("choices.plugin.dc.contributor.author", "SolrAuthorAuthority");
"SolrAuthorAuthority"); configurationService.setProperty("authority.controlled.dc.contributor.author", "true");
configurationService.setProperty("authority.controlled.dc.contributor.author",
"true");
configurationService.setProperty("discovery.browse.authority.ignore-prefered.author", true);
configurationService.setProperty("discovery.index.authority.ignore-prefered.dc.contributor.author", true);
configurationService.setProperty("discovery.browse.authority.ignore-variants.author", true);
configurationService.setProperty("discovery.index.authority.ignore-variants.dc.contributor.author", true);
metadataAuthorityService.clearCache(); metadataAuthorityService.clearCache();

View File

@@ -15,8 +15,10 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.dspace.app.rest.matcher.EntityTypeMatcher; import org.dspace.app.rest.matcher.EntityTypeMatcher;
import org.dspace.app.rest.matcher.RelationshipTypeMatcher;
import org.dspace.app.rest.test.AbstractEntityIntegrationTest; import org.dspace.app.rest.test.AbstractEntityIntegrationTest;
import org.dspace.content.EntityType; import org.dspace.content.EntityType;
import org.dspace.content.RelationshipType;
import org.dspace.content.service.EntityTypeService; import org.dspace.content.service.EntityTypeService;
import org.hamcrest.Matchers; import org.hamcrest.Matchers;
import org.junit.Test; import org.junit.Test;
@@ -168,4 +170,37 @@ public class EntityTypeRestRepositoryIT extends AbstractEntityIntegrationTest {
.andExpect(jsonPath("$.page.number", is(1))); .andExpect(jsonPath("$.page.number", is(1)));
} }
@Test
public void findEntityTypeWithEmbedRelatioshipTypeTest() throws Exception {
EntityType person = entityTypeService.findByEntityType(context, "Person");
EntityType orgunit = entityTypeService.findByEntityType(context, "OrgUnit");
EntityType project = entityTypeService.findByEntityType(context, "Project");
EntityType publication = entityTypeService.findByEntityType(context, "Publication");
EntityType journalIssue = entityTypeService.findByEntityType(context, "journalIssue");
RelationshipType relationshipType1 = relationshipTypeService.findbyTypesAndTypeName(context,
publication, person, "isAuthorOfPublication", "isPublicationOfAuthor");
RelationshipType relationshipType2 = relationshipTypeService.findbyTypesAndTypeName(context,
publication, project, "isProjectOfPublication", "isPublicationOfProject");
RelationshipType relationshipType3 = relationshipTypeService.findbyTypesAndTypeName(context,
publication, orgunit, "isOrgUnitOfPublication", "isPublicationOfOrgUnit");
RelationshipType relationshipType4 = relationshipTypeService.findbyTypesAndTypeName(context,
journalIssue, publication, "isPublicationOfJournalIssue", "isJournalIssueOfPublication");
RelationshipType relationshipType5 = relationshipTypeService.findbyTypesAndTypeName(context,
publication, orgunit, "isAuthorOfPublication","isPublicationOfAuthor");
getClient().perform(get("/api/core/entitytypes/" + publication.getID())
.param("embed", "relationshiptypes"))
.andExpect(status().isOk())
.andExpect(jsonPath("$", EntityTypeMatcher.matchEntityTypeEntry(publication)))
.andExpect(jsonPath("$._embedded.relationshiptypes._embedded.relationshiptypes", containsInAnyOrder(
RelationshipTypeMatcher.matchRelationshipTypeEntry(relationshipType1),
RelationshipTypeMatcher.matchRelationshipTypeEntry(relationshipType2),
RelationshipTypeMatcher.matchRelationshipTypeEntry(relationshipType3),
RelationshipTypeMatcher.matchRelationshipTypeEntry(relationshipType4),
RelationshipTypeMatcher.matchRelationshipTypeEntry(relationshipType5)
)));
}
} }

View File

@@ -27,6 +27,7 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
import java.io.InputStream; import java.io.InputStream;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@@ -111,6 +112,11 @@ public class ItemRestRepositoryIT extends AbstractControllerIntegrationTest {
Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build(); Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build();
List<Item> items = new ArrayList();
// This comparator is used to sort our test Items by java.util.UUID (which sorts them based on the RFC
// and not based on String comparison, see also https://stackoverflow.com/a/51031298/3750035 )
Comparator<Item> compareByUUID = Comparator.comparing(i -> i.getID());
//2. Three public items that are readable by Anonymous with different subjects //2. Three public items that are readable by Anonymous with different subjects
Item publicItem1 = ItemBuilder.createItem(context, col1) Item publicItem1 = ItemBuilder.createItem(context, col1)
.withTitle("Public item 1") .withTitle("Public item 1")
@@ -118,6 +124,7 @@ public class ItemRestRepositoryIT extends AbstractControllerIntegrationTest {
.withAuthor("Smith, Donald").withAuthor("Doe, John") .withAuthor("Smith, Donald").withAuthor("Doe, John")
.withSubject("ExtraEntry") .withSubject("ExtraEntry")
.build(); .build();
items.add(publicItem1);
Item publicItem2 = ItemBuilder.createItem(context, col2) Item publicItem2 = ItemBuilder.createItem(context, col2)
.withTitle("Public item 2") .withTitle("Public item 2")
@@ -125,6 +132,7 @@ public class ItemRestRepositoryIT extends AbstractControllerIntegrationTest {
.withAuthor("Smith, Maria").withAuthor("Doe, Jane") .withAuthor("Smith, Maria").withAuthor("Doe, Jane")
.withSubject("TestingForMore").withSubject("ExtraEntry") .withSubject("TestingForMore").withSubject("ExtraEntry")
.build(); .build();
items.add(publicItem2);
Item publicItem3 = ItemBuilder.createItem(context, col2) Item publicItem3 = ItemBuilder.createItem(context, col2)
.withTitle("Public item 3") .withTitle("Public item 3")
@@ -133,19 +141,19 @@ public class ItemRestRepositoryIT extends AbstractControllerIntegrationTest {
.withSubject("AnotherTest").withSubject("TestingForMore") .withSubject("AnotherTest").withSubject("TestingForMore")
.withSubject("ExtraEntry") .withSubject("ExtraEntry")
.build(); .build();
items.add(publicItem3);
// sort items list by UUID (as Items will come back ordered by UUID)
items.sort(compareByUUID);
context.restoreAuthSystemState(); context.restoreAuthSystemState();
String token = getAuthToken(admin.getEmail(), password); String token = getAuthToken(admin.getEmail(), password);
getClient(token).perform(get("/api/core/items")) getClient(token).perform(get("/api/core/items"))
.andExpect(status().isOk()) .andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.items", Matchers.containsInAnyOrder( .andExpect(jsonPath("$._embedded.items", Matchers.containsInRelativeOrder(
ItemMatcher.matchItemWithTitleAndDateIssued(publicItem1, ItemMatcher.matchItemProperties(items.get(0)),
"Public item 1", "2017-10-17"), ItemMatcher.matchItemProperties(items.get(1)),
ItemMatcher.matchItemWithTitleAndDateIssued(publicItem2, ItemMatcher.matchItemProperties(items.get(2))
"Public item 2", "2016-02-13"),
ItemMatcher.matchItemWithTitleAndDateIssued(publicItem3,
"Public item 3", "2016-02-13")
))) )))
.andExpect(jsonPath("$._links.self.href", .andExpect(jsonPath("$._links.self.href",
Matchers.containsString("/api/core/items"))) Matchers.containsString("/api/core/items")))
@@ -185,6 +193,11 @@ public class ItemRestRepositoryIT extends AbstractControllerIntegrationTest {
.withTemplateItem() .withTemplateItem()
.build(); .build();
List<Item> items = new ArrayList();
// This comparator is used to sort our test Items by java.util.UUID (which sorts them based on the RFC
// and not based on String comparison, see also https://stackoverflow.com/a/51031298/3750035 )
Comparator<Item> compareByUUID = Comparator.comparing(i -> i.getID());
//2. Three public items that are readable by Anonymous with different subjects //2. Three public items that are readable by Anonymous with different subjects
Item publicItem1 = ItemBuilder.createItem(context, col1) Item publicItem1 = ItemBuilder.createItem(context, col1)
.withTitle("Public item 1") .withTitle("Public item 1")
@@ -192,6 +205,7 @@ public class ItemRestRepositoryIT extends AbstractControllerIntegrationTest {
.withAuthor("Smith, Donald").withAuthor("Doe, John") .withAuthor("Smith, Donald").withAuthor("Doe, John")
.withSubject("ExtraEntry") .withSubject("ExtraEntry")
.build(); .build();
items.add(publicItem1);
Item publicItem2 = ItemBuilder.createItem(context, col2) Item publicItem2 = ItemBuilder.createItem(context, col2)
.withTitle("Public item 2") .withTitle("Public item 2")
@@ -199,6 +213,7 @@ public class ItemRestRepositoryIT extends AbstractControllerIntegrationTest {
.withAuthor("Smith, Maria").withAuthor("Doe, Jane") .withAuthor("Smith, Maria").withAuthor("Doe, Jane")
.withSubject("TestingForMore").withSubject("ExtraEntry") .withSubject("TestingForMore").withSubject("ExtraEntry")
.build(); .build();
items.add(publicItem2);
Item publicItem3 = ItemBuilder.createItem(context, col2) Item publicItem3 = ItemBuilder.createItem(context, col2)
.withTitle("Public item 3") .withTitle("Public item 3")
@@ -207,6 +222,9 @@ public class ItemRestRepositoryIT extends AbstractControllerIntegrationTest {
.withSubject("AnotherTest").withSubject("TestingForMore") .withSubject("AnotherTest").withSubject("TestingForMore")
.withSubject("ExtraEntry") .withSubject("ExtraEntry")
.build(); .build();
items.add(publicItem3);
// sort items list by UUID (as Items will come back ordered by UUID)
items.sort(compareByUUID);
// Create a Workspace Item (which in turn creates an Item with "in_archive=false") // Create a Workspace Item (which in turn creates an Item with "in_archive=false")
// This is only created to prove that WorkspaceItems are NOT counted/listed in this endpoint // This is only created to prove that WorkspaceItems are NOT counted/listed in this endpoint
@@ -232,16 +250,13 @@ public class ItemRestRepositoryIT extends AbstractControllerIntegrationTest {
getClient(token).perform(get("/api/core/items") getClient(token).perform(get("/api/core/items")
.param("size", "2")) .param("size", "2"))
.andExpect(status().isOk()) .andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.items", Matchers.containsInAnyOrder( .andExpect(jsonPath("$._embedded.items", Matchers.containsInRelativeOrder(
ItemMatcher.matchItemWithTitleAndDateIssued(publicItem1, ItemMatcher.matchItemProperties(items.get(0)),
"Public item 1", "2017-10-17"), ItemMatcher.matchItemProperties(items.get(1))
ItemMatcher.matchItemWithTitleAndDateIssued(publicItem2,
"Public item 2", "2016-02-13")
))) )))
.andExpect(jsonPath("$._embedded.items", Matchers.not( .andExpect(jsonPath("$._embedded.items", Matchers.not(
Matchers.contains( Matchers.contains(
ItemMatcher.matchItemWithTitleAndDateIssued(publicItem3, ItemMatcher.matchItemProperties(items.get(2)),
"Public item 3", "2016-02-13"),
ItemMatcher.matchItemWithTitleAndDateIssued(itemInWorkspace, ItemMatcher.matchItemWithTitleAndDateIssued(itemInWorkspace,
"In Progress Item", "2018-02-05"), "In Progress Item", "2018-02-05"),
ItemMatcher.matchItemWithTitleAndDateIssued(itemInWorkflow, ItemMatcher.matchItemWithTitleAndDateIssued(itemInWorkflow,
@@ -271,15 +286,12 @@ public class ItemRestRepositoryIT extends AbstractControllerIntegrationTest {
.param("page", "1")) .param("page", "1"))
.andExpect(status().isOk()) .andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.items", Matchers.contains( .andExpect(jsonPath("$._embedded.items", Matchers.contains(
ItemMatcher.matchItemWithTitleAndDateIssued(publicItem3, ItemMatcher.matchItemProperties(items.get(2))
"Public item 3", "2016-02-13")
))) )))
.andExpect(jsonPath("$._embedded.items", Matchers.not( .andExpect(jsonPath("$._embedded.items", Matchers.not(
Matchers.contains( Matchers.contains(
ItemMatcher.matchItemWithTitleAndDateIssued(publicItem1, ItemMatcher.matchItemProperties(items.get(0)),
"Public item 1", "2017-10-17"), ItemMatcher.matchItemProperties(items.get(1)),
ItemMatcher.matchItemWithTitleAndDateIssued(publicItem2,
"Public item 2", "2016-02-13"),
ItemMatcher.matchItemWithTitleAndDateIssued(itemInWorkspace, ItemMatcher.matchItemWithTitleAndDateIssued(itemInWorkspace,
"In Progress Item", "2018-02-05"), "In Progress Item", "2018-02-05"),
ItemMatcher.matchItemWithTitleAndDateIssued(itemInWorkflow, ItemMatcher.matchItemWithTitleAndDateIssued(itemInWorkflow,

View File

@@ -6,7 +6,7 @@
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest; package org.dspace.app.rest;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.is;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
@@ -20,6 +20,7 @@ import org.dspace.app.rest.matcher.RelationshipTypeMatcher;
import org.dspace.app.rest.test.AbstractEntityIntegrationTest; import org.dspace.app.rest.test.AbstractEntityIntegrationTest;
import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder; import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.EntityTypeBuilder;
import org.dspace.builder.ItemBuilder; import org.dspace.builder.ItemBuilder;
import org.dspace.builder.RelationshipBuilder; import org.dspace.builder.RelationshipBuilder;
import org.dspace.content.Collection; import org.dspace.content.Collection;
@@ -86,8 +87,7 @@ public class RelationshipTypeRestControllerIT extends AbstractEntityIntegrationT
RelationshipType relationshipType5 = relationshipTypeService RelationshipType relationshipType5 = relationshipTypeService
.findbyTypesAndTypeName(context, publicationEntityType, orgunitEntityType, "isAuthorOfPublication", .findbyTypesAndTypeName(context, publicationEntityType, orgunitEntityType, "isAuthorOfPublication",
"isPublicationOfAuthor"); "isPublicationOfAuthor");
getClient().perform(get("/api/core/entitytypes/" + publicationEntityType.getID() + "/relationshiptypes") getClient().perform(get("/api/core/entitytypes/" + publicationEntityType.getID() + "/relationshiptypes"))
.param("projection", "full"))
.andExpect(status().isOk()) .andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.relationshiptypes", containsInAnyOrder( .andExpect(jsonPath("$._embedded.relationshiptypes", containsInAnyOrder(
RelationshipTypeMatcher.matchRelationshipTypeEntry(relationshipType1), RelationshipTypeMatcher.matchRelationshipTypeEntry(relationshipType1),
@@ -95,7 +95,115 @@ public class RelationshipTypeRestControllerIT extends AbstractEntityIntegrationT
RelationshipTypeMatcher.matchRelationshipTypeEntry(relationshipType3), RelationshipTypeMatcher.matchRelationshipTypeEntry(relationshipType3),
RelationshipTypeMatcher.matchRelationshipTypeEntry(relationshipType4), RelationshipTypeMatcher.matchRelationshipTypeEntry(relationshipType4),
RelationshipTypeMatcher.matchRelationshipTypeEntry(relationshipType5) RelationshipTypeMatcher.matchRelationshipTypeEntry(relationshipType5)
))); )))
.andExpect(jsonPath("$.page.size", is(20)))
.andExpect(jsonPath("$.page.totalElements", is(5)))
.andExpect(jsonPath("$.page.number", is(0)));
}
@Test
public void findAllRelationshipTypesEmptyResponseTest() throws Exception {
context.turnOffAuthorisationSystem();
EntityType testEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "TestEntityType").build();
context.restoreAuthSystemState();
getClient().perform(get("/api/core/entitytypes/" + testEntityType.getID() + "/relationshiptypes"))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.relationshiptypes").isEmpty())
.andExpect(jsonPath("$.page.size", is(20)))
.andExpect(jsonPath("$.page.totalElements", is(0)))
.andExpect(jsonPath("$.page.number", is(0)));
}
@Test
public void findAllRelationshipTypesForPublicationsPaginationTest() throws Exception {
EntityType person = entityTypeService.findByEntityType(context, "Person");
EntityType orgunit = entityTypeService.findByEntityType(context, "OrgUnit");
EntityType project = entityTypeService.findByEntityType(context, "Project");
EntityType publication = entityTypeService.findByEntityType(context, "Publication");
EntityType journalIssue = entityTypeService.findByEntityType(context, "journalIssue");
RelationshipType relationshipType1 = relationshipTypeService.findbyTypesAndTypeName(context,
publication, person, "isAuthorOfPublication", "isPublicationOfAuthor");
RelationshipType relationshipType2 = relationshipTypeService.findbyTypesAndTypeName(context,
publication, project, "isProjectOfPublication", "isPublicationOfProject");
RelationshipType relationshipType3 = relationshipTypeService.findbyTypesAndTypeName(context,
publication, orgunit, "isOrgUnitOfPublication", "isPublicationOfOrgUnit");
RelationshipType relationshipType4 = relationshipTypeService.findbyTypesAndTypeName(context,
journalIssue, publication, "isPublicationOfJournalIssue", "isJournalIssueOfPublication");
RelationshipType relationshipType5 = relationshipTypeService.findbyTypesAndTypeName(context,
publication, orgunit, "isAuthorOfPublication","isPublicationOfAuthor");
getClient().perform(get("/api/core/entitytypes/" + publication.getID() + "/relationshiptypes")
.param("size", "2"))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.relationshiptypes", containsInAnyOrder(
RelationshipTypeMatcher.matchRelationshipTypeEntry(relationshipType1),
RelationshipTypeMatcher.matchRelationshipTypeEntry(relationshipType2)
)))
.andExpect(jsonPath("$.page.size", is(2)))
.andExpect(jsonPath("$.page.totalElements", is(5)))
.andExpect(jsonPath("$.page.number", is(0)));
getClient().perform(get("/api/core/entitytypes/" + publication.getID() + "/relationshiptypes")
.param("size", "2")
.param("page", "1"))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.relationshiptypes", containsInAnyOrder(
RelationshipTypeMatcher.matchRelationshipTypeEntry(relationshipType3),
RelationshipTypeMatcher.matchRelationshipTypeEntry(relationshipType5)
)))
.andExpect(jsonPath("$.page.size", is(2)))
.andExpect(jsonPath("$.page.totalElements", is(5)))
.andExpect(jsonPath("$.page.number", is(1)));
getClient().perform(get("/api/core/entitytypes/" + publication.getID() + "/relationshiptypes")
.param("size", "2")
.param("page", "2"))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.relationshiptypes", contains(
RelationshipTypeMatcher.matchRelationshipTypeEntry(relationshipType4)
)))
.andExpect(jsonPath("$.page.size", is(2)))
.andExpect(jsonPath("$.page.totalElements", is(5)))
.andExpect(jsonPath("$.page.number", is(2)));
}
@Test
public void findAllRelationshipTypesForPublicationsEmbedTest() throws Exception {
EntityType publicationEntityType = entityTypeService.findByEntityType(context, "Publication");
EntityType personEntityType = entityTypeService.findByEntityType(context, "Person");
EntityType projectEntityType = entityTypeService.findByEntityType(context, "Project");
EntityType orgunitEntityType = entityTypeService.findByEntityType(context, "OrgUnit");
EntityType journalIssueEntityType = entityTypeService.findByEntityType(context, "journalIssue");
RelationshipType relationshipType1 = relationshipTypeService
.findbyTypesAndTypeName(context, publicationEntityType, personEntityType, "isAuthorOfPublication",
"isPublicationOfAuthor");
RelationshipType relationshipType2 = relationshipTypeService
.findbyTypesAndTypeName(context, publicationEntityType, projectEntityType, "isProjectOfPublication",
"isPublicationOfProject");
RelationshipType relationshipType3 = relationshipTypeService
.findbyTypesAndTypeName(context, publicationEntityType, orgunitEntityType, "isOrgUnitOfPublication",
"isPublicationOfOrgUnit");
RelationshipType relationshipType4 = relationshipTypeService
.findbyTypesAndTypeName(context, journalIssueEntityType, publicationEntityType,
"isPublicationOfJournalIssue", "isJournalIssueOfPublication");
RelationshipType relationshipType5 = relationshipTypeService
.findbyTypesAndTypeName(context, publicationEntityType, orgunitEntityType, "isAuthorOfPublication",
"isPublicationOfAuthor");
String adminToken = getAuthToken(admin.getEmail(), password);
getClient(adminToken).perform(get("/api/core/relationships?embed=relationshipType"))
.andExpect(status().isOk());
}
@Test
public void findAllRelationshipTypesNotFoundTest() throws Exception {
getClient().perform(get("/api/core/entitytypes/" + Integer.MAX_VALUE + "/relationshiptypes"))
.andExpect(status().isNotFound());
} }
@Test @Test

View File

@@ -15,7 +15,6 @@ import static org.hamcrest.Matchers.is;
import org.dspace.content.EntityType; import org.dspace.content.EntityType;
import org.dspace.content.RelationshipType; import org.dspace.content.RelationshipType;
import org.hamcrest.Matcher; import org.hamcrest.Matcher;
import org.hamcrest.Matchers;
public class RelationshipTypeMatcher { public class RelationshipTypeMatcher {
@@ -81,12 +80,8 @@ public class RelationshipTypeMatcher {
hasJsonPath("$.rightMaxCardinality", is(rightMaxCardinality)), hasJsonPath("$.rightMaxCardinality", is(rightMaxCardinality)),
hasJsonPath("$.type", is("relationshiptype")), hasJsonPath("$.type", is("relationshiptype")),
hasJsonPath("$._links.self.href", containsString("/api/core/relationshiptypes/" + id)), hasJsonPath("$._links.self.href", containsString("/api/core/relationshiptypes/" + id)),
hasJsonPath("$._embedded.leftType", Matchers.allOf( hasJsonPath("$._links.leftType.href", containsString("/api/core/entitytypes/" + leftEntityTypeId)),
EntityTypeMatcher.matchEntityTypeExplicitValuesEntry(leftEntityTypeId, leftEntityTypeLabel) hasJsonPath("$._links.rightType.href", containsString("/api/core/entitytypes/" + rightEntityTypeId))
)),
hasJsonPath("$._embedded.rightType", Matchers.is(
EntityTypeMatcher.matchEntityTypeExplicitValuesEntry(rightEntityTypeId, rightEntityTypeLabel)
))
); );
} }
} }

View File

@@ -11,6 +11,7 @@ import org.dspace.builder.EntityTypeBuilder;
import org.dspace.builder.RelationshipTypeBuilder; import org.dspace.builder.RelationshipTypeBuilder;
import org.dspace.content.EntityType; import org.dspace.content.EntityType;
import org.dspace.content.service.EntityTypeService; import org.dspace.content.service.EntityTypeService;
import org.dspace.content.service.RelationshipTypeService;
import org.junit.Before; import org.junit.Before;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
@@ -18,6 +19,8 @@ public class AbstractEntityIntegrationTest extends AbstractControllerIntegration
@Autowired @Autowired
private EntityTypeService entityTypeService; private EntityTypeService entityTypeService;
@Autowired
protected RelationshipTypeService relationshipTypeService;
/** /**
* This method will call the setUp method from AbstractControllerIntegrationTest. * This method will call the setUp method from AbstractControllerIntegrationTest.

View File

@@ -1,7 +0,0 @@
log4j.rootCategory=info
log4j.rootLogger=info, stdout
log4j.appender.stdout=ConsoleAppender
log4j.appender.stdout.layout=PatternLayout
log4j.appender.stdout.layout.ConversionPattern= %p %m [%d] (%F:%L) %n

View File

@@ -0,0 +1,16 @@
<Configuration strict='true'
xmlns='http://logging.apache.org/log4j/2.0/config'>
<Appenders>
<Appender name='A1'
type='Console'>
<Layout type='PatternLayout'
pattern='%p %m [%d] (%F:%L) %n'/>
</Appender>
</Appenders>
<Loggers>
<Root level='info'>
<AppenderRef ref='A1'/>
</Root>
</Loggers>
</Configuration>

View File

@@ -40,4 +40,9 @@
<property name="description" value="Delete all the values of the specified metadata field"/> <property name="description" value="Delete all the values of the specified metadata field"/>
<property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataDeletionCli"/> <property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataDeletionCli"/>
</bean> </bean>
<bean id="harvest" class="org.dspace.app.harvest.HarvestCliScriptConfiguration">
<property name="description" value="Manage the OAI-PMH harvesting of external collections"/>
<property name="dspaceRunnableClass" value="org.dspace.app.harvest.HarvestCli"/>
</bean>
</beans> </beans>

View File

@@ -33,4 +33,9 @@
<property name="description" value="Delete all the values of the specified metadata field"/> <property name="description" value="Delete all the values of the specified metadata field"/>
<property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataDeletion"/> <property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataDeletion"/>
</bean> </bean>
<bean id="harvest" class="org.dspace.app.harvest.HarvestScriptConfiguration">
<property name="description" value="Manage the OAI-PMH harvesting of external collections"/>
<property name="dspaceRunnableClass" value="org.dspace.app.harvest.Harvest"/>
</bean>
</beans> </beans>

View File

@@ -161,8 +161,7 @@
<repeatable>true</repeatable> <repeatable>true</repeatable>
<label>Type</label> <label>Type</label>
<input-type value-pairs-name="common_types">dropdown</input-type> <input-type value-pairs-name="common_types">dropdown</input-type>
<hint>Select the type(s) of content of the item. To select more than one value in the list, you may <hint>Select the type of content of the item.
have to hold down the "CTRL" or "Shift" key.
</hint> </hint>
<required></required> <required></required>
</field> </field>

View File

@@ -24,7 +24,7 @@
<spring-security.version>5.2.2.RELEASE</spring-security.version> <!-- sync with version used by spring-boot--> <spring-security.version>5.2.2.RELEASE</spring-security.version> <!-- sync with version used by spring-boot-->
<hibernate.version>5.4.10.Final</hibernate.version> <hibernate.version>5.4.10.Final</hibernate.version>
<hibernate-validator.version>6.0.18.Final</hibernate-validator.version> <hibernate-validator.version>6.0.18.Final</hibernate-validator.version>
<postgresql.driver.version>42.2.9</postgresql.driver.version> <postgresql.driver.version>42.2.24</postgresql.driver.version>
<solr.client.version>8.8.1</solr.client.version> <solr.client.version>8.8.1</solr.client.version>
<axiom.version>1.2.22</axiom.version> <axiom.version>1.2.22</axiom.version>