Port DS-3579_Context-mode-and-cache-management-CLI-commands to master

Tom Desair
2017-06-28 14:49:11 -07:00
parent 207a9e64be
commit 10e028918a
37 changed files with 398 additions and 388 deletions
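The commit applies one pattern across the DSpace command-line tools: open the Context in an explicit mode (Context.Mode.READ_ONLY for export/report style tools, Context.Mode.BATCH_EDIT for import and harvest style tools) and call uncacheEntity() on each processed object so the Hibernate session cache stays bounded while iterating large result sets. A minimal sketch of that pattern, assuming a hypothetical ExampleBatchTool class that is not part of this commit:

import java.util.Iterator;

import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;

public class ExampleBatchTool
{
    public static void main(String[] argv) throws Exception
    {
        ItemService itemService = ContentServiceFactory.getInstance().getItemService();

        // BATCH_EDIT for tools that write; exports and reports use Context.Mode.READ_ONLY.
        Context context = new Context(Context.Mode.BATCH_EDIT);
        try
        {
            Iterator<Item> items = itemService.findAll(context);
            while (items.hasNext())
            {
                Item item = items.next();
                // ... the tool's actual work on the item goes here ...

                // Evict the processed entity so the session cache does not grow unbounded.
                context.uncacheEntity(item);
            }
            context.complete();
        }
        catch (Exception e)
        {
            context.abort();
            throw e;
        }
    }
}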

View File

@@ -34,6 +34,8 @@ public class MetadataExport
protected ItemService itemService;
protected Context context;
/** Whether to export all metadata, or just normally edited metadata */
protected boolean exportAll;
@@ -55,6 +57,7 @@ public class MetadataExport
// Store the export settings
this.toExport = toExport;
this.exportAll = exportAll;
this.context = c;
}
/**
@@ -73,6 +76,7 @@ public class MetadataExport
// Try to export the community
this.toExport = buildFromCommunity(c, toExport, 0);
this.exportAll = exportAll;
this.context = c;
}
catch (SQLException sqle)
{
@@ -144,13 +148,19 @@ public class MetadataExport
{
try
{
Context.Mode originalMode = context.getCurrentMode();
context.setMode(Context.Mode.READ_ONLY);
// Process each item
DSpaceCSV csv = new DSpaceCSV(exportAll);
while (toExport.hasNext())
{
csv.addItem(toExport.next());
Item item = toExport.next();
csv.addItem(item);
context.uncacheEntity(item);
}
context.setMode(originalMode);
// Return the results
return csv;
}
@@ -224,7 +234,7 @@ public class MetadataExport
String filename = line.getOptionValue('f');
// Create a context
Context c = new Context();
Context c = new Context(Context.Mode.READ_ONLY);
c.turnOffAuthorisationSystem();
// The things we'll export

View File

@@ -31,6 +31,7 @@ import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.workflow.WorkflowItem;
import org.dspace.workflow.WorkflowService;
import org.dspace.workflow.factory.WorkflowServiceFactory;
@@ -122,6 +123,9 @@ public class MetadataImport
// Make the changes
try
{
Context.Mode originalMode = c.getCurrentMode();
c.setMode(Context.Mode.BATCH_EDIT);
// Process each change
for (DSpaceCSVLine line : toImport)
{
@@ -134,11 +138,15 @@ public class MetadataImport
throw new MetadataImportException("'action' not allowed for new items!");
}
WorkspaceItem wsItem = null;
WorkflowItem wfItem = null;
Item item = null;
// Is this a new item?
if (id != null)
{
// Get the item
Item item = itemService.find(c, id);
item = itemService.find(c, id);
if (item == null)
{
throw new MetadataImportException("Unknown item ID " + id);
@@ -345,8 +353,8 @@ public class MetadataImport
// Create the item
String collectionHandle = line.get("collection").get(0);
collection = (Collection) handleService.resolveToObject(c, collectionHandle);
WorkspaceItem wsItem = workspaceItemService.create(c, collection, useTemplate);
Item item = wsItem.getItem();
wsItem = workspaceItemService.create(c, collection, useTemplate);
item = wsItem.getItem();
// Add the metadata to the item
for (BulkEditMetadataValue dcv : whatHasChanged.getAdds())
@@ -364,9 +372,9 @@ public class MetadataImport
if(useWorkflow){
WorkflowService workflowService = WorkflowServiceFactory.getInstance().getWorkflowService();
if (workflowNotify) {
workflowService.start(c, wsItem);
wfItem = workflowService.start(c, wsItem);
} else {
workflowService.startWithoutNotify(c, wsItem);
wfItem = workflowService.startWithoutNotify(c, wsItem);
}
}
else
@@ -394,7 +402,16 @@ public class MetadataImport
// Record the changes
changes.add(whatHasChanged);
}
if (change) {
//only clear cache if changes have been made.
c.uncacheEntity(wsItem);
c.uncacheEntity(wfItem);
c.uncacheEntity(item);
}
}
c.setMode(originalMode);
}
catch (MetadataImportException mie)
{

View File

@@ -7,36 +7,32 @@
*/
package org.dspace.app.harvest;
import org.apache.commons.cli.*;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.harvest.HarvestedCollection;
import org.dspace.harvest.HarvestingException;
import org.dspace.harvest.OAIHarvester;
import org.dspace.harvest.factory.HarvestServiceFactory;
import org.dspace.harvest.service.HarvestedCollectionService;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.harvest.HarvestedCollection;
import org.dspace.content.Item;
import org.dspace.harvest.HarvestingException;
import org.dspace.harvest.OAIHarvester;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.harvest.factory.HarvestServiceFactory;
import org.dspace.harvest.service.HarvestedCollectionService;
/**
* Test class for harvested collections.
*
@@ -96,7 +92,7 @@ public class Harvest
{
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("Harvest\n", options);
System.out.println("\nPING OAI server: Harvest -g -s oai_source -i oai_set_id");
System.out.println("\nPING OAI server: Harvest -g -a oai_source -i oai_set_id");
System.out.println("RUNONCE harvest with arbitrary options: Harvest -o -e eperson -c collection -t harvest_type -a oai_source -i oai_set_id -m metadata_format");
System.out.println("SETUP a collection for harvesting: Harvest -s -c collection -t harvest_type -a oai_source -i oai_set_id -m metadata_format");
System.out.println("RUN harvest once: Harvest -r -e eperson -c collection");
@@ -160,7 +156,7 @@ public class Harvest
// Instantiate our class
Harvest harvester = new Harvest();
harvester.context = new Context();
harvester.context = new Context(Context.Mode.BATCH_EDIT);
// Check our options
@@ -203,7 +199,7 @@ public class Harvest
System.out.println(" (run with -h flag for details)");
System.exit(1);
}
List<HarvestedCollection> harvestedCollections = harvestedCollectionService.findAll(context);
for (HarvestedCollection harvestedCollection : harvestedCollections)
{
@@ -247,7 +243,7 @@ public class Harvest
{
System.out.println("Error - a metadata key (commonly the prefix) must be specified for this collection");
System.out.println(" (run with -h flag for details)");
System.exit(1);
System.exit(1);
}
harvester.configureCollection(collection, harvestType, oaiSource, oaiSetID, metadataKey);
@@ -270,10 +266,10 @@ public class Harvest
* the collection, if not, bail out.
*/
private Collection resolveCollection(String collectionID) {
DSpaceObject dso;
Collection targetCollection = null;
try {
// is the ID a handle?
if (collectionID != null)
@@ -311,30 +307,30 @@ public class Harvest
catch (SQLException se) {
se.printStackTrace();
}
return targetCollection;
}
private void configureCollection(String collectionID, int type, String oaiSource, String oaiSetId, String mdConfigId) {
System.out.println("Running: configure collection");
Collection collection = resolveCollection(collectionID);
System.out.println(collection.getID());
try {
HarvestedCollection hc = harvestedCollectionService.find(context, collection);
if (hc == null) {
hc = harvestedCollectionService.create(context, collection);
}
context.turnOffAuthorisationSystem();
hc.setHarvestParams(type, oaiSource, oaiSetId, mdConfigId);
hc.setHarvestStatus(HarvestedCollection.STATUS_READY);
harvestedCollectionService.update(context, hc);
context.restoreAuthSystemState();
context.complete();
}
}
catch (Exception e) {
System.out.println("Changes could not be committed");
e.printStackTrace();
@@ -358,8 +354,8 @@ public class Harvest
private void purgeCollection(String collectionID, String email) {
System.out.println("Purging collection of all items and resetting last_harvested and harvest_message: " + collectionID);
Collection collection = resolveCollection(collectionID);
try
try
{
EPerson eperson = ePersonService.findByEmail(context, email);
context.setCurrentUser(eperson);
@@ -373,16 +369,16 @@ public class Harvest
Item item = it.next();
System.out.println("Deleting: " + item.getHandle());
collectionService.removeItem(context, collection, item);
// Dispatch events every 50 items
if (i%50 == 0) {
context.dispatchEvents();
i=0;
}
}
HarvestedCollection hc = harvestedCollectionService.find(context, collection);
if (hc != null) {
hc.setLastHarvested(null);
context.uncacheEntity(item);
// Dispatch events every 50 items
if (i%50 == 0) {
context.dispatchEvents();
i=0;
}
}
HarvestedCollection hc = harvestedCollectionService.find(context, collection);
if (hc != null) {
hc.setLastHarvested(null);
hc.setHarvestMessage("");
hc.setHarvestStatus(HarvestedCollection.STATUS_READY);
hc.setHarvestStartTime(null);
@@ -407,7 +403,7 @@ public class Harvest
*/
private void runHarvest(String collectionID, String email) {
System.out.println("Running: a harvest cycle on " + collectionID);
System.out.print("Initializing the harvester... ");
OAIHarvester harvester = null;
try {
@@ -425,7 +421,7 @@ public class Harvest
System.out.println(se.getMessage());
throw new IllegalStateException("Unable to access database", se);
}
try {
// Harvest will not work for an anonymous user
EPerson eperson = ePersonService.findByEmail(context, email);
@@ -468,7 +464,7 @@ public class Harvest
catch (Exception ex) {
System.out.println("failed. ");
ex.printStackTrace();
}
}
}
/**

View File

@@ -176,7 +176,7 @@ public class ItemExportCLITool {
System.exit(1);
}
Context c = new Context();
Context c = new Context(Context.Mode.READ_ONLY);
c.turnOffAuthorisationSystem();
if (myType == Constants.ITEM)

View File

@@ -7,44 +7,28 @@
*/
package org.dspace.app.itemexport;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import javax.mail.MessagingException;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.app.itemexport.service.ItemExportService;
import org.dspace.content.*;
import org.dspace.content.Collection;
import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
import org.dspace.core.LogManager;
import org.dspace.core.Utils;
import org.dspace.core.Email;
import org.dspace.core.*;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.service.HandleService;
import org.springframework.beans.factory.annotation.Autowired;
import javax.mail.MessagingException;
import java.io.*;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
/**
* Item exporter to create simple AIPs for DSpace content. Currently exports
* individual items, or entire collections. For instructions on use, see
@@ -129,7 +113,9 @@ public class ItemExportServiceImpl implements ItemExportService
}
System.out.println("Exporting item to " + mySequenceNumber);
exportItem(c, i.next(), fullPath, mySequenceNumber, migrate, excludeBitstreams);
Item item = i.next();
exportItem(c, item, fullPath, mySequenceNumber, migrate, excludeBitstreams);
c.uncacheEntity(item);
mySequenceNumber++;
}
}

View File

@@ -294,7 +294,7 @@ public class ItemImportCLITool {
myloader.setQuiet(isQuiet);
// create a context
Context c = new Context();
Context c = new Context(Context.Mode.BATCH_EDIT);
// find the EPerson, assign to context
EPerson myEPerson = null;

View File

@@ -14,21 +14,6 @@ import gr.ekt.bte.core.TransformationSpec;
import gr.ekt.bte.dataloader.FileDataLoader;
import gr.ekt.bteio.generators.DSpaceOutputGenerator;
import gr.ekt.bteio.loaders.OAIPMHDataLoader;
import java.io.*;
import java.net.URL;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.zip.ZipFile;
import java.util.zip.ZipEntry;
import javax.mail.MessagingException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import org.apache.commons.collections.ComparatorUtils;
import org.apache.commons.io.FileDeleteStrategy;
import org.apache.commons.io.FileUtils;
@@ -46,18 +31,14 @@ import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.*;
import org.dspace.content.Collection;
import org.dspace.content.service.*;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.Email;
import org.dspace.core.I18nUtil;
import org.dspace.core.LogManager;
import org.dspace.core.*;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.service.EPersonService;
import org.dspace.eperson.service.GroupService;
import org.dspace.handle.service.HandleService;
import org.dspace.utils.DSpace;
import org.dspace.workflow.WorkflowItem;
import org.dspace.workflow.WorkflowService;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
@@ -67,6 +48,19 @@ import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import javax.mail.MessagingException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import java.io.*;
import java.net.URL;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
/**
* Import items into DSpace. The conventional use is upload files by copying
@@ -314,39 +308,39 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
Arrays.sort(dircontents, ComparatorUtils.naturalComparator());
for (int i = 0; i < dircontents.length; i++)
for (int i = 0; i < dircontents.length; i++)
{
if (skipItems.containsKey(dircontents[i]))
{
if (skipItems.containsKey(dircontents[i]))
{
System.out.println("Skipping import of " + dircontents[i]);
System.out.println("Skipping import of " + dircontents[i]);
}
else
{
List<Collection> clist;
if (directoryFileCollections) {
String path = sourceDir + File.separatorChar + dircontents[i];
try {
List<Collection> cols = processCollectionFile(c, path, "collections");
if (cols == null) {
System.out.println("No collections specified for item " + dircontents[i] + ". Skipping.");
continue;
}
clist = cols;
}
catch (IllegalArgumentException e)
{
System.out.println(e.getMessage() + " Skipping." );
continue;
}
}
else
{
List<Collection> clist;
if (directoryFileCollections) {
String path = sourceDir + File.separatorChar + dircontents[i];
try {
List<Collection> cols = processCollectionFile(c, path, "collections");
if (cols == null) {
System.out.println("No collections specified for item " + dircontents[i] + ". Skipping.");
continue;
}
clist = cols;
}
catch (IllegalArgumentException e)
{
System.out.println(e.getMessage() + " Skipping." );
continue;
}
}
else
{
clist = mycollections;
}
addItem(c, clist, sourceDir, dircontents[i], mapOut, template);
System.out.println(i + " " + dircontents[i]);
clist = mycollections;
}
Item item = addItem(c, clist, sourceDir, dircontents[i], mapOut, template);
c.uncacheEntity(item);
System.out.println(i + " " + dircontents[i]);
}
}
} finally {
if(mapOut!=null) {
@@ -416,7 +410,9 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
handleOut.close();
deleteItem(c, oldItem);
addItem(c, mycollections, sourceDir, newItemName, null, template);
Item newItem = addItem(c, mycollections, sourceDir, newItemName, null, template);
c.uncacheEntity(oldItem);
c.uncacheEntity(newItem);
}
}
@@ -447,6 +443,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
Item myitem = itemService.findByIdOrLegacyId(c, itemID);
System.out.println("Deleting item " + itemID);
deleteItem(c, myitem);
c.uncacheEntity(myitem);
}
}
}
@@ -473,6 +470,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
// create workspace item
Item myitem = null;
WorkspaceItem wi = null;
WorkflowItem wfi = null;
if (!isTest)
{
@@ -498,9 +496,9 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
{
// Should we send a workflow alert email or not?
if (useWorkflowSendEmail) {
workflowService.start(c, wi);
wfi = workflowService.start(c, wi);
} else {
workflowService.startWithoutNotify(c, wi);
wfi = workflowService.startWithoutNotify(c, wi);
}
// send ID to the mapfile
@@ -556,6 +554,10 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
mapOut.println(mapOutputString);
}
//Clear intermediary objects from the cache
c.uncacheEntity(wi);
c.uncacheEntity(wfi);
return myitem;
}
@@ -593,6 +595,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
else
{
deleteItem(c, myitem);
c.uncacheEntity(myitem);
}
}

View File

@@ -7,21 +7,7 @@
*/
package org.dspace.app.itemupdate;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FilenameFilter;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.*;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.*;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
@@ -31,6 +17,9 @@ import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import java.io.*;
import java.util.*;
/**
*
* Provides some batch editing capabilities for items in DSpace:
@@ -62,7 +51,7 @@ import org.dspace.eperson.service.EPersonService;
*
*/
public class ItemUpdate {
public static final String SUPPRESS_UNDO_FILENAME = "suppress_undo";
public static final String CONTENTS_FILE = "contents";
@@ -70,7 +59,7 @@ public class ItemUpdate {
public static String HANDLE_PREFIX = null;
public static final Map<String, String> filterAliases = new HashMap<String, String>();
public static boolean verbose = false;
protected static final EPersonService epersonService = EPersonServiceFactory.getInstance().getEPersonService();
@@ -80,10 +69,10 @@ public class ItemUpdate {
{
filterAliases.put("ORIGINAL", "org.dspace.app.itemupdate.OriginalBitstreamFilter");
filterAliases.put("ORIGINAL_AND_DERIVATIVES", "org.dspace.app.itemupdate.OriginalWithDerivativesBitstreamFilter");
filterAliases.put("TEXT", "org.dspace.app.itemupdate.DerivativeTextBitstreamFilter");
filterAliases.put("THUMBNAIL", "org.dspace.app.itemupdate.ThumbnailBitstreamFilter");
filterAliases.put("TEXT", "org.dspace.app.itemupdate.DerivativeTextBitstreamFilter");
filterAliases.put("THUMBNAIL", "org.dspace.app.itemupdate.ThumbnailBitstreamFilter");
}
// File listing filter to check for folders
static FilenameFilter directoryFilter = new FilenameFilter()
{
@@ -122,11 +111,11 @@ public class ItemUpdate {
Options options = new Options();
//processing basis for determining items
//processing basis for determining items
//item-specific changes with metadata in source directory with dublin_core.xml files
options.addOption("s", "source", true, "root directory of source dspace archive ");
//actions on items
//actions on items
options.addOption("a", "addmetadata", true, "add metadata specified for each item; multiples separated by semicolon ';'");
options.addOption("d", "deletemetadata", true, "delete metadata specified for each item");
@@ -138,13 +127,13 @@ public class ItemUpdate {
delBitstreamOption.setArgName("BitstreamFilter");
options.addOption(delBitstreamOption);
//other params
//other params
options.addOption("e", "eperson", true, "email of eperson doing the update");
options.addOption("i", "itemfield", true, "optional metadata field that containing item identifier; default is dc.identifier.uri");
options.addOption("F", "filter-properties", true, "filter class name; only for deleting bitstream");
options.addOption("v", "verbose", false, "verbose logging");
//special run states
//special run states
options.addOption("t", "test", false, "test run - do not actually import items");
options.addOption("P", "provenance", false, "suppress altering provenance field for bitstream changes");
options.addOption("h", "help", false, "help");
@@ -156,12 +145,12 @@ public class ItemUpdate {
String metadataIndexName = null;
Context context = null;
ItemUpdate iu = new ItemUpdate();
ItemUpdate iu = new ItemUpdate();
try
{
CommandLine line = parser.parse(options, argv);
if (line.hasOption('h'))
{
HelpFormatter myhelp = new HelpFormatter();
@@ -173,10 +162,10 @@ public class ItemUpdate {
pr(" adding bitstreams: ItemUpdate -e jsmith@mit.edu -s sourcedir -A -i dc.identifier");
pr(" deleting bitstreams: ItemUpdate -e jsmith@mit.edu -s sourcedir -D ORIGINAL ");
pr("");
System.exit(0);
}
if (line.hasOption('v'))
{
verbose = true;
@@ -190,85 +179,85 @@ public class ItemUpdate {
}
iu.eperson = line.getOptionValue('e'); // db ID or email
if (!line.hasOption('s')) // item specific changes from archive dir
{
pr("Missing source archive option");
System.exit(1);
}
String sourcedir = line.getOptionValue('s');
if (line.hasOption('t')) //test
{
isTest = true;
pr("**Test Run** - not actually updating items.");
}
if (line.hasOption('i'))
{
itemField = line.getOptionValue('i');
itemField = line.getOptionValue('i');
}
if (line.hasOption('d'))
{
String[] targetFields = line.getOptionValues('d');
String[] targetFields = line.getOptionValues('d');
DeleteMetadataAction delMetadataAction = (DeleteMetadataAction) iu.actionMgr.getUpdateAction(DeleteMetadataAction.class);
delMetadataAction.addTargetFields(targetFields);
//undo is an add
//undo is an add
for (String field : targetFields)
{
iu.undoActionList.add(" -a " + field + " ");
}
pr("Delete metadata for fields: ");
for (String s : targetFields)
{
pr(" " + s);
}
}
if (line.hasOption('a'))
{
String[] targetFields = line.getOptionValues('a');
AddMetadataAction addMetadataAction = (AddMetadataAction) iu.actionMgr.getUpdateAction(AddMetadataAction.class);
String[] targetFields = line.getOptionValues('a');
AddMetadataAction addMetadataAction = (AddMetadataAction) iu.actionMgr.getUpdateAction(AddMetadataAction.class);
addMetadataAction.addTargetFields(targetFields);
//undo is a delete followed by an add of a replace record for target fields
for (String field : targetFields)
{
iu.undoActionList.add(" -d " + field + " ");
}
for (String field : targetFields)
{
iu.undoActionList.add(" -a " + field + " ");
}
pr("Add metadata for fields: ");
for (String s : targetFields)
{
pr(" " + s);
}
}
if (line.hasOption('D')) // undo not supported
if (line.hasOption('D')) // undo not supported
{
pr("Delete bitstreams ");
String[] filterNames = line.getOptionValues('D');
if ((filterNames != null) && (filterNames.length > 1))
{
pr("Error: Only one filter can be a used at a time.");
System.exit(1);
}
String filterName = line.getOptionValue('D');
pr("Filter argument: " + filterName);
if (filterName == null) // indicates using delete_contents files
{
DeleteBitstreamsAction delAction = (DeleteBitstreamsAction) iu.actionMgr.getUpdateAction(DeleteBitstreamsAction.class);
@@ -278,19 +267,19 @@ public class ItemUpdate {
{
// check if param is on ALIAS list
String filterClassname = filterAliases.get(filterName);
if (filterClassname == null)
{
filterClassname = filterName;
}
BitstreamFilter filter = null;
BitstreamFilter filter = null;
try
{
Class<?> cfilter = Class.forName(filterClassname);
pr("BitstreamFilter class to instantiate: " + cfilter.toString());
filter = (BitstreamFilter) cfilter.newInstance(); //unfortunate cast, an erasure consequence
}
catch(Exception e)
@@ -298,7 +287,7 @@ public class ItemUpdate {
pr("Error: Failure instantiating bitstream filter class: " + filterClassname);
System.exit(1);
}
String filterPropertiesName = line.getOptionValue('F');
if (filterPropertiesName != null) //not always required
{
@@ -309,7 +298,7 @@ public class ItemUpdate {
{
filterPropertiesName = sourcedir + File.separator + filterPropertiesName;
}
filter.initProperties(filterPropertiesName);
}
catch(Exception e)
@@ -318,52 +307,52 @@ public class ItemUpdate {
System.exit(1);
}
}
DeleteBitstreamsByFilterAction delAction =
DeleteBitstreamsByFilterAction delAction =
(DeleteBitstreamsByFilterAction) iu.actionMgr.getUpdateAction(DeleteBitstreamsByFilterAction.class);
delAction.setAlterProvenance(alterProvenance);
delAction.setBitstreamFilter(filter);
//undo not supported
}
}
}
if (line.hasOption('A'))
{
pr("Add bitstreams ");
pr("Add bitstreams ");
AddBitstreamsAction addAction = (AddBitstreamsAction) iu.actionMgr.getUpdateAction(AddBitstreamsAction.class);
addAction.setAlterProvenance(alterProvenance);
iu.undoActionList.add(" -D "); // delete_contents file will be written, no arg required
}
iu.undoActionList.add(" -D "); // delete_contents file will be written, no arg required
}
if (!iu.actionMgr.hasActions())
{
pr("Error - an action must be specified");
System.exit(1);
}
else
else
{
pr("Actions to be performed: ");
for (UpdateAction ua : iu.actionMgr)
{
pr(" " + ua.getClass().getName());
}
}
pr("ItemUpdate - initializing run on " + (new Date()).toString());
context = new Context();
iu.setEPerson(context, iu.eperson);
context = new Context(Context.Mode.BATCH_EDIT);
iu.setEPerson(context, iu.eperson);
context.turnOffAuthorisationSystem();
HANDLE_PREFIX = ConfigurationManager.getProperty("handle.canonical.prefix");
if (HANDLE_PREFIX == null || HANDLE_PREFIX.length() == 0)
{
HANDLE_PREFIX = "http://hdl.handle.net/";
}
iu.processArchive(context, sourcedir, itemField, metadataIndexName, alterProvenance, isTest);
iu.processArchive(context, sourcedir, itemField, metadataIndexName, alterProvenance, isTest);
context.complete(); // complete all transactions
}
@@ -388,7 +377,7 @@ public class ItemUpdate {
else
{
pr("End.");
}
System.exit(status);
}
@@ -430,24 +419,24 @@ public class ItemUpdate {
File undoDir = null; //sibling directory of source archive
if (!suppressUndo && !isTest)
if (!suppressUndo && !isTest)
{
undoDir = initUndoArchive(sourceDir);
undoDir = initUndoArchive(sourceDir);
}
int itemCount = 0;
int successItemCount = 0;
for (String dirname : dircontents)
{
itemCount++;
pr("");
pr("processing item " + dirname);
try
{
ItemArchive itarch = ItemArchive.create(context, new File(sourceDir, dirname), itemField);
for (UpdateAction action : actionMgr)
{
pr("action: " + action.getClass().getName());
@@ -461,28 +450,28 @@ public class ItemUpdate {
{
Item item = itarch.getItem();
itemService.update(context, item); //need to update before commit
}
ItemUpdate.pr("Item " + dirname + " completed");
successItemCount++;
}
catch(Exception e)
{
pr("Exception processing item " + dirname + ": " + e.toString());
context.uncacheEntity(item);
}
ItemUpdate.pr("Item " + dirname + " completed");
successItemCount++;
}
catch(Exception e)
{
pr("Exception processing item " + dirname + ": " + e.toString());
e.printStackTrace();
}
}
if (!suppressUndo && !isTest)
{
{
StringBuilder sb = new StringBuilder("dsrun org.dspace.app.itemupdate.ItemUpdate ");
sb.append(" -e ").append(this.eperson);
sb.append(" -s ").append(undoDir);
if (itemField != null)
{
sb.append(" -i ").append(itemField);
}
if (!alterProvenance)
{
sb.append(" -P ");
@@ -491,12 +480,12 @@ public class ItemUpdate {
{
sb.append(" -t ");
}
for (String actionOption : undoActionList)
{
sb.append(actionOption);
}
}
PrintWriter pw = null;
try
{
@@ -527,29 +516,29 @@ public class ItemUpdate {
*/
protected File initUndoArchive(File sourceDir)
throws FileNotFoundException, IOException
{
{
File parentDir = sourceDir.getCanonicalFile().getParentFile();
if (parentDir == null)
{
throw new FileNotFoundException("Parent directory of archive directory not found; unable to write UndoArchive; no processing performed");
throw new FileNotFoundException("Parent directory of archive directory not found; unable to write UndoArchive; no processing performed");
}
String sourceDirName = sourceDir.getName();
int seqNo = 1;
File undoDir = new File(parentDir, "undo_" + sourceDirName + "_" + seqNo);
while (undoDir.exists())
{
undoDir = new File(parentDir, "undo_" + sourceDirName+ "_" + ++seqNo); //increment
}
// create root directory
if (!undoDir.mkdir())
{
pr("ERROR creating Undo Archive directory " + undoDir.getCanonicalPath());
throw new IOException("ERROR creating Undo Archive directory " + undoDir.getCanonicalPath());
}
//Undo is suppressed to prevent undo of undo
File fSuppressUndo = new File(undoDir, ItemUpdate.SUPPRESS_UNDO_FILENAME);
try
@@ -561,9 +550,9 @@ public class ItemUpdate {
pr("ERROR creating Suppress Undo File " + e.toString());
throw e;
}
return undoDir;
return undoDir;
}
//private void write
/**
@@ -606,7 +595,7 @@ public class ItemUpdate {
* poor man's logging
* As with ItemImport, API logging goes through log4j to the DSpace.log files
* whereas the batch logging goes to the console to be captured there.
*
*
* @param s String
*/
static void pr(String s)
@@ -620,7 +609,7 @@ public class ItemUpdate {
*/
static void prv(String s)
{
if (verbose)
if (verbose)
{
System.out.println(s);
}

View File

@@ -7,9 +7,6 @@
*/
package org.dspace.app.mediafilter;
import java.io.InputStream;
import java.util.*;
import org.dspace.app.mediafilter.service.MediaFilterService;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.*;
@@ -24,6 +21,9 @@ import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.InputStream;
import java.util.*;
/**
* MediaFilterManager is the class that invokes the media/format filters over the
* repository's content. A few command line flags affect the operation of the
@@ -161,6 +161,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
++processed;
}
// clear item objects from context cache and internal cache
c.uncacheEntity(currentItem);
currentItem = null;
}
}

View File

@@ -7,26 +7,14 @@
*/
package org.dspace.app.packager;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.sql.SQLException;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.*;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.content.packager.PackageDisseminator;
import org.dspace.content.packager.PackageException;
import org.dspace.content.packager.PackageParameters;
import org.dspace.content.packager.PackageIngester;
import org.dspace.content.packager.PackageParameters;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.factory.CoreServiceFactory;
@@ -36,6 +24,10 @@ import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.workflow.WorkflowException;
import java.io.*;
import java.sql.SQLException;
import java.util.List;
/**
* Command-line interface to the Packager plugin.
* <p>
@@ -331,6 +323,7 @@ public class Packager
//If we are in REPLACE mode
if(pkgParams.replaceModeEnabled())
{
context.setMode(Context.Mode.BATCH_EDIT);
PackageIngester sip = (PackageIngester) pluginService
.getNamedPlugin(PackageIngester.class, myPackager.packageType);
if (sip == null)
@@ -394,6 +387,8 @@ public class Packager
//else if normal SUBMIT mode (or basic RESTORE mode -- which is a special type of submission)
else if (myPackager.submit || pkgParams.restoreModeEnabled())
{
context.setMode(Context.Mode.BATCH_EDIT);
PackageIngester sip = (PackageIngester) pluginService
.getNamedPlugin(PackageIngester.class, myPackager.packageType);
if (sip == null)
@@ -445,6 +440,8 @@ public class Packager
}// else, if DISSEMINATE mode
else
{
context.setMode(Context.Mode.READ_ONLY);
//retrieve specified package disseminator
PackageDisseminator dip = (PackageDisseminator) pluginService
.getNamedPlugin(PackageDisseminator.class, myPackager.packageType);

View File

@@ -7,28 +7,9 @@
*/
package org.dspace.app.sitemap;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.sql.SQLException;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.cli.*;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.Community;
@@ -42,6 +23,16 @@ import org.dspace.core.LogManager;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.sql.SQLException;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
/**
* Command-line utility for generating HTML and Sitemaps.org protocol Sitemaps.
*
@@ -188,7 +179,7 @@ public class GenerateSitemaps
+ "?map=", null);
}
Context c = new Context();
Context c = new Context(Context.Mode.READ_ONLY);
List<Community> comms = communityService.findAll(c);
@@ -201,6 +192,8 @@ public class GenerateSitemaps
if (makeSitemapOrg) {
sitemapsOrg.addURL(url, null);
}
c.uncacheEntity(comm);
}
List<Collection> colls = collectionService.findAll(c);
@@ -214,6 +207,8 @@ public class GenerateSitemaps
if (makeSitemapOrg) {
sitemapsOrg.addURL(url, null);
}
c.uncacheEntity(coll);
}
Iterator<Item> allItems = itemService.findAll(c);
@@ -234,6 +229,8 @@ public class GenerateSitemaps
sitemapsOrg.addURL(url, lastMod);
}
c.uncacheEntity(i);
itemCount++;
}

View File

@@ -145,6 +145,7 @@ public class DSpaceAuthorityIndexer implements AuthorityIndexerInterface, Initia
} else {
// 3. iterate over the items
context.uncacheEntity(currentItem);
if (itemIterator.hasNext()) {
currentItem = itemIterator.next();

View File

@@ -460,7 +460,6 @@ public class AuthorizeServiceImpl implements AuthorizeService
return groupService.isMember(c, Group.ADMIN);
}
}
@Override
public boolean isAdmin(Context c, EPerson e) throws SQLException
{
@@ -478,7 +477,6 @@ public class AuthorizeServiceImpl implements AuthorizeService
return groupService.isMember(c, e, Group.ADMIN);
}
}
public boolean isCommunityAdmin(Context c) throws SQLException
{
EPerson e = c.getCurrentUser();

View File

@@ -7,11 +7,6 @@
*/
package org.dspace.checker;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.Map;
import org.apache.commons.collections.MapUtils;
import org.apache.log4j.Logger;
import org.dspace.checker.factory.CheckerServiceFactory;
@@ -23,6 +18,11 @@ import org.dspace.core.Context;
import org.dspace.storage.bitstore.factory.StorageServiceFactory;
import org.dspace.storage.bitstore.service.BitstreamStorageService;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.Map;
/**
* <p>
* Main class for the checksum checker tool, which calculates checksums for each
@@ -127,6 +127,7 @@ public final class CheckerCommand
collector.collect(context, info);
}
context.uncacheEntity(bitstream);
bitstream = dispatcher.next();
}
}

View File

@@ -7,20 +7,7 @@
*/
package org.dspace.checker;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.GregorianCalendar;
import javax.mail.MessagingException;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.*;
import org.apache.log4j.Logger;
import org.dspace.checker.factory.CheckerServiceFactory;
import org.dspace.checker.service.SimpleReporterService;
@@ -28,6 +15,14 @@ import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.Email;
import javax.mail.MessagingException;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.GregorianCalendar;
/**
* <p>
* The email reporter creates and sends emails to an administrator. This only
@@ -168,7 +163,7 @@ public class DailyReportEmailer
try
{
context = new Context();
context = new Context(Context.Mode.READ_ONLY);
// the number of bitstreams in report
int numBitstreams = 0;

View File

@@ -13,8 +13,6 @@ import org.dspace.content.service.CollectionService;
import org.dspace.core.*;
import org.dspace.eperson.Group;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import org.hibernate.annotations.Sort;
import org.hibernate.annotations.SortType;
import org.hibernate.proxy.HibernateProxyHelper;
import javax.persistence.*;

View File

@@ -15,8 +15,6 @@ import org.dspace.content.service.CommunityService;
import org.dspace.core.*;
import org.dspace.eperson.Group;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import org.hibernate.annotations.Sort;
import org.hibernate.annotations.SortType;
import org.hibernate.proxy.HibernateProxyHelper;
import javax.persistence.*;

View File

@@ -21,6 +21,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
/**
@@ -98,7 +99,7 @@ public class MetadataValueServiceImpl implements MetadataValueService {
}
@Override
public List<MetadataValue> findByValueLike(Context context, String value) throws SQLException {
public Iterator<MetadataValue> findByValueLike(Context context, String value) throws SQLException {
return metadataValueDAO.findByValueLike(context, value);
}

View File

@@ -13,6 +13,7 @@ import org.dspace.core.Context;
import org.dspace.core.GenericDAO;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
/**
@@ -26,7 +27,7 @@ public interface MetadataValueDAO extends GenericDAO<MetadataValue> {
public List<MetadataValue> findByField(Context context, MetadataField fieldId) throws SQLException;
public List<MetadataValue> findByValueLike(Context context, String value) throws SQLException;
public Iterator<MetadataValue> findByValueLike(Context context, String value) throws SQLException;
public void deleteByMetadataField(Context context, MetadataField metadataField) throws SQLException;

View File

@@ -18,6 +18,7 @@ import org.hibernate.Query;
import org.hibernate.criterion.Restrictions;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
/**
@@ -48,14 +49,14 @@ public class MetadataValueDAOImpl extends AbstractHibernateDAO<MetadataValue> im
}
@Override
public List<MetadataValue> findByValueLike(Context context, String value) throws SQLException {
Criteria criteria = createCriteria(context, MetadataValue.class);
criteria.add(
Restrictions.like("value", "%" + value + "%")
);
criteria.setFetchMode("metadataField", FetchMode.JOIN);
public Iterator<MetadataValue> findByValueLike(Context context, String value) throws SQLException {
String queryString = "SELECT m FROM MetadataValue m JOIN m.metadataField f " +
"WHERE m.value like concat('%', concat(:searchString,'%')) ORDER BY m.id ASC";
return list(criteria);
Query query = createQuery(context, queryString);
query.setString("searchString", value);
return iterate(query);
}
@Override

View File

@@ -15,6 +15,7 @@ import org.dspace.core.Context;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
/**
@@ -82,7 +83,7 @@ public interface MetadataValueService {
*/
public void delete(Context context, MetadataValue metadataValue) throws SQLException;
public List<MetadataValue> findByValueLike(Context context, String value) throws SQLException;
public Iterator<MetadataValue> findByValueLike(Context context, String value) throws SQLException;
public void deleteByMetadataField(Context context, MetadataField metadataField) throws SQLException;

View File

@@ -362,8 +362,13 @@ public class Context
try
{
// As long as we have a valid, writeable database connection,
// commit any changes made as part of the transaction
commit();
// rollback any changes if we are in read-only mode,
// otherwise, commit any changes made as part of the transaction
if(isReadOnly()) {
abort();
} else {
commit();
}
}
finally
{

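For callers this means a tool that opened its context as READ_ONLY can still end with complete() and nothing will be persisted, since complete() now rolls back in that mode. A small usage sketch (hypothetical caller, not part of the diff):

Context context = new Context(Context.Mode.READ_ONLY);
try
{
    // ... read-only work: exports, reports, sitemap generation ...
    context.complete(); // in READ_ONLY mode this rolls back instead of committing
}
finally
{
    // abort if complete() was never reached (e.g. an exception was thrown)
    if (context.isValid())
    {
        context.abort();
    }
}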
View File

@@ -7,16 +7,7 @@
*/
package org.dspace.curate;
import java.io.BufferedReader;
import java.io.FileReader;
import java.util.Iterator;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.*;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Context;
import org.dspace.core.factory.CoreServiceFactory;
@@ -25,6 +16,10 @@ import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import java.io.BufferedReader;
import java.io.FileReader;
import java.util.Iterator;
/**
* CurationCli provides command-line access to Curation tools and processes.
*
@@ -142,7 +137,7 @@ public class CurationCli
}
EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
Context c = new Context();
Context c = new Context(Context.Mode.BATCH_EDIT);
if (ePersonName != null)
{
EPerson ePerson = ePersonService.findByEmail(c, ePersonName);

View File

@@ -512,10 +512,14 @@ public class Curator
{
return false;
}
Iterator<Item> iter = itemService.findByCollection(curationContext(), coll);
Context context = curationContext();
Iterator<Item> iter = itemService.findByCollection(context, coll);
while (iter.hasNext())
{
if (! tr.run(iter.next()))
Item item = iter.next();
boolean shouldContinue = tr.run(item);
context.uncacheEntity(item);
if (!shouldContinue)
{
return false;
}

View File

@@ -115,7 +115,7 @@ public class EmbargoCLITool {
Context context = null;
try
{
context = new Context();
context = new Context(Context.Mode.BATCH_EDIT);
context.turnOffAuthorisationSystem();
Date now = new Date();
@@ -149,10 +149,12 @@ public class EmbargoCLITool {
Iterator<Item> ii = embargoService.findItemsByLiftMetadata(context);
while (ii.hasNext())
{
if (processOneItem(context, ii.next(), line, now))
Item item = ii.next();
if (processOneItem(context, item, line, now))
{
status = 1;
}
context.uncacheEntity(item);
}
}
context.complete();

View File

@@ -302,7 +302,7 @@ public class SubscribeCLITool {
Context context = null;
try {
context = new Context();
context = new Context(Context.Mode.READ_ONLY);
processDaily(context, test);
context.complete();
} catch (Exception e) {

View File

@@ -169,7 +169,7 @@ public interface GroupService extends DSpaceObjectService<Group>, DSpaceObjectLe
* @throws SQLException if database error
*/
public boolean isMember(Context context, EPerson epersonToCheck, String groupName) throws SQLException;
/**
* fast check to see if an eperson is a member called with eperson id, does
* database lookup without instantiating all of the epeople objects and is

View File

@@ -231,6 +231,10 @@ public class HandleServiceImpl implements HandleService
// can verify during a restore whether the same *type* of resource
// is reusing this handle!
handle.setDSpaceObject(null);
//Also remove the handle from the DSO list to keep a consistent model
dso.getHandles().remove(handle);
handleDAO.save(context, handle);
if (log.isDebugEnabled())
@@ -241,7 +245,7 @@ public class HandleServiceImpl implements HandleService
}
else
{
log.warn("Cannot find Handle entry to unbind for object " + Constants.typeText[dso.getType()] + " id=" + dso.getID());
log.trace("Cannot find Handle entry to unbind for object " + Constants.typeText[dso.getType()] + " id=" + dso.getID() + ". Handle could have been unbound before.");
}
}

View File

@@ -7,10 +7,6 @@
*/
package org.dspace.handle;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.sql.SQLException;
import java.util.List;
import org.apache.log4j.Logger;
import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory;
@@ -19,6 +15,13 @@ import org.dspace.core.Context;
import org.dspace.discovery.IndexClient;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.sql.SQLException;
import java.util.Iterator;
/**
* A script to update the handle values in the database. This is typically used
@@ -32,6 +35,7 @@ public class UpdateHandlePrefix
{
private static final Logger log = Logger.getLogger(UpdateHandlePrefix.class);
private static final ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
/**
* When invoked as a command-line tool, updates handle prefix
@@ -94,12 +98,19 @@ public class UpdateHandlePrefix
System.out.print("Updating metadatavalues table... ");
MetadataValueService metadataValueService = ContentServiceFactory.getInstance().getMetadataValueService();
List<MetadataValue> metadataValues = metadataValueService.findByValueLike(context, "http://hdl.handle.net/");
int updMeta = metadataValues.size();
for (MetadataValue metadataValue : metadataValues) {
metadataValue.setValue(metadataValue.getValue().replace("http://hdl.handle.net/" + oldH, "http://hdl.handle.net/" + newH));
String handlePrefix = configurationService.getProperty("handle.canonical.prefix");
Iterator<MetadataValue> metadataValues = metadataValueService.findByValueLike(context, handlePrefix);
int updMeta = 0;
while(metadataValues.hasNext()) {
MetadataValue metadataValue = metadataValues.next();
metadataValue.setValue(metadataValue.getValue().replace(handlePrefix + oldH, handlePrefix + newH));
metadataValueService.update(context, metadataValue, true);
context.uncacheEntity(metadataValue);
updMeta++;
}
System.out.println(
updMeta + " metadata value" + ((updMeta > 1) ? "s" : "") + " updated"
);

View File

@@ -646,8 +646,13 @@ public class OAIHarvester {
log.info(String.format("Item %s (%s) has been ingested (item %d of %d). The whole process took: %d ms.",
item.getHandle(), item.getID(), currentRecord, totalListSize, timeTaken));
// Stop ignoring authorization
ourContext.restoreAuthSystemState();
//Clear the context cache
ourContext.uncacheEntity(wi);
ourContext.uncacheEntity(hi);
ourContext.uncacheEntity(item);
// Stop ignoring authorization
ourContext.restoreAuthSystemState();
}

View File

@@ -8,28 +8,12 @@
package org.dspace.identifier.doi;
import java.io.IOException;
import java.io.PrintStream;
import java.sql.SQLException;
import java.util.*;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.*;
import org.apache.log4j.Logger;
import org.dspace.content.DSpaceObject;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.Email;
import org.dspace.core.I18nUtil;
import org.dspace.core.*;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.identifier.DOI;
@@ -39,6 +23,11 @@ import org.dspace.identifier.factory.IdentifierServiceFactory;
import org.dspace.identifier.service.DOIService;
import org.dspace.utils.DSpace;
import java.io.IOException;
import java.io.PrintStream;
import java.sql.SQLException;
import java.util.*;
/**
*
@@ -203,6 +192,7 @@ public class DOIOrganiser {
for (DOI doi : dois) {
organiser.reserve(doi);
context.uncacheEntity(doi);
}
} catch (SQLException ex) {
System.err.println("Error in database connection:" + ex.getMessage());
@@ -223,6 +213,7 @@ public class DOIOrganiser {
for (DOI doi : dois)
{
organiser.register(doi);
context.uncacheEntity(doi);
}
} catch (SQLException ex) {
System.err.println("Error in database connection:" + ex.getMessage());
@@ -247,6 +238,7 @@ public class DOIOrganiser {
for (DOI doi : dois)
{
organiser.update(doi);
context.uncacheEntity(doi);
}
} catch (SQLException ex) {
System.err.println("Error in database connection:" + ex.getMessage());
@@ -270,6 +262,7 @@ public class DOIOrganiser {
DOI doi = iterator.next();
iterator.remove();
organiser.delete(doi.getDoi());
context.uncacheEntity(doi);
}
} catch (SQLException ex) {
System.err.println("Error in database connection:" + ex.getMessage());

View File

@@ -9,29 +9,11 @@
package org.dspace.rdf;
import com.hp.hpl.jena.rdf.model.Model;
import java.io.PrintWriter;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.CopyOnWriteArraySet;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.*;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.Site;
import org.dspace.content.*;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.ItemService;
@@ -44,6 +26,14 @@ import org.dspace.rdf.storage.RDFStorage;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import java.io.PrintWriter;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.CopyOnWriteArraySet;
/**
* This class manages the handling of RDF data in DSpace. It generates
* identifiers, it loads data, it manages the conversion of DSpace Objects into
@@ -465,8 +455,9 @@ public class RDFizer {
// }
callback.callback(dso);
report("Processed " + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + " " + dso.getID()
report("Processed " + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + " " + dso.getID()
+ " (handle " + dso.getHandle() + ").");
context.uncacheEntity(dso);
}
protected boolean isProcessed(DSpaceObject dso)

View File

@@ -68,7 +68,7 @@ public class BitStoreMigrate {
System.exit(0);
}
Context context = new Context();
Context context = new Context(Context.Mode.BATCH_EDIT);
context.turnOffAuthorisationSystem();
if(line.hasOption('p')) {

View File

@@ -223,7 +223,7 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini
try
{
context = new Context();
context = new Context(Context.Mode.BATCH_EDIT);
context.turnOffAuthorisationSystem();
List<Bitstream> storage = bitstreamService.findDeletedBitstreams(context);
@@ -254,6 +254,7 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini
}
bitstreamService.expunge(context, bitstream);
}
context.uncacheEntity(bitstream);
continue;
}
@@ -262,7 +263,8 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini
if (isRecent(Long.valueOf(receivedMetadata.get("modified").toString())))
{
log.debug("file is recent");
continue;
context.uncacheEntity(bitstream);
continue;
}
if (deleteDbRecords)
@@ -281,6 +283,7 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini
}
if (isRegisteredBitstream(bitstream.getInternalId())) {
context.uncacheEntity(bitstream);
continue; // do not delete registered bitstreams
}
@@ -310,6 +313,8 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini
{
context.dispatchEvents();
}
context.uncacheEntity(bitstream);
}
System.out.print("Committing changes to the database...");
@@ -393,6 +398,8 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini
}
processedCounter++;
context.uncacheEntity(bitstream);
//modulo
if ((processedCounter % batchCommitSize) == 0) {
log.info("Migration Commit Checkpoint: " + processedCounter);

View File

@@ -9,12 +9,13 @@ package org.dspace.workflow;
import org.dspace.content.InProgressSubmission;
import org.dspace.core.ReloadableEntity;
/**
* Interface representing a workflow item; each WorkflowItem implementation must implement this interface.
*
* @author kevinvandevelde at atmire.com
*/
public interface WorkflowItem extends InProgressSubmission {
public interface WorkflowItem extends InProgressSubmission, ReloadableEntity<Integer> {
}

View File

@@ -25,7 +25,7 @@ import java.sql.SQLException;
*/
@Entity
@Table(name = "workflowitem")
public class BasicWorkflowItem implements WorkflowItem, ReloadableEntity<Integer>
public class BasicWorkflowItem implements WorkflowItem
{
@Id

View File

@@ -166,9 +166,10 @@ public class VersioningTest extends AbstractUnitTest {
@Test
public void testVersionDelete() throws Exception {
context.turnOffAuthorisationSystem();
String handle = versionedItem.getHandle();
versionService.removeVersion(context, versionedItem);
assertThat("Test_version_delete", itemService.find(context, versionedItem.getID()), nullValue());
assertThat("Test_version_handle_delete", handleService.resolveToObject(context, versionedItem.getHandle()), nullValue());
assertThat("Test_version_handle_delete", handleService.resolveToObject(context, handle), nullValue());
context.restoreAuthSystemState();
}
}