DSpace refactored service API

This commit is contained in:
KevinVdV
2014-11-08 09:19:09 +01:00
parent fcb3717aad
commit 54222f3c1d
1145 changed files with 52233 additions and 57064 deletions

View File

@@ -56,6 +56,7 @@ default.language = en_US
# Uncomment the appropriate block below for your database.
# postgres
db.driver=org.postgresql.Driver
db.dialect=org.dspace.storage.rdbms.hibernate.postgres.DSpacePostgreSQL9Dialect
db.url=jdbc:postgresql://localhost:5432/dspace
db.username=dspace
db.password=dspace
@@ -77,18 +78,9 @@ db.schema =
# Maximum number of DB connections in pool
db.maxconnections = 30
# Maximum time to wait before giving up if all connections in pool are busy (milliseconds)
db.maxwait = 5000
# Determine the number of statements that can be cached (set to 0 to disable caching)
db.statementpool.cache = 50
# Maximum number of idle connections in pool (-1 = unlimited)
db.maxidle = -1
# Determine if prepared statement should be cached. (default is true)
db.statementpool = true
# Specify a name for the connection pool (useful if you have multiple applications sharing Tomcat's dbcp)
# If not specified, defaults to 'dspacepool'
db.poolname = dspacepool
#######################
# EMAIL CONFIGURATION #

View File

@@ -289,6 +289,22 @@
<dependencies>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-ehcache</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-c3p0</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-orm</artifactId>
</dependency>
<dependency>
<groupId>org.dspace</groupId>
<artifactId>handle</artifactId>

View File

@@ -9,6 +9,8 @@ package org.dspace.administer;
import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
@@ -17,10 +19,12 @@ import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Community;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CommunityService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
/**
* A command-line tool for setting/removing community/sub-community
@@ -32,6 +36,15 @@ import org.dspace.storage.rdbms.DatabaseManager;
public class CommunityFiliator
{
protected CommunityService communityService;
protected HandleService handleService;
public CommunityFiliator() {
communityService = ContentServiceFactory.getInstance().getCommunityService();
handleService = HandleServiceFactory.getInstance().getHandleService();
}
public static void main(String[] argv) throws Exception
{
// create an options object and populate it
@@ -166,7 +179,7 @@ public class CommunityFiliator
// check that a valid filiation would be established
// first test - proposed child must currently be an orphan (i.e.
// top-level)
Community childDad = child.getParentCommunity();
Community childDad = child.getParentCommunities() != null ? child.getParentCommunities().iterator().next() : null;
if (childDad != null)
{
@@ -177,11 +190,11 @@ public class CommunityFiliator
// second test - circularity: parent's parents can't include proposed
// child
Community[] parentDads = parent.getAllParents();
List<Community> parentDads = parent.getParentCommunities();
for (int i = 0; i < parentDads.length; i++)
for (int i = 0; i < parentDads.size(); i++)
{
if (parentDads[i].getID() == child.getID())
if (parentDads.get(i).getID().equals(child.getID()))
{
System.out
.println("Error, circular parentage - child is parent of parent");
@@ -190,7 +203,7 @@ public class CommunityFiliator
}
// everything's OK
parent.addSubcommunity(child);
communityService.addSubcommunity(c, parent, child);
// complete the pending transaction
c.complete();
@@ -202,12 +215,12 @@ public class CommunityFiliator
throws SQLException, AuthorizeException, IOException
{
// verify that child is indeed a child of parent
Community[] parentKids = parent.getSubcommunities();
List<Community> parentKids = parent.getSubcommunities();
boolean isChild = false;
for (int i = 0; i < parentKids.length; i++)
for (int i = 0; i < parentKids.size(); i++)
{
if (parentKids[i].getID() == child.getID())
if (parentKids.get(i).getID().equals(child.getID()))
{
isChild = true;
@@ -224,9 +237,10 @@ public class CommunityFiliator
// OK remove the mappings - but leave the community, which will become
// top-level
DatabaseManager.updateQuery(c,
"DELETE FROM community2community WHERE parent_comm_id= ? "+
"AND child_comm_id= ? ", parent.getID(), child.getID());
child.getParentCommunities().remove(parent);
parent.getSubcommunities().remove(child);
communityService.update(c, child);
communityService.update(c, parent);
// complete the pending transaction
c.complete();
@@ -235,7 +249,7 @@ public class CommunityFiliator
+ "'");
}
private Community resolveCommunity(Context c, String communityID)
protected Community resolveCommunity(Context c, String communityID)
throws SQLException
{
Community community = null;
@@ -243,7 +257,7 @@ public class CommunityFiliator
if (communityID.indexOf('/') != -1)
{
// has a / must be a handle
community = (Community) HandleManager.resolveToObject(c,
community = (Community) handleService.resolveToObject(c,
communityID);
// ensure it's a community
@@ -255,7 +269,7 @@ public class CommunityFiliator
}
else
{
community = Community.find(c, Integer.parseInt(communityID));
community = communityService.find(c, UUID.fromString(communityID));
}
return community;

View File

@@ -22,6 +22,9 @@ import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.eperson.service.GroupService;
/**
* A command-line tool for creating an initial administrator for setting up a
@@ -46,7 +49,10 @@ public final class CreateAdministrator
{
/** DSpace Context object */
private final Context context;
protected EPersonService ePersonService;
protected GroupService groupService;
/**
* For invoking via the command line. If called with no command line arguments,
* it will negotiate with the user for the administrator details
@@ -88,10 +94,12 @@ public final class CreateAdministrator
*
* @throws Exception
*/
private CreateAdministrator()
protected CreateAdministrator()
throws Exception
{
context = new Context();
groupService = EPersonServiceFactory.getInstance().getGroupService();
ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
}
/**
@@ -100,7 +108,7 @@ public final class CreateAdministrator
*
* @throws Exception
*/
private void negotiateAdministratorDetails()
protected void negotiateAdministratorDetails()
throws Exception
{
Console console = System.console();
@@ -222,7 +230,7 @@ public final class CreateAdministrator
*
* @throws Exception
*/
private void createAdministrator(String email, String first, String last,
protected void createAdministrator(String email, String first, String last,
String language, String pw)
throws Exception
{
@@ -231,7 +239,7 @@ public final class CreateAdministrator
context.setIgnoreAuthorization(true);
// Find administrator group
Group admins = Group.find(context, 1);
Group admins = groupService.findByName(context, Group.ADMIN);
if (admins == null)
{
@@ -239,27 +247,27 @@ public final class CreateAdministrator
}
// Create the administrator e-person
EPerson eperson = EPerson.findByEmail(context,email);
EPerson eperson = ePersonService.findByEmail(context,email);
// check if the email belongs to a registered user,
// if not create a new user with this email
if (eperson == null)
{
eperson = EPerson.create(context);
eperson = ePersonService.create(context);
eperson.setEmail(email);
eperson.setCanLogIn(true);
eperson.setRequireCertificate(false);
eperson.setSelfRegistered(false);
}
eperson.setLastName(last);
eperson.setFirstName(first);
eperson.setLanguage(language);
eperson.setPassword(pw);
eperson.update();
eperson.setLastName(context, last);
eperson.setFirstName(context, first);
eperson.setLanguage(context, language);
ePersonService.setPassword(eperson, pw);
ePersonService.update(context, eperson);
admins.addMember(eperson);
admins.update();
groupService.addMember(context, admins, eperson);
groupService.update(context, admins);
context.complete();

View File

@@ -1,287 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.administer;
import java.io.IOException;
import java.sql.SQLException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.NonUniqueMetadataException;
import org.dspace.core.Context;
/**
* Class representing a particular Dublin Core metadata type, with various
* utility methods. In general, only used for manipulating the registry of
* Dublin Core types in the system, so most users will not need this.
*
* <p>
* The DCType implementation has been deprecated; please use MetadataManager,
* MetadataSchema and MetadataField instead. For backward compatibility, this
* implementation has been updated to transparently call the new classes.
* </p>
*
* @author Robert Tansley
* @author Martin Hald
* @version $Revision$
* @deprecated
*/
public class DCType
{
    /** The DSpace context this wrapper operates in */
    private Context dspaceContext;

    /** The metadata field that every operation delegates to */
    private MetadataField delegate = new MetadataField();

    /**
     * Wrap an existing metadata field as a DCType.
     *
     * @param context the DSpace context
     * @param field the metadata field to delegate to
     * @deprecated
     */
    public DCType(Context context, MetadataField field)
    {
        this.dspaceContext = context;
        this.delegate = field;
    }

    /**
     * Default constructor; wraps a new, empty metadata field.
     *
     * @param context the DSpace context
     * @deprecated
     */
    public DCType(Context context)
    {
        this.dspaceContext = context;
    }

    /**
     * Utility method for quick access to the element and qualifier of the
     * metadata field with the given type ID.
     *
     * @param context
     *            context, in case DC types need to be read in from the DB
     * @param id
     *            the DC type ID
     * @return a two-String array: index 0 is the element, index 1 is the
     *         qualifier; both entries are null when no field matches the ID
     * @deprecated
     */
    public static String[] quickFind(Context context, int id)
            throws SQLException
    {
        String[] elementAndQualifier = new String[2];
        MetadataField match = MetadataField.find(context, id);

        if (match != null)
        {
            elementAndQualifier[0] = match.getElement();
            elementAndQualifier[1] = match.getQualifier();
        }

        return elementAndQualifier;
    }

    /**
     * Get a metadata field from the database, wrapped as a DCType.
     *
     * @param context
     *            DSpace context object
     * @param id
     *            ID of the Dublin Core type
     *
     * @return the wrapped metadata field; the wrapped field is null if the
     *         ID is invalid
     * @deprecated
     */
    public static DCType find(Context context, int id) throws SQLException
    {
        return new DCType(context, MetadataField.find(context, id));
    }

    /**
     * Find a given Dublin Core type by element and qualifier. Returns
     * <code>null</code> if no such type exists in the registry.
     *
     * @param context
     *            the DSpace context to use
     * @param element
     *            the element to find
     * @param qualifier
     *            the qualifier, or <code>null</code> to find an unqualified
     *            type
     *
     * @return the Dublin Core type, or <code>null</code> if there isn't a
     *         corresponding type in the registry
     * @throws AuthorizeException
     * @deprecated
     */
    public static DCType findByElement(Context context, String element,
            String qualifier) throws SQLException, AuthorizeException
    {
        MetadataField match = MetadataField.findByElement(context,
                MetadataSchema.DC_SCHEMA_ID, element, qualifier);

        return (match == null) ? null : new DCType(context, match);
    }

    /**
     * Retrieve all Dublin Core types from the registry.
     *
     * @return an array of all the Dublin Core types
     * @deprecated
     */
    public static DCType[] findAll(Context context) throws SQLException
    {
        MetadataField[] fields = MetadataField.findAll(context);
        DCType[] types = new DCType[fields.length];

        for (int i = 0; i < fields.length; i++)
        {
            types[i] = new DCType(context, fields[i]);
        }

        return types;
    }

    /**
     * Create a new Dublin Core type in the DC schema.
     *
     * @param context
     *            DSpace context object
     * @return the newly created DCType
     * @throws NonUniqueMetadataException
     * @throws IOException
     * @deprecated
     */
    public static DCType create(Context context) throws SQLException,
            AuthorizeException, IOException, NonUniqueMetadataException
    {
        MetadataField newField = new MetadataField();
        newField.setSchemaID(MetadataSchema.DC_SCHEMA_ID);
        newField.create(context);

        return new DCType(context, newField);
    }

    /**
     * Delete this DC type. This won't work if there are any DC values of this
     * type in the database - they need to be updated first. An
     * <code>SQLException</code> (referential integrity violation) will be
     * thrown in this case.
     * @deprecated
     */
    public void delete() throws SQLException, AuthorizeException
    {
        delegate.delete(dspaceContext);
    }

    /**
     * Get the internal identifier of this metadata field.
     *
     * @return the internal identifier
     */
    public int getID()
    {
        return delegate.getFieldID();
    }

    /**
     * Get the DC element.
     *
     * @return the element
     */
    public String getElement()
    {
        return delegate.getElement();
    }

    /**
     * Set the DC element.
     *
     * @param s
     *            the new element
     */
    public void setElement(String s)
    {
        delegate.setElement(s);
    }

    /**
     * Get the DC qualifier, if any.
     *
     * @return the DC qualifier, or <code>null</code> if this is an
     *         unqualified element
     */
    public String getQualifier()
    {
        return delegate.getQualifier();
    }

    /**
     * Set the DC qualifier.
     *
     * @param s
     *            the DC qualifier, or <code>null</code> if this is an
     *            unqualified element
     */
    public void setQualifier(String s)
    {
        delegate.setQualifier(s);
    }

    /**
     * Get the scope note - information about the DC type and its use.
     *
     * @return the scope note
     */
    public String getScopeNote()
    {
        return delegate.getScopeNote();
    }

    /**
     * Set the scope note.
     *
     * @param s
     *            the new scope note
     */
    public void setScopeNote(String s)
    {
        delegate.setScopeNote(s);
    }

    /**
     * Persist any changes to the wrapped field in the Dublin Core registry.
     *
     * @throws IOException
     * @throws NonUniqueMetadataException
     * @deprecated
     */
    public void update() throws SQLException, AuthorizeException,
            NonUniqueMetadataException, IOException
    {
        delegate.update(dspaceContext);
    }
}

View File

@@ -12,6 +12,7 @@ import java.io.FileWriter;
import java.io.IOException;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.cli.CommandLine;
@@ -24,6 +25,9 @@ import org.apache.xml.serialize.OutputFormat;
import org.apache.xml.serialize.XMLSerializer;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.core.Context;
import org.xml.sax.SAXException;
@@ -46,6 +50,9 @@ import org.xml.sax.SAXException;
public class MetadataExporter
{
protected static MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance().getMetadataSchemaService();
protected static MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService();
/**
* @param args
* @throws ParseException
@@ -102,25 +109,25 @@ public class MetadataExporter
// Save the schema definition(s)
saveSchema(context, xmlSerializer, schema);
MetadataField[] mdFields = null;
List<MetadataField> mdFields = null;
// If a single schema has been specified
if (schema != null && !"".equals(schema))
{
// Get the id of that schema
MetadataSchema mdSchema = MetadataSchema.find(context, schema);
MetadataSchema mdSchema = metadataSchemaService.find(context, schema);
if (mdSchema == null)
{
throw new RegistryExportException("no schema to export");
}
// Get the metadata fields only for the specified schema
mdFields = MetadataField.findAllInSchema(context, mdSchema.getSchemaID());
mdFields = metadataFieldService.findAllInSchema(context, mdSchema);
}
else
{
// Get the metadata fields for all the schemas
mdFields = MetadataField.findAll(context);
mdFields = metadataFieldService.findAll(context);
}
// Output the metadata fields
@@ -150,14 +157,14 @@ public class MetadataExporter
if (schema != null && !"".equals(schema))
{
// Find a single named schema
MetadataSchema mdSchema = MetadataSchema.find(context, schema);
MetadataSchema mdSchema = metadataSchemaService.find(context, schema);
saveSchema(xmlSerializer, mdSchema);
}
else
{
// Find all schemas
MetadataSchema[] mdSchemas = MetadataSchema.findAll(context);
List<MetadataSchema> mdSchemas = metadataSchemaService.findAll(context);
for (MetadataSchema mdSchema : mdSchemas)
{
@@ -292,16 +299,16 @@ public class MetadataExporter
private static String getSchemaName(Context context, MetadataField mdField) throws SQLException, RegistryExportException
{
// Get name from cache
String name = schemaMap.get(Integer.valueOf(mdField.getSchemaID()));
String name = schemaMap.get(mdField.getMetadataSchema().getSchemaID());
if (name == null)
{
// Name not retrieved before, so get the schema now
MetadataSchema mdSchema = MetadataSchema.find(context, mdField.getSchemaID());
MetadataSchema mdSchema = metadataSchemaService.find(context, mdField.getMetadataSchema().getSchemaID());
if (mdSchema != null)
{
name = mdSchema.getName();
schemaMap.put(Integer.valueOf(mdSchema.getSchemaID()), name);
schemaMap.put(mdSchema.getSchemaID(), name);
}
else
{

View File

@@ -25,6 +25,9 @@ import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.NonUniqueMetadataException;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.core.Context;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -58,6 +61,9 @@ import org.xml.sax.SAXException;
*/
public class MetadataImporter
{
protected static MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance().getMetadataSchemaService();
protected static MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService();
/** logging category */
private static final Logger log = LoggerFactory.getLogger(MetadataImporter.class);
@@ -173,14 +179,13 @@ public class MetadataImporter
}
// check to see if the schema already exists
MetadataSchema s = MetadataSchema.find(context, name);
MetadataSchema s = metadataSchemaService.find(context, name);
if (s == null)
{
// Schema does not exist - create
log.info("Registering Schema " + name + " (" + namespace + ")");
MetadataSchema schema = new MetadataSchema(namespace, name);
schema.create(context);
MetadataSchema schema = metadataSchemaService.create(context, name, namespace);
}
else
{
@@ -197,7 +202,7 @@ public class MetadataImporter
// Update the existing schema namespace and continue to type import
log.info("Updating Schema " + name + ": New namespace " + namespace);
s.setNamespace(namespace);
s.update(context);
metadataSchemaService.update(context, s);
}
else
{
@@ -236,14 +241,14 @@ public class MetadataImporter
// Find the matching schema object
MetadataSchema schemaObj = MetadataSchema.find(context, schema);
MetadataSchema schemaObj = metadataSchemaService.find(context, schema);
if (schemaObj == null)
{
throw new RegistryImportException("Schema '" + schema + "' is not registered and does not exist.");
}
MetadataField mf = MetadataField.findByElement(context, schemaObj.getSchemaID(), element, qualifier);
MetadataField mf = metadataFieldService.findByElement(context, schemaObj, element, qualifier);
if (mf != null)
{
// Metadata field already exists, skipping it
@@ -255,12 +260,8 @@ public class MetadataImporter
if(qualifier==null)
fieldName = schema + "." + element;
log.info("Registering metadata field " + fieldName);
MetadataField field = new MetadataField();
field.setSchemaID(schemaObj.getSchemaID());
field.setElement(element);
field.setQualifier(qualifier);
field.setScopeNote(scopeNote);
field.create(context);
MetadataField field = metadataFieldService.create(context, schemaObj, element, qualifier, scopeNote);
metadataFieldService.update(context, field);
}
/**

View File

@@ -10,6 +10,8 @@ package org.dspace.administer;
import java.io.File;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
@@ -20,9 +22,8 @@ import org.apache.log4j.Logger;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.NonUniqueMetadataException;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamFormatService;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.w3c.dom.Document;
@@ -48,6 +49,8 @@ public class RegistryLoader
/** log4j category */
private static Logger log = Logger.getLogger(RegistryLoader.class);
protected static BitstreamFormatService bitstreamFormatService = ContentServiceFactory.getInstance().getBitstreamFormatService();
/**
* For invoking via the command line
*
@@ -167,30 +170,32 @@ public class RegistryLoader
String[] extensions = getRepeatedElementData(node, "extension");
// Check if this format already exists in our registry (by mime type)
BitstreamFormat exists = BitstreamFormat.findByMIMEType(context, mimeType);
BitstreamFormat exists = bitstreamFormatService.findByMIMEType(context, mimeType);
// If not found by mimeType, check by short description (since this must also be unique)
if(exists==null)
{
exists = BitstreamFormat.findByShortDescription(context, shortDesc);
exists = bitstreamFormatService.findByShortDescription(context, shortDesc);
}
// If it doesn't exist, create it..otherwise skip it.
if(exists==null)
{
// Create the format object
BitstreamFormat format = BitstreamFormat.create(context);
BitstreamFormat format = bitstreamFormatService.create(context);
// Fill it out with the values
format.setMIMEType(mimeType);
format.setShortDescription(shortDesc);
bitstreamFormatService.setShortDescription(context, format, shortDesc);
format.setDescription(desc);
format.setSupportLevel(supportLevel);
format.setInternal(internal);
format.setExtensions(extensions);
ArrayList<String> extensionList = new ArrayList<>();
extensionList.addAll(Arrays.asList(extensions));
format.setExtensions(extensionList);
// Write to database
format.update();
bitstreamFormatService.update(context, format);
}
}

View File

@@ -28,8 +28,12 @@ import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.jdom.Element;
import org.jdom.output.XMLOutputter;
import org.w3c.dom.Document;
@@ -73,7 +77,11 @@ public class StructBuilder
/** a hashtable to hold metadata for the community being worked on */
private static Map<String, String> communityMap = new HashMap<String, String>();
protected static CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
protected static CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
protected static EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
/**
* Main method to be run from the command line to import a structure into
* DSpace
@@ -127,7 +135,7 @@ public class StructBuilder
Context context = new Context();
// set the context
context.setCurrentUser(EPerson.findByEmail(context, eperson));
context.setCurrentUser(ePersonService.findByEmail(context, eperson));
// load the XML
Document document = loadXML(file);
@@ -390,15 +398,15 @@ public class StructBuilder
// create the community or sub community
if (parent != null)
{
community = parent.createSubcommunity();
community = communityService.create(parent, context);
}
else
{
community = Community.create(null, context);
community = communityService.create(null, context);
}
// default the short description to be an empty string
community.setMetadata("short_description", " ");
communityService.setMetadata(context, community, "short_description", " ");
// now update the metadata
Node tn = communities.item(i);
@@ -407,7 +415,7 @@ public class StructBuilder
NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey());
if (nl.getLength() == 1)
{
community.setMetadata(entry.getValue(), getStringValue(nl.item(0)));
communityService.setMetadata(context, community, entry.getValue(), getStringValue(nl.item(0)));
}
}
@@ -420,7 +428,7 @@ public class StructBuilder
// difficult
// to isolate the community that already exists without hitting
// the database directly.
community.update();
communityService.update(context, community);
// build the element with the handle that identifies the new
// community
@@ -433,34 +441,34 @@ public class StructBuilder
element.setAttribute("identifier", community.getHandle());
Element nameElement = new Element("name");
nameElement.setText(community.getMetadata("name"));
nameElement.setText(communityService.getMetadata(community, "name"));
element.addContent(nameElement);
if (community.getMetadata("short_description") != null)
if (communityService.getMetadata(community, "short_description") != null)
{
Element descriptionElement = new Element("description");
descriptionElement.setText(community.getMetadata("short_description"));
descriptionElement.setText(communityService.getMetadata(community, "short_description"));
element.addContent(descriptionElement);
}
if (community.getMetadata("introductory_text") != null)
if (communityService.getMetadata(community, "introductory_text") != null)
{
Element introElement = new Element("intro");
introElement.setText(community.getMetadata("introductory_text"));
introElement.setText(communityService.getMetadata(community, "introductory_text"));
element.addContent(introElement);
}
if (community.getMetadata("copyright_text") != null)
if (communityService.getMetadata(community, "copyright_text") != null)
{
Element copyrightElement = new Element("copyright");
copyrightElement.setText(community.getMetadata("copyright_text"));
copyrightElement.setText(communityService.getMetadata(community, "copyright_text"));
element.addContent(copyrightElement);
}
if (community.getMetadata("side_bar_text") != null)
if (communityService.getMetadata(community, "side_bar_text") != null)
{
Element sidebarElement = new Element("sidebar");
sidebarElement.setText(community.getMetadata("side_bar_text"));
sidebarElement.setText(communityService.getMetadata(community, "side_bar_text"));
element.addContent(sidebarElement);
}
@@ -506,10 +514,10 @@ public class StructBuilder
for (int i = 0; i < collections.getLength(); i++)
{
Element element = new Element("collection");
Collection collection = parent.createCollection();
Collection collection = collectionService.create(context, parent);
// default the short description to the empty string
collection.setMetadata("short_description", " ");
collectionService.setMetadata(context, collection, "short_description", " ");
// import the rest of the metadata
Node tn = collections.item(i);
@@ -518,57 +526,57 @@ public class StructBuilder
NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey());
if (nl.getLength() == 1)
{
collection.setMetadata(entry.getValue(), getStringValue(nl.item(0)));
collectionService.setMetadata(context, collection, entry.getValue(), getStringValue(nl.item(0)));
}
}
collection.update();
collectionService.update(context, collection);
element.setAttribute("identifier", collection.getHandle());
Element nameElement = new Element("name");
nameElement.setText(collection.getMetadata("name"));
nameElement.setText(collectionService.getMetadata(collection, "name"));
element.addContent(nameElement);
if (collection.getMetadata("short_description") != null)
if (collectionService.getMetadata(collection, "short_description") != null)
{
Element descriptionElement = new Element("description");
descriptionElement.setText(collection.getMetadata("short_description"));
descriptionElement.setText(collectionService.getMetadata(collection, "short_description"));
element.addContent(descriptionElement);
}
if (collection.getMetadata("introductory_text") != null)
if (collectionService.getMetadata(collection, "introductory_text") != null)
{
Element introElement = new Element("intro");
introElement.setText(collection.getMetadata("introductory_text"));
introElement.setText(collectionService.getMetadata(collection, "introductory_text"));
element.addContent(introElement);
}
if (collection.getMetadata("copyright_text") != null)
if (collectionService.getMetadata(collection, "copyright_text") != null)
{
Element copyrightElement = new Element("copyright");
copyrightElement.setText(collection.getMetadata("copyright_text"));
copyrightElement.setText(collectionService.getMetadata(collection, "copyright_text"));
element.addContent(copyrightElement);
}
if (collection.getMetadata("side_bar_text") != null)
if (collectionService.getMetadata(collection, "side_bar_text") != null)
{
Element sidebarElement = new Element("sidebar");
sidebarElement.setText(collection.getMetadata("side_bar_text"));
sidebarElement.setText(collectionService.getMetadata(collection, "side_bar_text"));
element.addContent(sidebarElement);
}
if (collection.getMetadata("license") != null)
if (collectionService.getMetadata(collection, "license") != null)
{
Element sidebarElement = new Element("license");
sidebarElement.setText(collection.getMetadata("license"));
sidebarElement.setText(collectionService.getMetadata(collection, "license"));
element.addContent(sidebarElement);
}
if (collection.getMetadata("provenance_description") != null)
if (collectionService.getMetadata(collection, "provenance_description") != null)
{
Element sidebarElement = new Element("provenance");
sidebarElement.setText(collection.getMetadata("provenance_description"));
sidebarElement.setText(collectionService.getMetadata(collection, "provenance_description"));
element.addContent(sidebarElement);
}

View File

@@ -8,7 +8,6 @@
package org.dspace.app.bulkedit;
import org.dspace.content.Item;
import org.dspace.content.Metadatum;
import org.dspace.content.Collection;
import java.util.ArrayList;
@@ -25,16 +24,16 @@ public class BulkEditChange
private Item item;
/** The List of hashtables with the new elements */
private List<Metadatum> adds;
private List<BulkEditMetadataValue> adds;
/** The List of hashtables with the removed elements */
private List<Metadatum> removes;
private List<BulkEditMetadataValue> removes;
/** The List of hashtables with the unchanged elements */
private List<Metadatum> constant;
private List<BulkEditMetadataValue> constant;
/** The List of the complete set of new values (constant + adds) */
private List<Metadatum> complete;
private List<BulkEditMetadataValue> complete;
/** The list of old collections the item used to be mapped to */
private List<Collection> oldMappedCollections;
@@ -77,12 +76,12 @@ public class BulkEditChange
newOwningCollection = null;
// Initialise the arrays
adds = new ArrayList<Metadatum>();
removes = new ArrayList<Metadatum>();
constant = new ArrayList<Metadatum>();
complete = new ArrayList<Metadatum>();
oldMappedCollections = new ArrayList<Collection>();
newMappedCollections = new ArrayList<Collection>();
adds = new ArrayList<>();
removes = new ArrayList<>();
constant = new ArrayList<>();
complete = new ArrayList<>();
oldMappedCollections = new ArrayList<>();
newMappedCollections = new ArrayList<>();
}
/**
@@ -98,12 +97,12 @@ public class BulkEditChange
empty = true;
// Initialise the arrays
adds = new ArrayList<Metadatum>();
removes = new ArrayList<Metadatum>();
constant = new ArrayList<Metadatum>();
complete = new ArrayList<Metadatum>();
oldMappedCollections = new ArrayList<Collection>();
newMappedCollections = new ArrayList<Collection>();
adds = new ArrayList<>();
removes = new ArrayList<>();
constant = new ArrayList<>();
complete = new ArrayList<>();
oldMappedCollections = new ArrayList<>();
newMappedCollections = new ArrayList<>();
}
/**
@@ -122,7 +121,7 @@ public class BulkEditChange
*
* @param dcv The value to add
*/
public void registerAdd(Metadatum dcv)
public void registerAdd(BulkEditMetadataValue dcv)
{
// Add the added value
adds.add(dcv);
@@ -135,7 +134,7 @@ public class BulkEditChange
*
* @param dcv The value to remove
*/
public void registerRemove(Metadatum dcv)
public void registerRemove(BulkEditMetadataValue dcv)
{
// Add the removed value
removes.add(dcv);
@@ -147,7 +146,7 @@ public class BulkEditChange
*
* @param dcv The value to keep unchanged
*/
public void registerConstant(Metadatum dcv)
public void registerConstant(BulkEditMetadataValue dcv)
{
// Add the unchanged value
constant.add(dcv);
@@ -241,7 +240,7 @@ public class BulkEditChange
*
* @return the list of elements and their values that have been added.
*/
public List<Metadatum> getAdds()
public List<BulkEditMetadataValue> getAdds()
{
// Return the array
return adds;
@@ -252,7 +251,7 @@ public class BulkEditChange
*
* @return the list of elements and their values that have been removed.
*/
public List<Metadatum> getRemoves()
public List<BulkEditMetadataValue> getRemoves()
{
// Return the array
return removes;
@@ -263,7 +262,7 @@ public class BulkEditChange
*
* @return the list of unchanged values
*/
public List<Metadatum> getConstant()
public List<BulkEditMetadataValue> getConstant()
{
// Return the array
return constant;
@@ -274,7 +273,7 @@ public class BulkEditChange
*
* @return the list of all values
*/
public List<Metadatum> getComplete()
public List<BulkEditMetadataValue> getComplete()
{
// Return the array
return complete;
@@ -404,4 +403,4 @@ public class BulkEditChange
{
return !empty;
}
}
}

View File

@@ -0,0 +1,83 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
/**
* Value class used for metadata value edits used by the bulk edit.
*
* @author kevinvandevelde at atmire.com
*/
/**
 * Plain value holder describing a single metadata value edit used by the
 * bulk-edit tools: the field coordinates (schema, element, qualifier) plus
 * the value payload (language, value, authority, confidence).
 *
 * @author kevinvandevelde at atmire.com
 */
public class BulkEditMetadataValue {

    // Metadata field coordinates (e.g. dc.title.alternative)
    private String schema;
    private String element;
    private String qualifier;

    // Value payload
    private String language;
    private String value;
    private String authority;
    private int confidence;

    /** Creates an empty value; populate it via the setters. */
    public BulkEditMetadataValue() {
    }

    public String getSchema() {
        return schema;
    }

    public void setSchema(String schema) {
        this.schema = schema;
    }

    public String getElement() {
        return element;
    }

    public void setElement(String element) {
        this.element = element;
    }

    public String getQualifier() {
        return qualifier;
    }

    public void setQualifier(String qualifier) {
        this.qualifier = qualifier;
    }

    public String getLanguage() {
        return language;
    }

    public void setLanguage(String language) {
        this.language = language;
    }

    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    public String getAuthority() {
        return authority;
    }

    public void setAuthority(String authority) {
        this.authority = authority;
    }

    public int getConfidence() {
        return confidence;
    }

    public void setConfidence(int confidence) {
        this.confidence = confidence;
    }
}

View File

@@ -12,9 +12,15 @@ import org.dspace.authority.AuthorityValue;
import org.dspace.app.bulkedit.DSpaceCSVLine;
import org.dspace.app.bulkedit.MetadataImport;
import org.dspace.app.bulkedit.MetadataImportInvalidHeadingException;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityValueService;
import org.dspace.content.Collection;
import org.dspace.content.*;
import org.dspace.content.Collection;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.content.authority.Choices;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
@@ -41,38 +47,43 @@ import java.io.*;
public class DSpaceCSV implements Serializable
{
/** The headings of the CSV file */
private List<String> headings;
protected List<String> headings;
/** An array list of CSV lines */
private List<DSpaceCSVLine> lines;
protected List<DSpaceCSVLine> lines;
/** A counter of how many CSV lines this object holds */
private int counter;
protected int counter;
/** The value separator (defaults to double pipe '||') */
protected static String valueSeparator;
protected String valueSeparator;
/** The value separator in an escaped form for using in regexes */
protected static String escapedValueSeparator;
protected String escapedValueSeparator;
/** The field separator (defaults to comma) */
protected static String fieldSeparator;
protected String fieldSeparator;
/** The field separator in an escaped form for using in regexes */
protected static String escapedFieldSeparator;
protected String escapedFieldSeparator;
/** The authority separator (defaults to double colon '::') */
protected static String authoritySeparator;
protected String authoritySeparator;
/** The authority separator in an escaped form for using in regexes */
protected static String escapedAuthoritySeparator;
protected String escapedAuthoritySeparator;
protected final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
protected final MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance().getMetadataSchemaService();
protected final MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService();
protected final AuthorityValueService authorityValueService = AuthorityServiceFactory.getInstance().getAuthorityValueService();
/** Whether to export all metadata such as handles and provenance information */
private boolean exportAll;
protected boolean exportAll;
/** A list of metadata elements to ignore */
private Map<String, String> ignore;
protected Map<String, String> ignore;
/**
@@ -137,7 +148,7 @@ public class DSpaceCSV implements Serializable
else if (!"id".equals(element))
{
String authorityPrefix = "";
AuthorityValue authorityValueType = MetadataImport.getAuthorityValueType(element);
AuthorityValue authorityValueType = authorityValueService.getAuthorityValueType(element);
if (authorityValueType != null) {
String authorityType = authorityValueType.getAuthorityType();
authorityPrefix = element.substring(0, authorityType.length() + 1);
@@ -162,7 +173,7 @@ public class DSpaceCSV implements Serializable
}
// Check that the scheme exists
MetadataSchema foundSchema = MetadataSchema.find(c, metadataSchema);
MetadataSchema foundSchema = metadataSchemaService.find(c, metadataSchema);
if (foundSchema == null) {
throw new MetadataImportInvalidHeadingException(clean[0],
MetadataImportInvalidHeadingException.SCHEMA,
@@ -170,8 +181,7 @@ public class DSpaceCSV implements Serializable
}
// Check that the metadata element exists in the schema
int schemaID = foundSchema.getSchemaID();
MetadataField foundField = MetadataField.findByElement(c, schemaID, metadataElement, metadataQualifier);
MetadataField foundField = metadataFieldService.findByElement(c, foundSchema, metadataElement, metadataQualifier);
if (foundField == null) {
throw new MetadataImportInvalidHeadingException(clean[0],
MetadataImportInvalidHeadingException.ELEMENT,
@@ -240,7 +250,7 @@ public class DSpaceCSV implements Serializable
/**
* Initialise this class with values from dspace.cfg
*/
private void init()
protected void init()
{
// Set the value separator
setValueSeparator();
@@ -414,7 +424,7 @@ public class DSpaceCSV implements Serializable
line.add("collection", owningCollectionHandle);
// Add in any mapped collections
Collection[] collections = i.getCollections();
List<Collection> collections = i.getCollections();
for (Collection c : collections)
{
// Only add if it is not the owning collection
@@ -425,33 +435,35 @@ public class DSpaceCSV implements Serializable
}
// Populate it
Metadatum md[] = i.getMetadata(Item.ANY, Item.ANY, Item.ANY, Item.ANY);
for (Metadatum value : md)
List<MetadataValue> md = itemService.getMetadata(i, Item.ANY, Item.ANY, Item.ANY, Item.ANY);
for (MetadataValue value : md)
{
MetadataField metadataField = value.getMetadataField();
MetadataSchema metadataSchema = metadataField.getMetadataSchema();
// Get the key (schema.element)
String key = value.schema + "." + value.element;
String key = metadataSchema.getName() + "." + metadataField.getElement();
// Add the qualifier if there is one (schema.element.qualifier)
if (value.qualifier != null)
if (metadataField.getQualifier() != null)
{
key = key + "." + value.qualifier;
key = key + "." + metadataField.getQualifier();
}
// Add the language if there is one (schema.element.qualifier[language])
//if ((value.language != null) && (!"".equals(value.language)))
if (value.language != null)
if (value.getLanguage() != null)
{
key = key + "[" + value.language + "]";
key = key + "[" + value.getLanguage() + "]";
}
// Store the item
if (exportAll || okToExport(value))
if (exportAll || okToExport(metadataField))
{
// Add authority and confidence if authority is not null
String mdValue = value.value;
if (value.authority != null && !"".equals(value.authority))
String mdValue = value.getValue();
if (value.getAuthority() != null && !"".equals(value.getAuthority()))
{
mdValue += authoritySeparator + value.authority + authoritySeparator + (value.confidence != -1 ? value.confidence : Choices.CF_ACCEPTED);
mdValue += authoritySeparator + value.getAuthority() + authoritySeparator + (value.getConfidence() != -1 ? value.getConfidence() : Choices.CF_ACCEPTED);
}
line.add(key, mdValue);
if (!headings.contains(key))
@@ -546,7 +558,7 @@ public class DSpaceCSV implements Serializable
{
try
{
csvLine = new DSpaceCSVLine(Integer.parseInt(id));
csvLine = new DSpaceCSVLine(UUID.fromString(id));
}
catch (NumberFormatException nfe)
{
@@ -623,7 +635,7 @@ public class DSpaceCSV implements Serializable
int c = 1;
while (i.hasNext())
{
csvLines[c++] = i.next().toCSV(headingsCopy);
csvLines[c++] = i.next().toCSV(headingsCopy, fieldSeparator);
}
return csvLines;
@@ -658,13 +670,13 @@ public class DSpaceCSV implements Serializable
* @param md The Metadatum to examine
* @return Whether or not it is OK to export this element
*/
private final boolean okToExport(Metadatum md)
protected boolean okToExport(MetadataField md)
{
// Now compare with the list to ignore
String key = md.schema + "." + md.element;
if (md.qualifier != null)
String key = md.getMetadataSchema().getName() + "." + md.getElement();
if (md.getQualifier() != null)
{
key += "." + md.qualifier;
key += "." + md.getQualifier();
}
if (ignore.get(key) != null) {
return false;
@@ -700,4 +712,12 @@ public class DSpaceCSV implements Serializable
}
return csvLines.toString();
}
public String getAuthoritySeparator() {
return authoritySeparator;
}
public String getEscapedAuthoritySeparator() {
return escapedAuthoritySeparator;
}
}

View File

@@ -8,6 +8,8 @@
package org.dspace.app.bulkedit;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityValueService;
import java.io.Serializable;
import java.util.*;
@@ -19,20 +21,21 @@ import java.util.*;
*/
public class DSpaceCSVLine implements Serializable
{
/** The item id of the item represented by this line. -1 is for a new item */
private int id;
private UUID id;
/** The elements in this line in a hashtable, keyed by the metadata type */
private Map<String, ArrayList> items;
protected final AuthorityValueService authorityValueService = AuthorityServiceFactory.getInstance().getAuthorityValueService();
/** ensuring that the order-sensible columns of the csv are processed in the correct order */
private final Comparator<? super String> headerComparator = new Comparator<String>() {
@Override
public int compare(String md1, String md2) {
// The metadata coming from an external source should be processed after the others
AuthorityValue source1 = MetadataImport.getAuthorityValueType(md1);
AuthorityValue source2 = MetadataImport.getAuthorityValueType(md2);
AuthorityValue source1 = authorityValueService.getAuthorityValueType(md1);
AuthorityValue source2 = authorityValueService.getAuthorityValueType(md2);
int compare;
if (source1 == null && source2 != null) {
@@ -53,7 +56,7 @@ public class DSpaceCSVLine implements Serializable
*
* @param itemId The item ID of the line
*/
public DSpaceCSVLine(int itemId)
public DSpaceCSVLine(UUID itemId)
{
// Store the ID + separator, and initialise the hashtable
this.id = itemId;
@@ -66,10 +69,9 @@ public class DSpaceCSVLine implements Serializable
*/
public DSpaceCSVLine()
{
// Set the ID to be -1, and initialise the hashtable
this.id = -1;
// Set the ID to be null, and initialise the hashtable
this.id = null;
this.items = new TreeMap<String, ArrayList>(headerComparator);
// this.items = new HashMap<String, ArrayList>();
}
/**
@@ -77,7 +79,7 @@ public class DSpaceCSVLine implements Serializable
*
* @return The item ID
*/
public int getID()
public UUID getID()
{
// Return the ID
return id;
@@ -149,22 +151,22 @@ public class DSpaceCSVLine implements Serializable
* @param headings The headings which define the order the elements must be presented in
* @return The CSV formatted String
*/
protected String toCSV(List<String> headings)
protected String toCSV(List<String> headings, String fieldSeparator)
{
StringBuilder bits = new StringBuilder();
// Add the id
bits.append("\"").append(id).append("\"").append(DSpaceCSV.fieldSeparator);
bits.append(valueToCSV(items.get("collection")));
bits.append("\"").append(id).append("\"").append(fieldSeparator);
bits.append(valueToCSV(items.get("collection"), fieldSeparator));
// Add the rest of the elements
for (String heading : headings)
{
bits.append(DSpaceCSV.fieldSeparator);
bits.append(fieldSeparator);
List<String> values = items.get(heading);
if (values != null && !"collection".equals(heading))
{
bits.append(valueToCSV(values));
bits.append(valueToCSV(values, fieldSeparator));
}
}
@@ -177,7 +179,7 @@ public class DSpaceCSVLine implements Serializable
* @param values The values to create the string from
* @return The line as a CSV formatted String
*/
protected String valueToCSV(List<String> values)
protected String valueToCSV(List<String> values, String valueSeparator)
{
// Check there is some content
if (values == null)
@@ -200,7 +202,7 @@ public class DSpaceCSVLine implements Serializable
{
if (str.length() > 0)
{
str.append(DSpaceCSV.valueSeparator);
str.append(valueSeparator);
}
str.append(value);

View File

@@ -7,15 +7,19 @@
*/
package org.dspace.app.bulkedit;
import com.google.common.collect.Iterators;
import org.apache.commons.cli.*;
import org.dspace.content.*;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
import org.dspace.handle.factory.HandleServiceFactory;
import java.util.ArrayList;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
/**
@@ -26,10 +30,16 @@ import java.util.List;
public class MetadataExport
{
/** The items to export */
private ItemIterator toExport;
protected Iterator<Item> toExport;
protected ItemService itemService;
/** Whether to export all metadata, or just normally edited metadata */
private boolean exportAll;
protected boolean exportAll;
protected MetadataExport() {
itemService = ContentServiceFactory.getInstance().getItemService();
}
/**
* Set up a new metadata export
@@ -38,7 +48,7 @@ public class MetadataExport
* @param toExport The ItemIterator of items to export
* @param exportAll whether to export all metadata or not (include handle, provenance etc)
*/
public MetadataExport(Context c, ItemIterator toExport, boolean exportAll)
public MetadataExport(Context c, Iterator<Item> toExport, boolean exportAll)
{
// Store the export settings
this.toExport = toExport;
@@ -57,7 +67,7 @@ public class MetadataExport
try
{
// Try to export the community
this.toExport = new ItemIterator(c, buildFromCommunity(toExport, new ArrayList<Integer>(), 0));
this.toExport = buildFromCommunity(c, toExport, new ArrayList<Integer>(), 0);
this.exportAll = exportAll;
}
catch (SQLException sqle)
@@ -78,11 +88,12 @@ public class MetadataExport
* @return The list of item ids
* @throws SQLException
*/
private List<Integer> buildFromCommunity(Community community, List<Integer> itemIDs, int indent)
protected Iterator<Item> buildFromCommunity(Context context, Community community, List<Integer> itemIDs, int indent)
throws SQLException
{
// Add all the collections
Collection[] collections = community.getCollections();
List<Collection> collections = community.getCollections();
Iterator<Item> result = null;
for (Collection collection : collections)
{
for (int i = 0; i < indent; i++)
@@ -90,30 +101,27 @@ public class MetadataExport
System.out.print(" ");
}
ItemIterator items = collection.getAllItems();
while (items.hasNext())
Iterator<Item> items = itemService.findByCollection(context, collection);
if(result == null)
{
int id = items.next().getID();
// Only add if not already included (so mapped items only appear once)
if (!itemIDs.contains(id))
{
itemIDs.add(id);
}
result = items;
}else{
result = Iterators.concat(result, items);
}
}
}
// Add all the sub-communities
Community[] communities = community.getSubcommunities();
List<Community> communities = community.getSubcommunities();
for (Community subCommunity : communities)
{
for (int i = 0; i < indent; i++)
{
System.out.print(" ");
}
buildFromCommunity(subCommunity, itemIDs, indent + 1);
buildFromCommunity(context, subCommunity, itemIDs, indent + 1);
}
return itemIDs;
return result;
}
/**
@@ -208,22 +216,24 @@ public class MetadataExport
c.turnOffAuthorisationSystem();
// The things we'll export
ItemIterator toExport = null;
Iterator<Item> toExport = null;
MetadataExport exporter = null;
// Export everything?
boolean exportAll = line.hasOption('a');
ContentServiceFactory contentServiceFactory = ContentServiceFactory.getInstance();
// Check we have an item OK
ItemService itemService = contentServiceFactory.getItemService();
if (!line.hasOption('i'))
{
System.out.println("Exporting whole repository WARNING: May take some time!");
exporter = new MetadataExport(c, Item.findAll(c), exportAll);
exporter = new MetadataExport(c, itemService.findAll(c), exportAll);
}
else
{
String handle = line.getOptionValue('i');
DSpaceObject dso = HandleManager.resolveToObject(c, handle);
DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(c, handle);
if (dso == null)
{
System.err.println("Item '" + handle + "' does not resolve to an item in your repository!");
@@ -233,15 +243,15 @@ public class MetadataExport
if (dso.getType() == Constants.ITEM)
{
System.out.println("Exporting item '" + dso.getName() + "' (" + handle + ")");
List<Integer> item = new ArrayList<Integer>();
item.add(dso.getID());
exporter = new MetadataExport(c, new ItemIterator(c, item), exportAll);
List<Item> item = new ArrayList<>();
item.add((Item) dso);
exporter = new MetadataExport(c, item.iterator(), exportAll);
}
else if (dso.getType() == Constants.COLLECTION)
{
System.out.println("Exporting collection '" + dso.getName() + "' (" + handle + ")");
Collection collection = (Collection)dso;
toExport = collection.getAllItems();
toExport = itemService.findByCollection(c, collection);
exporter = new MetadataExport(c, toExport, exportAll);
}
else if (dso.getType() == Constants.COMMUNITY)

View File

@@ -8,34 +8,38 @@
package org.dspace.app.bulkedit;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.AuthorityValueFinder;
import org.dspace.authority.AuthorityValueGenerator;
import org.apache.commons.cli.*;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityValueService;
import org.dspace.content.*;
import org.dspace.content.Collection;
import org.dspace.content.authority.Choices;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.InstallItemService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.WorkspaceItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.Constants;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.LogManager;
import org.dspace.handle.HandleManager;
import org.dspace.eperson.EPerson;
import org.dspace.workflow.WorkflowManager;
import org.dspace.xmlworkflow.XmlWorkflowManager;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.workflow.WorkflowService;
import org.dspace.workflow.factory.WorkflowServiceFactory;
import java.util.ArrayList;
import java.util.*;
import java.io.File;
import java.io.InputStreamReader;
import java.io.BufferedReader;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Metadata importer to allow the batch import of metadata from a file
@@ -54,19 +58,25 @@ public class MetadataImport
List<DSpaceCSVLine> toImport;
/** The authority controlled fields */
private static Set<String> authorityControlled;
protected static Set<String> authorityControlled;
static
{
setAuthorizedMetadataFields();
}
/** The prefix of the authority controlled field */
private static final String AC_PREFIX = "authority.controlled.";
protected static final String AC_PREFIX = "authority.controlled.";
/** Logger */
private static final Logger log = Logger.getLogger(MetadataImport.class);
protected static final Logger log = Logger.getLogger(MetadataImport.class);
private AuthorityValueFinder authorityValueFinder = new AuthorityValueFinder();
protected final AuthorityValueService authorityValueService;
protected final ItemService itemService;
protected final InstallItemService installItemService;
protected final CollectionService collectionService;
protected final HandleService handleService;
protected final WorkspaceItemService workspaceItemService;
/**
* Create an instance of the metadata importer. Requires a context and an array of CSV lines
@@ -81,6 +91,12 @@ public class MetadataImport
this.c = c;
csv = toImport;
this.toImport = toImport.getCSVLines();
installItemService = ContentServiceFactory.getInstance().getInstallItemService();
itemService = ContentServiceFactory.getInstance().getItemService();
collectionService = ContentServiceFactory.getInstance().getCollectionService();
handleService = HandleServiceFactory.getInstance().getHandleService();
authorityValueService = AuthorityServiceFactory.getInstance().getAuthorityValueService();
workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService();
}
/**
@@ -110,19 +126,19 @@ public class MetadataImport
for (DSpaceCSVLine line : toImport)
{
// Get the DSpace item to compare with
int id = line.getID();
UUID id = line.getID();
// Is there an action column?
if (csv.hasActions() && (!"".equals(line.getAction())) && (id == -1))
if (csv.hasActions() && (!"".equals(line.getAction())) && (id == null))
{
throw new MetadataImportException("'action' not allowed for new items!");
}
// Is this a new item?
if (id != -1)
if (id != null)
{
// Get the item
Item item = Item.find(c, id);
Item item = itemService.find(c, id);
if (item == null)
{
throw new MetadataImportException("Unknown item ID " + id);
@@ -140,7 +156,7 @@ public class MetadataImport
{
throw new MetadataImportException("Missing collection from item " + item.getHandle());
}
Collection[] actualCollections = item.getCollections();
List<Collection> actualCollections = item.getCollections();
compare(item, collections, actualCollections, whatHasChanged, change);
}
@@ -157,7 +173,7 @@ public class MetadataImport
{
for (int i=0; i<fromCSV.length; i++)
{
int pos = fromCSV[i].indexOf(DSpaceCSV.authoritySeparator);
int pos = fromCSV[i].indexOf(csv.getAuthoritySeparator());
if (pos > -1)
{
fromCSV[i] = fromCSV[i].substring(0, pos);
@@ -187,12 +203,12 @@ public class MetadataImport
}
// Remove the item
Collection[] owners = item.getCollections();
List<Collection> owners = item.getCollections();
for (Collection owner : owners)
{
if (change)
{
owner.removeItem(item);
collectionService.removeItem(c, owner, item);
}
}
whatHasChanged.setDeleted();
@@ -204,7 +220,7 @@ public class MetadataImport
{
if (change)
{
item.withdraw();
itemService.withdraw(c, item);
}
whatHasChanged.setWithdrawn();
}
@@ -216,7 +232,7 @@ public class MetadataImport
{
if (change)
{
item.reinstate();
itemService.reinstate(c, item);
}
whatHasChanged.setReinstated();
}
@@ -258,7 +274,7 @@ public class MetadataImport
{
for (int i=0; i<fromCSV.length; i++)
{
int pos = fromCSV[i].indexOf(DSpaceCSV.authoritySeparator);
int pos = fromCSV[i].indexOf(csv.getAuthoritySeparator());
if (pos > -1)
{
fromCSV[i] = fromCSV[i].substring(0, pos);
@@ -286,7 +302,7 @@ public class MetadataImport
try
{
// Resolve the handle to the collection
collection = (Collection)HandleManager.resolveToObject(c, handle);
collection = (Collection) handleService.resolveToObject(c, handle);
// Check it resolved OK
if (collection == null)
@@ -314,7 +330,7 @@ public class MetadataImport
boolean first = true;
for (String handle : collections)
{
Collection extra = (Collection)HandleManager.resolveToObject(c, handle);
Collection extra = (Collection) handleService.resolveToObject(c, handle);
if (first)
{
whatHasChanged.setOwningCollection(extra);
@@ -331,42 +347,35 @@ public class MetadataImport
{
// Create the item
String collectionHandle = line.get("collection").get(0);
collection = (Collection)HandleManager.resolveToObject(c, collectionHandle);
WorkspaceItem wsItem = WorkspaceItem.create(c, collection, useTemplate);
collection = (Collection) handleService.resolveToObject(c, collectionHandle);
WorkspaceItem wsItem = workspaceItemService.create(c, collection, useTemplate);
Item item = wsItem.getItem();
// Add the metadata to the item
for (Metadatum dcv : whatHasChanged.getAdds())
for (BulkEditMetadataValue dcv : whatHasChanged.getAdds())
{
item.addMetadata(dcv.schema,
dcv.element,
dcv.qualifier,
dcv.language,
dcv.value,
dcv.authority,
dcv.confidence);
itemService.addMetadata(c, item, dcv.getSchema(),
dcv.getElement(),
dcv.getQualifier(),
dcv.getLanguage(),
dcv.getValue(),
dcv.getAuthority(),
dcv.getConfidence());
}
// Should the workflow be used?
if(useWorkflow){
if (ConfigurationManager.getProperty("workflow", "workflow.framework").equals("xmlworkflow")) {
if (workflowNotify) {
XmlWorkflowManager.start(c, wsItem);
} else {
XmlWorkflowManager.startWithoutNotify(c, wsItem);
}
WorkflowService workflowService = WorkflowServiceFactory.getInstance().getWorkflowService();
if (workflowNotify) {
workflowService.start(c, wsItem);
} else {
if (workflowNotify) {
WorkflowManager.start(c, wsItem);
} else {
WorkflowManager.startWithoutNotify(c, wsItem);
}
workflowService.startWithoutNotify(c, wsItem);
}
}
else
{
// Install the item
InstallItem.installItem(c, wsItem);
installItemService.installItem(c, wsItem);
}
// Add to extra collections
@@ -375,13 +384,13 @@ public class MetadataImport
for (int i = 1; i < collections.size(); i++)
{
String handle = collections.get(i);
Collection extra = (Collection)HandleManager.resolveToObject(c, handle);
extra.addItem(item);
Collection extra = (Collection) handleService.resolveToObject(c, handle);
collectionService.addItem(c, extra, item);
}
}
// Commit changes to the object
c.commit();
// c.commit();
whatHasChanged.setItem(item);
}
@@ -416,7 +425,7 @@ public class MetadataImport
* @throws SQLException if there is a problem accessing a Collection from the database, from its handle
* @throws AuthorizeException if there is an authorization problem with permissions
*/
private void compare(Item item, String[] fromCSV, boolean change,
protected void compare(Item item, String[] fromCSV, boolean change,
String md, BulkEditChange changes, DSpaceCSVLine line) throws SQLException, AuthorizeException
{
// Log what metadata element we're looking at
@@ -444,7 +453,7 @@ public class MetadataImport
language = bits[1].substring(0, bits[1].length() - 1);
}
AuthorityValue fromAuthority = getAuthorityValueType(md);
AuthorityValue fromAuthority = authorityValueService.getAuthorityValueType(md);
if (md.indexOf(':') > 0) {
md = md.substring(md.indexOf(':') + 1);
}
@@ -476,31 +485,32 @@ public class MetadataImport
",looking_for_language=" + language));
String[] dcvalues = new String[0];
if(fromAuthority==null) {
Metadatum[] current = item.getMetadata(schema, element, qualifier, language);
dcvalues = new String[current.length];
List<MetadataValue> current = itemService.getMetadata(item, schema, element, qualifier, language);
dcvalues = new String[current.size()];
int i = 0;
for (Metadatum dcv : current) {
if (dcv.authority == null || !isAuthorityControlledField(md)) {
dcvalues[i] = dcv.value;
for (MetadataValue dcv : current) {
if (dcv.getAuthority() == null || !isAuthorityControlledField(md)) {
dcvalues[i] = dcv.getValue();
} else {
dcvalues[i] = dcv.value + DSpaceCSV.authoritySeparator + dcv.authority;
dcvalues[i] += DSpaceCSV.authoritySeparator + (dcv.confidence != -1 ? dcv.confidence : Choices.CF_ACCEPTED);
dcvalues[i] = dcv.getValue() + csv.getAuthoritySeparator() + dcv.getAuthority();
dcvalues[i] += csv.getAuthoritySeparator() + (dcv.getConfidence() != -1 ? dcv.getConfidence() : Choices.CF_ACCEPTED);
}
i++;
log.debug(LogManager.getHeader(c, "metadata_import",
"item_id=" + item.getID() + ",fromCSV=" + all +
",found=" + dcv.value));
",found=" + dcv.getValue()));
}
}else{
dcvalues = line.get(md).toArray(new String[line.get(md).size()]);
}
// Compare from current->csv
for (int v = 0; v < fromCSV.length; v++) {
String value = fromCSV[v];
Metadatum dcv = getDcValueFromCSV(language, schema, element, qualifier, value, fromAuthority);
BulkEditMetadataValue dcv = getBulkEditValueFromCSV(language, schema, element, qualifier, value, fromAuthority);
if (fromAuthority!=null) {
value = dcv.value + DSpaceCSV.authoritySeparator + dcv.authority + DSpaceCSV.authoritySeparator + dcv.confidence;
value = dcv.getValue() + csv.getAuthoritySeparator() + dcv.getAuthority() + csv.getAuthoritySeparator() + dcv.getConfidence();
fromCSV[v] = value;
}
@@ -516,19 +526,19 @@ public class MetadataImport
for (String value : dcvalues)
{
// Look to see if it should be removed
Metadatum dcv = new Metadatum();
dcv.schema = schema;
dcv.element = element;
dcv.qualifier = qualifier;
dcv.language = language;
if (value == null || value.indexOf(DSpaceCSV.authoritySeparator) < 0)
BulkEditMetadataValue dcv = new BulkEditMetadataValue();
dcv.setSchema(schema);
dcv.setElement(element);
dcv.setQualifier(qualifier);
dcv.setLanguage(language);
if (value == null || !value.contains(csv.getAuthoritySeparator()))
simplyCopyValue(value, dcv);
else
{
String[] parts = value.split(DSpaceCSV.escapedAuthoritySeparator);
dcv.value = parts[0];
dcv.authority = parts[1];
dcv.confidence = (parts.length > 2 ? Integer.valueOf(parts[2]) : Choices.CF_ACCEPTED);
String[] parts = value.split(csv.getAuthoritySeparator());
dcv.setValue(parts[0]);
dcv.setAuthority(parts[1]);
dcv.setConfidence((parts.length > 2 ? Integer.valueOf(parts[2]) : Choices.CF_ACCEPTED));
}
if ((value != null) && (!"".equals(value)) && (!contains(value, fromCSV)) && fromAuthority==null)
@@ -551,73 +561,66 @@ public class MetadataImport
((changes.getAdds().size() > 0) || (changes.getRemoves().size() > 0)))
{
// Get the complete list of what values should now be in that element
List<Metadatum> list = changes.getComplete();
List<BulkEditMetadataValue> list = changes.getComplete();
List<String> values = new ArrayList<String>();
List<String> authorities = new ArrayList<String>();
List<Integer> confidences = new ArrayList<Integer>();
for (Metadatum value : list)
for (BulkEditMetadataValue value : list)
{
if ((qualifier == null) && (language == null))
{
if ((schema.equals(value.schema)) &&
(element.equals(value.element)) &&
(value.qualifier == null) &&
(value.language == null))
if ((schema.equals(value.getSchema())) &&
(element.equals(value.getElement())) &&
(value.getQualifier() == null) &&
(value.getLanguage() == null))
{
values.add(value.value);
authorities.add(value.authority);
confidences.add(value.confidence);
values.add(value.getValue());
authorities.add(value.getAuthority());
confidences.add(value.getConfidence());
}
}
else if (qualifier == null)
{
if ((schema.equals(value.schema)) &&
(element.equals(value.element)) &&
(language.equals(value.language)) &&
(value.qualifier == null))
if ((schema.equals(value.getSchema())) &&
(element.equals(value.getElement())) &&
(language.equals(value.getLanguage())) &&
(value.getQualifier() == null))
{
values.add(value.value);
authorities.add(value.authority);
confidences.add(value.confidence);
values.add(value.getValue());
authorities.add(value.getAuthority());
confidences.add(value.getConfidence());
}
}
else if (language == null)
{
if ((schema.equals(value.schema)) &&
(element.equals(value.element)) &&
(qualifier.equals(value.qualifier)) &&
(value.language == null))
if ((schema.equals(value.getSchema())) &&
(element.equals(value.getElement())) &&
(qualifier.equals(value.getQualifier())) &&
(value.getLanguage() == null))
{
values.add(value.value);
authorities.add(value.authority);
confidences.add(value.confidence);
values.add(value.getValue());
authorities.add(value.getAuthority());
confidences.add(value.getConfidence());
}
}
else
{
if ((schema.equals(value.schema)) &&
(element.equals(value.element)) &&
(qualifier.equals(value.qualifier)) &&
(language.equals(value.language)))
if ((schema.equals(value.getSchema())) &&
(element.equals(value.getElement())) &&
(qualifier.equals(value.getQualifier())) &&
(language.equals(value.getLanguage())))
{
values.add(value.value);
authorities.add(value.authority);
confidences.add(value.confidence);
values.add(value.getValue());
authorities.add(value.getAuthority());
confidences.add(value.getConfidence());
}
}
}
// Set those values
item.clearMetadata(schema, element, qualifier, language);
String[] theValues = values.toArray(new String[values.size()]);
String[] theAuthorities = authorities.toArray(new String[authorities.size()]);
int[] theConfidences = new int[confidences.size()];
for (int k=0; k< confidences.size(); k++)
{
theConfidences[k] = confidences.get(k).intValue();
}
item.addMetadata(schema, element, qualifier, language, theValues, theAuthorities, theConfidences);
item.update();
itemService.clearMetadata(c, item, schema, element, qualifier, language);
itemService.addMetadata(c, item, schema, element, qualifier, language, values, authorities, confidences);
itemService.update(c, item);
}
}
@@ -636,9 +639,9 @@ public class MetadataImport
* @throws IOException Can be thrown when moving items in communities
* @throws MetadataImportException If something goes wrong to be reported back to the user
*/
private void compare(Item item,
protected void compare(Item item,
List<String> collections,
Collection[] actualCollections,
List<Collection> actualCollections,
BulkEditChange bechange,
boolean change) throws SQLException, AuthorizeException, IOException, MetadataImportException
{
@@ -646,7 +649,7 @@ public class MetadataImport
String oldOwner = item.getOwningCollection().getHandle();
String newOwner = collections.get(0);
// Resolve the handle to the collection
Collection newCollection = (Collection)HandleManager.resolveToObject(c, newOwner);
Collection newCollection = (Collection) handleService.resolveToObject(c, newOwner);
// Check it resolved OK
if (newCollection == null)
@@ -657,7 +660,7 @@ public class MetadataImport
if (!oldOwner.equals(newOwner))
{
// Register the old and new owning collections
bechange.changeOwningCollection(item.getOwningCollection(), (Collection)HandleManager.resolveToObject(c, newOwner));
bechange.changeOwningCollection(item.getOwningCollection(), (Collection) handleService.resolveToObject(c, newOwner));
}
// Second, loop through the strings from the CSV of mapped collections
@@ -681,7 +684,7 @@ public class MetadataImport
}
// Was it found?
DSpaceObject dso = HandleManager.resolveToObject(c, csvcollection);
DSpaceObject dso = handleService.resolveToObject(c, csvcollection);
if ((dso == null) || (dso.getType() != Constants.COLLECTION))
{
throw new MetadataImportException("Collection defined for item " + item.getID() +
@@ -706,7 +709,7 @@ public class MetadataImport
for (String csvcollection : collections)
{
// Don't check the owning collection
if ((first) && (collection.getID() == item.getOwningCollection().getID()))
if ((first) && (collection.getID().equals(item.getOwningCollection().getID())))
{
found = true;
}
@@ -733,17 +736,17 @@ public class MetadataImport
if (change)
{
// Remove old mapped collections
for (Collection c : bechange.getOldMappedCollections())
for (Collection collection : bechange.getOldMappedCollections())
{
c.removeItem(item);
collectionService.removeItem(c, collection, item);
}
// Add to new owned collection
if (bechange.getNewOwningCollection() != null)
{
bechange.getNewOwningCollection().addItem(item);
collectionService.addItem(c, bechange.getNewOwningCollection(), item);
item.setOwningCollection(bechange.getNewOwningCollection());
item.update();
itemService.update(c, item);
}
// Remove from old owned collection (if still a member)
@@ -752,7 +755,7 @@ public class MetadataImport
boolean found = false;
for (Collection c : item.getCollections())
{
if (c.getID() == bechange.getOldOwningCollection().getID())
if (c.getID().equals(bechange.getOldOwningCollection().getID()))
{
found = true;
}
@@ -760,14 +763,14 @@ public class MetadataImport
if (found)
{
bechange.getOldOwningCollection().removeItem(item);
collectionService.removeItem(c, bechange.getOldOwningCollection(), item);
}
}
// Add to new mapped collections
for (Collection c : bechange.getNewMappedCollections())
for (Collection collection : bechange.getNewMappedCollections())
{
c.addItem(item);
collectionService.addItem(c, collection, item);
}
}
@@ -783,7 +786,7 @@ public class MetadataImport
* @throws SQLException when an SQL error has occurred (querying DSpace)
* @throws AuthorizeException If the user can't make the changes
*/
private void add(String[] fromCSV, String md, BulkEditChange changes)
protected void add(String[] fromCSV, String md, BulkEditChange changes)
throws SQLException, AuthorizeException
{
// Don't add owning collection or action
@@ -800,7 +803,7 @@ public class MetadataImport
String[] bits = md.split("\\[");
language = bits[1].substring(0, bits[1].length() - 1);
}
AuthorityValue fromAuthority = getAuthorityValueType(md);
AuthorityValue fromAuthority = authorityValueService.getAuthorityValueType(md);
if (md.indexOf(':') > 0) {
md = md.substring(md.indexOf(':')+1);
}
@@ -828,9 +831,9 @@ public class MetadataImport
// Add all the values
for (String value : fromCSV)
{
Metadatum dcv = getDcValueFromCSV(language, schema, element, qualifier, value, fromAuthority);
BulkEditMetadataValue dcv = getBulkEditValueFromCSV(language, schema, element, qualifier, value, fromAuthority);
if(fromAuthority!=null){
value = dcv.value + DSpaceCSV.authoritySeparator + dcv.authority + DSpaceCSV.authoritySeparator + dcv.confidence;
value = dcv.getValue() + csv.getAuthoritySeparator() + dcv.getAuthority() + csv.getAuthoritySeparator() + dcv.getConfidence();
}
// Add it
@@ -841,60 +844,48 @@ public class MetadataImport
}
}
public static AuthorityValue getAuthorityValueType(String md) {
AuthorityValue fromAuthority = null;
List<AuthorityValue> types = AuthorityValue.getAuthorityTypes().getTypes();
for (AuthorityValue type : types) {
if (StringUtils.startsWithIgnoreCase(md,type.getAuthorityType())) {
fromAuthority = type;
}
}
return fromAuthority;
}
private Metadatum getDcValueFromCSV(String language, String schema, String element, String qualifier, String value, AuthorityValue fromAuthority) {
protected BulkEditMetadataValue getBulkEditValueFromCSV(String language, String schema, String element, String qualifier, String value, AuthorityValue fromAuthority) {
// Look to see if it should be removed
Metadatum dcv = new Metadatum();
dcv.schema = schema;
dcv.element = element;
dcv.qualifier = qualifier;
dcv.language = language;
BulkEditMetadataValue dcv = new BulkEditMetadataValue();
dcv.setSchema(schema);
dcv.setElement(element);
dcv.setQualifier(qualifier);
dcv.setLanguage(language);
if (fromAuthority != null) {
if (value.indexOf(':') > 0) {
value = value.substring(0, value.indexOf(':'));
}
// look up the value and authority in solr
AuthorityValue example = fromAuthority.newInstance(value);
List<AuthorityValue> byValue = authorityValueFinder.findByValue(c, schema, element, qualifier, example.getValue());
List<AuthorityValue> byValue = authorityValueService.findByValue(c, schema, element, qualifier, value);
AuthorityValue authorityValue = null;
if (byValue.isEmpty()) {
String toGenerate = fromAuthority.generateString() + value;
String field = schema + "_" + element + (StringUtils.isNotBlank(qualifier) ? "_" + qualifier : "");
authorityValue = AuthorityValueGenerator.generate(c, toGenerate, value, field);
dcv.authority = toGenerate;
authorityValue = authorityValueService.generate(c, toGenerate, value, field);
dcv.setAuthority(toGenerate);
} else {
authorityValue = byValue.get(0);
dcv.authority = authorityValue.getId();
dcv.setAuthority(authorityValue.getId());
}
dcv.value = authorityValue.getValue();
dcv.confidence = Choices.CF_ACCEPTED;
} else if (value == null || !value.contains(DSpaceCSV.authoritySeparator)) {
dcv.setValue(authorityValue.getValue());
dcv.setConfidence(Choices.CF_ACCEPTED);
} else if (value == null || !value.contains(csv.getAuthoritySeparator())) {
simplyCopyValue(value, dcv);
} else {
String[] parts = value.split(DSpaceCSV.escapedAuthoritySeparator);
dcv.value = parts[0];
dcv.authority = parts[1];
dcv.confidence = (parts.length > 2 ? Integer.valueOf(parts[2]) : Choices.CF_ACCEPTED);
String[] parts = value.split(csv.getEscapedAuthoritySeparator());
dcv.setValue(parts[0]);
dcv.setAuthority(parts[1]);
dcv.setConfidence((parts.length > 2 ? Integer.valueOf(parts[2]) : Choices.CF_ACCEPTED));
}
return dcv;
}
private void simplyCopyValue(String value, Metadatum dcv) {
dcv.value = value;
dcv.authority = null;
dcv.confidence = Choices.CF_UNSET;
protected void simplyCopyValue(String value, BulkEditMetadataValue dcv) {
dcv.setValue(value);
dcv.setAuthority(null);
dcv.setConfidence(Choices.CF_UNSET);
}
/**
@@ -904,7 +895,7 @@ public class MetadataImport
* @param haystack The array of Strings to search through
* @return Whether or not it is contained
*/
private boolean contains(String needle, String[] haystack)
protected boolean contains(String needle, String[] haystack)
{
// Look for the needle in the haystack
for (String examine : haystack)
@@ -923,7 +914,7 @@ public class MetadataImport
* @param in The element to clean
* @return The cleaned up element
*/
private String clean(String in)
protected String clean(String in)
{
// Check for nulls
if (in == null)
@@ -964,8 +955,8 @@ public class MetadataImport
for (BulkEditChange change : changes)
{
// Get the changes
List<Metadatum> adds = change.getAdds();
List<Metadatum> removes = change.getRemoves();
List<BulkEditMetadataValue> adds = change.getAdds();
List<BulkEditMetadataValue> removes = change.getRemoves();
List<Collection> newCollections = change.getNewMappedCollections();
List<Collection> oldCollections = change.getOldMappedCollections();
if ((adds.size() > 0) || (removes.size() > 0) ||
@@ -1103,16 +1094,16 @@ public class MetadataImport
}
// Show additions
for (Metadatum dcv : adds)
for (BulkEditMetadataValue metadataValue : adds)
{
String md = dcv.schema + "." + dcv.element;
if (dcv.qualifier != null)
String md = metadataValue.getSchema() + "." + metadataValue.getElement();
if (metadataValue.getQualifier() != null)
{
md += "." + dcv.qualifier;
md += "." + metadataValue.getQualifier();
}
if (dcv.language != null)
if (metadataValue.getLanguage() != null)
{
md += "[" + dcv.language + "]";
md += "[" + metadataValue.getLanguage() + "]";
}
if (!changed)
{
@@ -1122,26 +1113,26 @@ public class MetadataImport
{
System.out.print(" + Added (" + md + "): ");
}
System.out.print(dcv.value);
System.out.print(metadataValue.getValue());
if (isAuthorityControlledField(md))
{
System.out.print(", authority = " + dcv.authority);
System.out.print(", confidence = " + dcv.confidence);
System.out.print(", authority = " + metadataValue.getAuthority());
System.out.print(", confidence = " + metadataValue.getConfidence());
}
System.out.println("");
}
// Show removals
for (Metadatum dcv : removes)
for (BulkEditMetadataValue metadataValue : removes)
{
String md = dcv.schema + "." + dcv.element;
if (dcv.qualifier != null)
String md = metadataValue.getSchema() + "." + metadataValue.getElement();
if (metadataValue.getQualifier() != null)
{
md += "." + dcv.qualifier;
md += "." + metadataValue.getQualifier();
}
if (dcv.language != null)
if (metadataValue.getLanguage() != null)
{
md += "[" + dcv.language + "]";
md += "[" + metadataValue.getLanguage() + "]";
}
if (!changed)
{
@@ -1151,11 +1142,11 @@ public class MetadataImport
{
System.out.print(" - Removed (" + md + "): ");
}
System.out.print(dcv.value);
System.out.print(metadataValue.getValue());
if (isAuthorityControlledField(md))
{
System.out.print(", authority = " + dcv.authority);
System.out.print(", confidence = " + dcv.confidence);
System.out.print(", authority = " + metadataValue.getAuthority());
System.out.print(", confidence = " + metadataValue.getConfidence());
}
System.out.println("");
}
@@ -1286,11 +1277,10 @@ public class MetadataImport
String e = line.getOptionValue('e');
if (e.indexOf('@') != -1)
{
eperson = EPerson.findByEmail(c, e);
}
else
eperson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(c, e);
} else
{
eperson = EPerson.find(c, Integer.parseInt(e));
eperson = EPersonServiceFactory.getInstance().getEPersonService().find(c, UUID.fromString(e));
}
if (eperson == null)
@@ -1406,7 +1396,7 @@ public class MetadataImport
displayChanges(changes, true);
// Commit the change to the DB
c.commit();
// c.commit();
}
// Finsh off and tidy up

View File

@@ -85,6 +85,7 @@ public class MetadataImportInvalidHeadingException extends Exception
*
* @return The exception message
*/
@Override
public String getMessage()
{
if (type == SCHEMA)

View File

@@ -9,10 +9,7 @@ package org.dspace.app.checker;
import java.io.FileNotFoundException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.*;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
@@ -23,16 +20,10 @@ import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.log4j.Logger;
import org.dspace.checker.BitstreamDispatcher;
import org.dspace.checker.BitstreamInfoDAO;
import org.dspace.checker.CheckerCommand;
import org.dspace.checker.HandleDispatcher;
import org.dspace.checker.LimitedCountDispatcher;
import org.dspace.checker.LimitedDurationDispatcher;
import org.dspace.checker.ListDispatcher;
import org.dspace.checker.ResultsLogger;
import org.dspace.checker.ResultsPruner;
import org.dspace.checker.SimpleDispatcher;
import org.dspace.checker.*;
import org.dspace.content.Bitstream;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamService;
import org.dspace.core.Context;
import org.dspace.core.Utils;
@@ -48,6 +39,8 @@ public final class ChecksumChecker
{
private static final Logger LOG = Logger.getLogger(ChecksumChecker.class);
private static final BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();
/**
* Blanked off constructor, this class should be used as a command line
* tool.
@@ -127,118 +120,122 @@ public final class ChecksumChecker
{
printHelp(options);
}
Context context = null;
try {
context = new Context();
// Prune stage
if (line.hasOption('p'))
{
ResultsPruner rp = null;
try
{
rp = (line.getOptionValue('p') != null) ? ResultsPruner
.getPruner(line.getOptionValue('p')) : ResultsPruner
.getDefaultPruner();
}
catch (FileNotFoundException e)
{
LOG.error("File not found", e);
System.exit(1);
}
int count = rp.prune();
System.out.println("Pruned " + count
+ " old results from the database.");
}
Date processStart = Calendar.getInstance().getTime();
BitstreamDispatcher dispatcher = null;
// process should loop infinitely through
// most_recent_checksum table
if (line.hasOption('l'))
{
dispatcher = new SimpleDispatcher(new BitstreamInfoDAO(), processStart, false);
}
else if (line.hasOption('L'))
{
dispatcher = new SimpleDispatcher(new BitstreamInfoDAO(), processStart, true);
}
else if (line.hasOption('b'))
{
// check only specified bitstream(s)
String[] ids = line.getOptionValues('b');
List<Integer> idList = new ArrayList<Integer>(ids.length);
for (int i = 0; i < ids.length; i++)
// Prune stage
if (line.hasOption('p'))
{
ResultsPruner rp = null;
try
{
idList.add(Integer.valueOf(ids[i]));
rp = (line.getOptionValue('p') != null) ? ResultsPruner
.getPruner(context, line.getOptionValue('p')) : ResultsPruner
.getDefaultPruner(context);
}
catch (NumberFormatException nfe)
catch (FileNotFoundException e)
{
System.err.println("The following argument: " + ids[i]
+ " is not an integer");
LOG.error("File not found", e);
System.exit(1);
}
int count = rp.prune();
System.out.println("Pruned " + count
+ " old results from the database.");
}
Date processStart = Calendar.getInstance().getTime();
BitstreamDispatcher dispatcher = null;
// process should loop infinitely through
// most_recent_checksum table
if (line.hasOption('l'))
{
dispatcher = new SimpleDispatcher(context, processStart, false);
}
else if (line.hasOption('L'))
{
dispatcher = new SimpleDispatcher(context, processStart, true);
}
else if (line.hasOption('b'))
{
// check only specified bitstream(s)
String[] ids = line.getOptionValues('b');
List<Bitstream> bitstreams = new ArrayList<>(ids.length);
for (int i = 0; i < ids.length; i++)
{
try
{
bitstreams.add(bitstreamService.find(context, UUID.fromString(ids[i])));
}
catch (NumberFormatException nfe)
{
System.err.println("The following argument: " + ids[i]
+ " is not an integer");
System.exit(0);
}
}
dispatcher = new IteratorDispatcher(bitstreams.iterator());
}
else if (line.hasOption('a'))
{
dispatcher = new HandleDispatcher(context, line.getOptionValue('a'));
}
else if (line.hasOption('d'))
{
// run checker process for specified duration
try
{
dispatcher = new LimitedDurationDispatcher(
new SimpleDispatcher(context, processStart, true), new Date(
System.currentTimeMillis()
+ Utils.parseDuration(line
.getOptionValue('d'))));
}
catch (Exception e)
{
LOG.fatal("Couldn't parse " + line.getOptionValue('d')
+ " as a duration: ", e);
System.exit(0);
}
}
dispatcher = new ListDispatcher(idList);
}
else if (line.hasOption('a'))
{
dispatcher = new HandleDispatcher(new BitstreamInfoDAO(), line.getOptionValue('a'));
}
else if (line.hasOption('d'))
{
// run checker process for specified duration
try
else if (line.hasOption('c'))
{
dispatcher = new LimitedDurationDispatcher(
new SimpleDispatcher(new BitstreamInfoDAO(), processStart, true), new Date(
System.currentTimeMillis()
+ Utils.parseDuration(line
.getOptionValue('d'))));
}
catch (Exception e)
{
LOG.fatal("Couldn't parse " + line.getOptionValue('d')
+ " as a duration: ", e);
System.exit(0);
}
}
else if (line.hasOption('c'))
{
int count = Integer.valueOf(line.getOptionValue('c')).intValue();
// run checker process for specified number of bitstreams
dispatcher = new LimitedCountDispatcher(new SimpleDispatcher(
new BitstreamInfoDAO(), processStart, false), count);
}
else
{
dispatcher = new LimitedCountDispatcher(new SimpleDispatcher(
new BitstreamInfoDAO(), processStart, false), 1);
}
ResultsLogger logger = new ResultsLogger(processStart);
CheckerCommand checker = new CheckerCommand();
// verbose reporting
if (line.hasOption('v'))
{
checker.setReportVerbose(true);
}
int count = Integer.valueOf(line.getOptionValue('c'));
checker.setProcessStartDate(processStart);
checker.setDispatcher(dispatcher);
checker.setCollector(logger);
Context context = new Context();
try {
checker.process(context);
} finally {
context.commit();
// run checker process for specified number of bitstreams
dispatcher = new LimitedCountDispatcher(new SimpleDispatcher(
context, processStart, false), count);
}
else
{
dispatcher = new LimitedCountDispatcher(new SimpleDispatcher(
context, processStart, false), 1);
}
ResultsLogger logger = new ResultsLogger(processStart);
CheckerCommand checker = new CheckerCommand(context);
// verbose reporting
if (line.hasOption('v'))
{
checker.setReportVerbose(true);
}
checker.setProcessStartDate(processStart);
checker.setDispatcher(dispatcher);
checker.setCollector(logger);
checker.process();
context.complete();
context = null;
} finally {
if(context != null){
context.abort();
}
}
System.exit(0);
}
/**

View File

@@ -9,7 +9,9 @@ package org.dspace.app.harvest;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
@@ -20,15 +22,21 @@ import org.dspace.authorize.AuthorizeException;
import org.dspace.browse.IndexBrowse;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.harvest.HarvestedCollection;
import org.dspace.content.Item;
import org.dspace.content.ItemIterator;
import org.dspace.harvest.HarvestingException;
import org.dspace.harvest.OAIHarvester;
import org.dspace.harvest.OAIHarvester.HarvestingException;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.handle.HandleManager;
import org.dspace.harvest.factory.HarvestServiceFactory;
import org.dspace.harvest.service.HarvestedCollectionService;
/**
* Test class for harvested collections.
@@ -38,7 +46,11 @@ import org.dspace.handle.HandleManager;
public class Harvest
{
private static Context context;
private static final HarvestedCollectionService harvestedCollectionService = HarvestServiceFactory.getInstance().getHarvestedCollectionService();
private static final EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
private static final CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
public static void main(String[] argv) throws Exception
{
// create an options object and populate it
@@ -195,11 +207,11 @@ public class Harvest
System.exit(1);
}
List<Integer> cids = HarvestedCollection.findAll(context);
System.out.println("Purging the following collections (deleting items and resetting harvest status): " + cids.toString());
for (Integer cid : cids)
List<HarvestedCollection> harvestedCollections = harvestedCollectionService.findAll(context);
for (HarvestedCollection harvestedCollection : harvestedCollections)
{
harvester.purgeCollection(cid.toString(), eperson);
System.out.println("Purging the following collections (deleting items and resetting harvest status): " + harvestedCollection.getCollection().getID().toString());
harvester.purgeCollection(harvestedCollection.getCollection().getID().toString(), eperson);
}
context.complete();
}
@@ -272,7 +284,7 @@ public class Harvest
if (collectionID.indexOf('/') != -1)
{
// string has a / so it must be a handle - try and resolve it
dso = HandleManager.resolveToObject(context, collectionID);
dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, collectionID);
// resolved, now make sure it's a collection
if (dso == null || dso.getType() != Constants.COLLECTION)
@@ -289,7 +301,7 @@ public class Harvest
else
{
System.out.println("Looking up by id: " + collectionID + ", parsed as '" + Integer.parseInt(collectionID) + "', " + "in context: " + context);
targetCollection = Collection.find(context, Integer.parseInt(collectionID));
targetCollection = collectionService.find(context, UUID.fromString(collectionID));
}
}
// was the collection valid?
@@ -314,15 +326,15 @@ public class Harvest
System.out.println(collection.getID());
try {
HarvestedCollection hc = HarvestedCollection.find(context, collection.getID());
HarvestedCollection hc = harvestedCollectionService.find(context, collection);
if (hc == null) {
hc = HarvestedCollection.create(context, collection.getID());
hc = harvestedCollectionService.create(context, collection);
}
context.turnOffAuthorisationSystem();
hc.setHarvestParams(type, oaiSource, oaiSetId, mdConfigId);
hc.setHarvestStatus(HarvestedCollection.STATUS_READY);
hc.update();
harvestedCollectionService.update(context, hc);
context.restoreAuthSystemState();
context.complete();
}
@@ -352,11 +364,12 @@ public class Harvest
try
{
EPerson eperson = EPerson.findByEmail(context, email);
EPerson eperson = ePersonService.findByEmail(context, email);
context.setCurrentUser(eperson);
context.turnOffAuthorisationSystem();
ItemIterator it = collection.getAllItems();
ItemService itemService = ContentServiceFactory.getInstance().getItemService();
Iterator<Item> it = itemService.findByCollection(context, collection);
IndexBrowse ib = new IndexBrowse(context);
int i=0;
while (it.hasNext()) {
@@ -364,24 +377,24 @@ public class Harvest
Item item = it.next();
System.out.println("Deleting: " + item.getHandle());
ib.itemRemoved(item);
collection.removeItem(item);
// commit every 50 items
collectionService.removeItem(context, collection, item);
// Dispatch events every 50 items
if (i%50 == 0) {
context.commit();
context.dispatchEvents();
i=0;
}
}
HarvestedCollection hc = HarvestedCollection.find(context, collection.getID());
HarvestedCollection hc = harvestedCollectionService.find(context, collection);
if (hc != null) {
hc.setHarvestResult(null,"");
hc.setLastHarvested(null);
hc.setHarvestMessage("");
hc.setHarvestStatus(HarvestedCollection.STATUS_READY);
hc.setHarvestStartTime(null);
hc.update();
harvestedCollectionService.update(context, hc);
}
context.restoreAuthSystemState();
context.commit();
}
}
catch (Exception e) {
System.out.println("Changes could not be committed");
e.printStackTrace();
@@ -403,7 +416,7 @@ public class Harvest
OAIHarvester harvester = null;
try {
Collection collection = resolveCollection(collectionID);
HarvestedCollection hc = HarvestedCollection.find(context, collection.getID());
HarvestedCollection hc = harvestedCollectionService.find(context, collection);
harvester = new OAIHarvester(context, collection, hc);
System.out.println("success. ");
}
@@ -419,7 +432,7 @@ public class Harvest
try {
// Harvest will not work for an anonymous user
EPerson eperson = EPerson.findByEmail(context, email);
EPerson eperson = ePersonService.findByEmail(context, email);
System.out.println("Harvest started... ");
context.setCurrentUser(eperson);
harvester.runHarvest();
@@ -446,16 +459,14 @@ public class Harvest
try
{
List<Integer> cids = HarvestedCollection.findAll(context);
for (Integer cid : cids)
List<HarvestedCollection> harvestedCollections = harvestedCollectionService.findAll(context);
for (HarvestedCollection harvestedCollection : harvestedCollections)
{
HarvestedCollection hc = HarvestedCollection.find(context, cid);
//hc.setHarvestResult(null,"");
hc.setHarvestStartTime(null);
hc.setHarvestStatus(HarvestedCollection.STATUS_READY);
hc.update();
harvestedCollection.setHarvestStartTime(null);
harvestedCollection.setHarvestStatus(HarvestedCollection.STATUS_READY);
harvestedCollectionService.update(context, harvestedCollection);
}
context.commit();
System.out.println("success. ");
}
catch (Exception ex) {
@@ -467,12 +478,12 @@ public class Harvest
/**
* Starts up the harvest scheduler. Terminating this process will stop the scheduler.
*/
private static void startHarvester()
private static void startHarvester()
{
try
{
System.out.print("Starting harvest loop... ");
OAIHarvester.startNewScheduler();
HarvestServiceFactory.getInstance().getHarvestSchedulingService().startNewScheduler();
System.out.println("running. ");
}
catch (Exception ex) {

View File

@@ -0,0 +1,251 @@
package org.dspace.app.itemexport;
import org.apache.commons.cli.*;
import org.dspace.app.itemexport.factory.ItemExportServiceFactory;
import org.dspace.app.itemexport.service.ItemExportService;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import java.util.*;
/**
* Item exporter to create simple AIPs for DSpace content. Currently exports
* individual items, or entire collections. For instructions on use, see
* printUsage() method.
* <P>
* ItemExport creates the simple AIP package that the importer also uses. It
* consists of:
* <P>
* /exportdir/42/ (one directory per item) / dublin_core.xml - qualified dublin
* core in RDF schema / contents - text file, listing one file per line / file1
* - files contained in the item / file2 / ...
* <P>
* issues -doesn't handle special characters in metadata (needs to turn &'s into
* &amp;, etc.)
* <P>
* Modified by David Little, UCSD Libraries 12/21/04 to allow the registration
* of files (bitstreams) into DSpace.
*
* @author David Little
* @author Jay Paz
*/
public class ItemExportCLITool {

    protected static ItemExportService itemExportService = ItemExportServiceFactory.getInstance().getItemExportService();
    protected static HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
    protected static ItemService itemService = ContentServiceFactory.getInstance().getItemService();
    protected static CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();

    /*
     * Command-line entry point: parses the options, resolves the target item or
     * collection (by handle or UUID), and delegates the actual export to
     * ItemExportService. Exits with status 1 on any validation failure.
     */
    public static void main(String[] argv) throws Exception
    {
        // create an options object and populate it
        CommandLineParser parser = new PosixParser();

        Options options = new Options();

        options.addOption("t", "type", true, "type: COLLECTION or ITEM");
        options.addOption("i", "id", true, "ID or handle of thing to export");
        options.addOption("d", "dest", true,
                "destination where you want items to go");
        options.addOption("m", "migrate", false, "export for migration (remove handle and metadata that will be re-created in new system)");
        options.addOption("n", "number", true,
                "sequence number to begin exporting items with");
        options.addOption("z", "zip", true, "export as zip file (specify filename e.g. export.zip)");
        options.addOption("h", "help", false, "help");

        CommandLine line = parser.parse(options, argv);

        String typeString = null;
        String destDirName = null;
        String myIDString = null;
        int seqStart = -1;
        int myType = -1;

        Item myItem = null;
        Collection mycollection = null;

        if (line.hasOption('h'))
        {
            HelpFormatter myhelp = new HelpFormatter();
            myhelp.printHelp("ItemExport\n", options);
            System.out
                    .println("\nfull collection: ItemExport -t COLLECTION -i ID -d dest -n number");
            System.out
                    .println("singleitem: ItemExport -t ITEM -i ID -d dest -n number");

            System.exit(0);
        }

        if (line.hasOption('t')) // type
        {
            typeString = line.getOptionValue('t');

            if ("ITEM".equals(typeString))
            {
                myType = Constants.ITEM;
            }
            else if ("COLLECTION".equals(typeString))
            {
                myType = Constants.COLLECTION;
            }
        }

        if (line.hasOption('i')) // id
        {
            myIDString = line.getOptionValue('i');
        }

        if (line.hasOption('d')) // dest
        {
            destDirName = line.getOptionValue('d');
        }

        if (line.hasOption('n')) // number
        {
            seqStart = Integer.parseInt(line.getOptionValue('n'));
        }

        boolean migrate = false;
        if (line.hasOption('m')) // migrate
        {
            migrate = true;
        }

        boolean zip = false;
        String zipFileName = "";
        if (line.hasOption('z'))
        {
            zip = true;
            zipFileName = line.getOptionValue('z');
        }

        // now validate the args
        if (myType == -1)
        {
            System.out
                    .println("type must be either COLLECTION or ITEM (-h for help)");
            System.exit(1);
        }

        if (destDirName == null)
        {
            System.out
                    .println("destination directory must be set (-h for help)");
            System.exit(1);
        }

        if (seqStart == -1)
        {
            System.out
                    .println("sequence start number must be set (-h for help)");
            System.exit(1);
        }

        if (myIDString == null)
        {
            System.out
                    .println("ID must be set to either a database ID or a handle (-h for help)");
            System.exit(1);
        }

        Context c = new Context();
        c.setIgnoreAuthorization(true);

        if (myType == Constants.ITEM)
        {
            // first, is myIDString a handle?
            if (myIDString.indexOf('/') != -1)
            {
                // NOTE(review): if the handle resolves to a non-Item object this
                // cast throws ClassCastException before the type check below —
                // mirrors the pre-existing collection branch behaviour.
                myItem = (Item) handleService.resolveToObject(c, myIDString);

                if ((myItem == null) || (myItem.getType() != Constants.ITEM))
                {
                    myItem = null;
                }
            }
            else
            {
                myItem = itemService.find(c, UUID.fromString(myIDString));
            }

            if (myItem == null)
            {
                System.out
                        .println("Error, item cannot be found: " + myIDString);
                // FIX: previously execution continued with a null item and fell
                // through to the collection export path with a null collection,
                // causing a NullPointerException. Fail fast like the collection
                // branch does.
                System.exit(1);
            }
        }
        else
        {
            if (myIDString.indexOf('/') != -1)
            {
                // has a / must be a handle
                mycollection = (Collection) handleService.resolveToObject(c,
                        myIDString);

                // ensure it's a collection
                if ((mycollection == null)
                        || (mycollection.getType() != Constants.COLLECTION))
                {
                    mycollection = null;
                }
            }
            else
            {
                // myIDString was already verified non-null above; treat as UUID
                mycollection = collectionService.find(c, UUID.fromString(myIDString));
            }

            if (mycollection == null)
            {
                System.out.println("Error, collection cannot be found: "
                        + myIDString);
                System.exit(1);
            }
        }

        if (zip)
        {
            Iterator<Item> items;
            if (myItem != null)
            {
                List<Item> myItems = new ArrayList<>();
                myItems.add(myItem);
                items = myItems.iterator();
            }
            else
            {
                System.out.println("Exporting from collection: " + myIDString);
                items = itemService.findByCollection(c, mycollection);
            }
            itemExportService.exportAsZip(c, items, destDirName, zipFileName, seqStart, migrate);
        }
        else
        {
            if (myItem != null)
            {
                // it's only a single item
                itemExportService.exportItem(c, Collections.singletonList(myItem).iterator(), destDirName, seqStart, migrate);
            }
            else
            {
                System.out.println("Exporting from collection: " + myIDString);

                // it's a collection, so do a bunch of items
                Iterator<Item> i = itemService.findByCollection(c, mycollection);
                itemExportService.exportItem(c, i, destDirName, seqStart, migrate);
            }
        }

        c.complete();
    }
}

View File

@@ -0,0 +1,25 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemexport.factory;
import org.dspace.app.itemexport.service.ItemExportService;
import org.dspace.utils.DSpace;
/**
 * Abstract service locator for the itemexport package.
 * <p>
 * Call {@link #getInstance()} to obtain the concrete factory registered in the
 * DSpace service manager, then ask it for the individual services.
 *
 * @author kevinvandevelde at atmire.com
 */
public abstract class ItemExportServiceFactory {

    /** @return the service responsible for exporting items as simple archives */
    public abstract ItemExportService getItemExportService();

    /**
     * Look up the factory implementation registered under the bean name
     * "itemExportServiceFactory" in the DSpace service manager.
     *
     * @return the configured factory instance
     */
    public static ItemExportServiceFactory getInstance() {
        DSpace dspace = new DSpace();
        return dspace.getServiceManager()
                     .getServiceByName("itemExportServiceFactory", ItemExportServiceFactory.class);
    }
}

View File

@@ -0,0 +1,27 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemexport.factory;
import org.dspace.app.itemexport.service.ItemExportService;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Concrete {@link ItemExportServiceFactory} wired up by Spring.
 * <p>
 * Retrieve it via {@code ItemExportServiceFactory.getInstance()} rather than
 * instantiating it directly.
 *
 * @author kevinvandevelde at atmire.com
 */
public class ItemExportServiceFactoryImpl extends ItemExportServiceFactory {

    /** Injected by Spring (by type); the single item export service implementation. */
    @Autowired(required = true)
    private ItemExportService exportService;

    @Override
    public ItemExportService getItemExportService() {
        return exportService;
    }
}

View File

@@ -0,0 +1,268 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemexport.service;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import javax.mail.MessagingException;
import java.io.InputStream;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
/**
* Item exporter to create simple AIPs for DSpace content. Currently exports
* individual items, or entire collections. For instructions on use, see
* printUsage() method.
* <P>
* ItemExport creates the simple AIP package that the importer also uses. It
* consists of:
* <P>
* /exportdir/42/ (one directory per item) / dublin_core.xml - qualified dublin
* core in RDF schema / contents - text file, listing one file per line / file1
* - files contained in the item / file2 / ...
* <P>
* issues -doesn't handle special characters in metadata (needs to turn &'s into
* &amp;, etc.)
* <P>
* Modified by David Little, UCSD Libraries 12/21/04 to allow the registration
* of files (bitstreams) into DSpace.
*
* @author David Little
* @author Jay Paz
*/
public interface ItemExportService {

    /**
     * MIME type used when serving a completed export archive for download.
     */
    public static final String COMPRESSED_EXPORT_MIME_TYPE = "application/zip";

    /**
     * Export the given items to the destination directory, one numbered
     * sub-directory per item starting at seqStart.
     *
     * @param c The DSpace Context
     * @param i The items to export
     * @param destDirName The directory to save the export in
     * @param seqStart The first number in the sequence
     * @param migrate Whether to use the migrate option or not
     * @throws Exception
     */
    public void exportItem(Context c, Iterator<Item> i,
            String destDirName, int seqStart, boolean migrate) throws Exception;

    /**
     * Method to perform an export and save it as a zip file.
     *
     * @param context The DSpace Context
     * @param items The items to export
     * @param destDirName The directory to save the export in
     * @param zipFileName The name to save the zip file as
     * @param seqStart The first number in the sequence
     * @param migrate Whether to use the migrate option or not
     * @throws Exception
     */
    public void exportAsZip(Context context, Iterator<Item> items,
                            String destDirName, String zipFileName,
                            int seqStart, boolean migrate) throws Exception;

    /**
     * Convenience method to export a single Community, Collection, or
     * Item
     *
     * @param dso
     *            - the dspace object to export
     * @param context
     *            - the dspace context
     * @throws Exception
     */
    public void createDownloadableExport(DSpaceObject dso,
            Context context, boolean migrate) throws Exception;

    /**
     * Convenience method to export a List of dspace objects (Community,
     * Collection or Item)
     *
     * @param dsObjects
     *            - List containing dspace objects
     * @param context
     *            - the dspace context
     * @throws Exception
     */
    public void createDownloadableExport(List<DSpaceObject> dsObjects,
            Context context, boolean migrate) throws Exception;

    /**
     * Convenience method to export a single Community, Collection, or
     * Item
     *
     * @param dso
     *            - the dspace object to export
     * @param context
     *            - the dspace context
     * @param additionalEmail
     *            - cc email to use
     * @throws Exception
     */
    public void createDownloadableExport(DSpaceObject dso,
            Context context, String additionalEmail, boolean migrate) throws Exception;

    /**
     * Convenience method to export a List of dspace objects (Community,
     * Collection or Item)
     *
     * @param dsObjects
     *            - List containing dspace objects
     * @param context
     *            - the dspace context
     * @param additionalEmail
     *            - cc email to use
     * @throws Exception
     */
    public void createDownloadableExport(List<DSpaceObject> dsObjects,
            Context context, String additionalEmail, boolean migrate) throws Exception;

    /**
     * Create a file name based on the date and eperson
     *
     * @param eperson
     *            - eperson who requested export and will be able to download it
     * @param date
     *            - the date the export process was created
     * @return String representing the file name in the form of
     *         'export_yyy_MMM_dd_count_epersonID'
     * @throws Exception
     */
    public String assembleFileName(String type, EPerson eperson,
            Date date) throws Exception;

    /**
     * Use config file entry for org.dspace.app.itemexport.download.dir and id
     * of the eperson to create a download directory name
     *
     * @param ePerson
     *            - the eperson who requested export archive
     * @return String representing a directory in the form of
     *         org.dspace.app.itemexport.download.dir/epersonID
     * @throws Exception
     */
    public String getExportDownloadDirectory(EPerson ePerson)
            throws Exception;

    /**
     * Returns config file entry for org.dspace.app.itemexport.work.dir
     *
     * @return String representing config file entry for
     *         org.dspace.app.itemexport.work.dir
     * @throws Exception
     */
    public String getExportWorkDirectory() throws Exception;

    /**
     * Used to read the export archive. Intended for download.
     *
     * @param fileName
     *            the name of the file to download
     * @param eperson
     *            the eperson requesting the download
     * @return an input stream of the file to be downloaded
     * @throws Exception
     */
    public InputStream getExportDownloadInputStream(String fileName,
            EPerson eperson) throws Exception;

    /**
     * Get the file size of the export archive represented by the file name.
     *
     * @param fileName
     *            name of the file to get the size.
     * @throws Exception
     */
    public long getExportFileSize(Context context, String fileName) throws Exception;

    /**
     * Get the last-modified timestamp (milliseconds) of the export archive
     * represented by the file name.
     *
     * @param context dspace context
     * @param fileName name of the export archive file
     * @throws Exception
     */
    public long getExportFileLastModified(Context context, String fileName)
            throws Exception;

    /**
     * The file name of the export archive contains the eperson id of the person
     * who created it When requested for download this method can check if the
     * person requesting it is the same one that created it
     *
     * @param context
     *            dspace context
     * @param fileName
     *            the file name to check auths for
     * @return true if it is the same person false otherwise
     */
    public boolean canDownload(Context context, String fileName);

    /**
     * Reads the download directory for the eperson to see if any export
     * archives are available
     *
     * @param eperson
     * @return a list of file names representing export archives that have been
     *         processed
     * @throws Exception
     */
    public List<String> getExportsAvailable(EPerson eperson)
            throws Exception;

    /**
     * A clean up method that is run before a new export archive is created. It
     * uses the config file entry 'org.dspace.app.itemexport.life.span.hours' to
     * determine if the current exports are too old and need purging
     *
     * @param eperson
     *            - the eperson to clean up
     * @throws Exception
     */
    public void deleteOldExportArchives(EPerson eperson) throws Exception;

    /**
     * A clean up method that is run before a new export archive is created. It
     * uses the config file entry 'org.dspace.app.itemexport.life.span.hours' to
     * determine if the current exports are too old and need purging
     * Removes all old exports, not just those for the person doing the export.
     *
     * @throws Exception
     */
    public void deleteOldExportArchives() throws Exception;

    /**
     * Since the archive is created in a new thread we are unable to communicate
     * with calling method about success or failure. We accomplish this
     * communication with email instead. Send a success email once the export
     * archive is complete and ready for download
     *
     * @param context
     *            - the current Context
     * @param eperson
     *            - eperson to send the email to
     * @param fileName
     *            - the file name to be downloaded. It is added to the url in
     *            the email
     * @throws MessagingException
     */
    public void emailSuccessMessage(Context context, EPerson eperson,
            String fileName) throws MessagingException;

    /**
     * Since the archive is created in a new thread we are unable to communicate
     * with calling method about success or failure. We accomplish this
     * communication with email instead. Send an error email if the export
     * archive fails
     *
     * @param eperson
     *            - EPerson to send the error message to
     * @param error
     *            - the error message
     * @throws MessagingException
     */
    public void emailErrorMessage(EPerson eperson, String error)
            throws MessagingException;

    /**
     * Zip the contents of the source directory into the target zip file.
     *
     * @param strSource source directory to compress
     * @param target path of the zip file to create
     * @throws Exception
     */
    public void zip(String strSource, String target) throws Exception;
}

View File

@@ -0,0 +1,413 @@
package org.dspace.app.itemimport;
import org.apache.commons.cli.*;
import org.dspace.app.itemimport.factory.ItemImportServiceFactory;
import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.content.Collection;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.search.DSIndexer;
import java.io.File;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.UUID;
/**
* Import items into DSpace. The conventional use is upload files by copying
* them. DSpace writes the item's bitstreams into its assetstore. Metadata is
* also loaded to the DSpace database.
* <P>
* A second use assumes the bitstream files already exist in a storage
* resource accessible to DSpace. In this case the bitstreams are 'registered'.
* That is, the metadata is loaded to the DSpace database and DSpace is given
* the location of the file which is subsumed into DSpace.
* <P>
* The distinction is controlled by the format of lines in the 'contents' file.
* See comments in processContentsFile() below.
* <P>
* Modified by David Little, UCSD Libraries 12/21/04 to
* allow the registration of files (bitstreams) into DSpace.
*/
public class ItemImportCLITool {

    private static boolean template = false;

    private static final CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
    private static final EPersonService epersonService = EPersonServiceFactory.getInstance().getEPersonService();
    private static final HandleService handleService = HandleServiceFactory.getInstance().getHandleService();

    /*
     * Command-line entry point: parses the options, validates them per command
     * (add / replace / delete / add-bte), resolves the eperson and destination
     * collections, and delegates the actual work to ItemImportService.
     */
    public static void main(String[] argv) throws Exception
    {
        DSIndexer.setBatchProcessingMode(true);
        Date startTime = new Date();
        int status = 0;

        try {
            // create an options object and populate it
            CommandLineParser parser = new PosixParser();

            Options options = new Options();

            options.addOption("a", "add", false, "add items to DSpace");
            options.addOption("b", "add-bte", false, "add items to DSpace via Biblio-Transformation-Engine (BTE)");
            options.addOption("r", "replace", false, "replace items in mapfile");
            options.addOption("d", "delete", false,
                    "delete items listed in mapfile");
            options.addOption("i", "inputtype", true, "input type in case of BTE import");
            options.addOption("s", "source", true, "source of items (directory)");
            options.addOption("z", "zip", true, "name of zip file");
            options.addOption("c", "collection", true,
                    "destination collection(s) Handle or database ID");
            options.addOption("m", "mapfile", true, "mapfile items in mapfile");
            options.addOption("e", "eperson", true,
                    "email of eperson doing importing");
            options.addOption("w", "workflow", false,
                    "send submission through collection's workflow");
            options.addOption("n", "notify", false,
                    "if sending submissions through the workflow, send notification emails");
            options.addOption("t", "test", false,
                    "test run - do not actually import items");
            options.addOption("p", "template", false, "apply template");
            options.addOption("R", "resume", false,
                    "resume a failed import (add only)");
            options.addOption("q", "quiet", false, "don't display metadata");

            options.addOption("h", "help", false, "help");

            CommandLine line = parser.parse(options, argv);

            String command = null; // add replace remove, etc
            String bteInputType = null; //ris, endnote, tsv, csv, bibtex
            String sourcedir = null;
            String mapfile = null;
            String eperson = null; // db ID or email
            String[] collections = null; // db ID or handles
            boolean isTest = false;
            boolean isResume = false;
            boolean useWorkflow = false;
            boolean useWorkflowSendEmail = false;
            boolean isQuiet = false;

            if (line.hasOption('h')) {
                HelpFormatter myhelp = new HelpFormatter();
                myhelp.printHelp("ItemImport\n", options);
                System.out
                        .println("\nadding items:    ItemImport -a -e eperson -c collection -s sourcedir -m mapfile");
                System.out
                        .println("\nadding items from zip file:    ItemImport -a -e eperson -c collection -s sourcedir -z filename.zip -m mapfile");
                System.out
                        .println("replacing items: ItemImport -r -e eperson -c collection -s sourcedir -m mapfile");
                System.out
                        .println("deleting items:  ItemImport -d -e eperson -m mapfile");
                System.out
                        .println("If multiple collections are specified, the first collection will be the one that owns the item.");

                System.exit(0);
            }

            if (line.hasOption('a')) {
                command = "add";
            }

            if (line.hasOption('r')) {
                command = "replace";
            }

            if (line.hasOption('d')) {
                command = "delete";
            }

            if (line.hasOption('b')) {
                command = "add-bte";
            }

            if (line.hasOption('i')) {
                bteInputType = line.getOptionValue('i');
            }

            if (line.hasOption('w')) {
                useWorkflow = true;
                if (line.hasOption('n')) {
                    useWorkflowSendEmail = true;
                }
            }

            if (line.hasOption('t')) {
                isTest = true;
                System.out.println("**Test Run** - not actually importing items.");
            }

            if (line.hasOption('p')) {
                template = true;
            }

            if (line.hasOption('s')) // source
            {
                sourcedir = line.getOptionValue('s');
            }

            if (line.hasOption('m')) // mapfile
            {
                mapfile = line.getOptionValue('m');
            }

            if (line.hasOption('e')) // eperson
            {
                eperson = line.getOptionValue('e');
            }

            if (line.hasOption('c')) // collections
            {
                collections = line.getOptionValues('c');
            }

            if (line.hasOption('R')) {
                isResume = true;
                System.out
                        .println("**Resume import** - attempting to import items not already imported");
            }

            if (line.hasOption('q')) {
                isQuiet = true;
            }

            boolean zip = false;
            String zipfilename = "";
            if (line.hasOption('z')) {
                zip = true;
                zipfilename = sourcedir + System.getProperty("file.separator") + line.getOptionValue('z');
            }

            //By default assume collections will be given on the command line
            boolean commandLineCollections = true;
            // now validate
            // must have a command set
            if (command == null) {
                System.out
                        .println("Error - must run with either add, replace, or remove (run with -h flag for details)");
                System.exit(1);
            } else if ("add".equals(command) || "replace".equals(command)) {
                if (sourcedir == null) {
                    System.out
                            .println("Error - a source directory containing items must be set");
                    System.out.println(" (run with -h flag for details)");
                    System.exit(1);
                }

                if (mapfile == null) {
                    System.out
                            .println("Error - a map file to hold importing results must be specified");
                    System.out.println(" (run with -h flag for details)");
                    System.exit(1);
                }

                if (eperson == null) {
                    System.out
                            .println("Error - an eperson to do the importing must be specified");
                    System.out.println(" (run with -h flag for details)");
                    System.exit(1);
                }

                if (collections == null) {
                    System.out.println("No collections given. Assuming 'collections' file inside item directory");
                    commandLineCollections = false;
                }
            } else if ("add-bte".equals(command)) {
                //Source dir can be null, the user can specify the parameters for his loader in the Spring XML configuration file

                if (mapfile == null) {
                    System.out
                            .println("Error - a map file to hold importing results must be specified");
                    System.out.println(" (run with -h flag for details)");
                    System.exit(1);
                }

                if (eperson == null) {
                    System.out
                            .println("Error - an eperson to do the importing must be specified");
                    System.out.println(" (run with -h flag for details)");
                    System.exit(1);
                }

                if (collections == null) {
                    System.out.println("No collections given. Assuming 'collections' file inside item directory");
                    commandLineCollections = false;
                }

                if (bteInputType == null) {
                    System.out
                            .println("Error - an input type (tsv, csv, ris, endnote, bibtex or any other type you have specified in BTE Spring XML configuration file) must be specified");
                    System.out.println(" (run with -h flag for details)");
                    System.exit(1);
                }
            } else if ("delete".equals(command)) {
                if (eperson == null) {
                    System.out
                            .println("Error - an eperson to do the importing must be specified");
                    System.exit(1);
                }

                if (mapfile == null) {
                    System.out.println("Error - a map file must be specified");
                    System.exit(1);
                }
            }

            // can only resume for adds
            if (isResume && !"add".equals(command) && !"add-bte".equals(command)) {
                System.out
                        .println("Error - resume option only works with the --add or the --add-bte commands");
                System.exit(1);
            }

            // do checks around mapfile - if mapfile exists and 'add' is selected,
            // resume must be chosen
            File myFile = new File(mapfile);

            if (!isResume && "add".equals(command) && myFile.exists()) {
                System.out.println("Error - the mapfile " + mapfile
                        + " already exists.");
                System.out
                        .println("Either delete it or use --resume if attempting to resume an aborted import.");
                System.exit(1);
            }

            ItemImportService myloader = ItemImportServiceFactory.getInstance().getItemImportService();
            myloader.setTest(isTest);
            myloader.setResume(isResume);
            myloader.setUseWorkflow(useWorkflow);
            myloader.setUseWorkflowSendEmail(useWorkflowSendEmail);
            myloader.setQuiet(isQuiet);

            // create a context
            Context c = new Context();

            // find the EPerson, assign to context
            EPerson myEPerson = null;

            if (eperson.indexOf('@') != -1) {
                // @ sign, must be an email
                myEPerson = epersonService.findByEmail(c, eperson);
            } else {
                myEPerson = epersonService.find(c, UUID.fromString(eperson));
            }

            if (myEPerson == null) {
                System.out.println("Error, eperson cannot be found: " + eperson);
                System.exit(1);
            }

            c.setCurrentUser(myEPerson);

            // find collections
            List<Collection> mycollections = null;

            // don't need to validate collections set if command is "delete"
            // also if no collections are given in the command line
            if (!"delete".equals(command) && commandLineCollections) {
                System.out.println("Destination collections:");

                mycollections = new ArrayList<>();

                // validate each collection arg to see if it's a real collection
                for (int i = 0; i < collections.length; i++) {
                    // resolve into a local first; FIX: the previous code called
                    // mycollections.set(i, ...) in the UUID branch without ever
                    // having added an element at index i, which always threw
                    // IndexOutOfBoundsException for collections given by ID.
                    Collection resolved;

                    // is the ID a handle?
                    if (collections[i].indexOf('/') != -1) {
                        // string has a / so it must be a handle - try and resolve
                        // it
                        resolved = (Collection) handleService
                                .resolveToObject(c, collections[i]);

                        // resolved, now make sure it's a collection
                        if ((resolved != null)
                                && (resolved.getType() != Constants.COLLECTION)) {
                            resolved = null;
                        }
                    }
                    // not a handle, try and treat it as a collection database ID
                    else {
                        resolved = collectionService.find(c, UUID.fromString(collections[i]));
                    }

                    // was the collection valid?
                    if (resolved == null) {
                        throw new IllegalArgumentException("Cannot resolve "
                                + collections[i] + " to collection");
                    }

                    mycollections.add(resolved);

                    // print progress info
                    String owningPrefix = "";

                    if (i == 0) {
                        owningPrefix = "Owning ";
                    }

                    System.out.println(owningPrefix + " Collection: "
                            + resolved.getName());
                }
            } // end of validating collections

            try {
                // If this is a zip archive, unzip it first
                if (zip) {
                    sourcedir = myloader.unzip(sourcedir, zipfilename);
                }

                c.turnOffAuthorisationSystem();

                if ("add".equals(command)) {
                    myloader.addItems(c, mycollections, sourcedir, mapfile, template);
                } else if ("replace".equals(command)) {
                    myloader.replaceItems(c, mycollections, sourcedir, mapfile, template);
                } else if ("delete".equals(command)) {
                    myloader.deleteItems(c, mapfile);
                } else if ("add-bte".equals(command)) {
                    myloader.addBTEItems(c, mycollections, sourcedir, mapfile, template, bteInputType, null);
                }

                // complete all transactions
                c.complete();
            } catch (Exception e) {
                c.abort();
                e.printStackTrace();
                System.out.println(e);
                status = 1;
            }

            // Delete the unzipped file
            try {
                if (zip) {
                    System.gc();
                    System.out.println("Deleting temporary zip directory: " + myloader.getTempWorkDirFile().getAbsolutePath());
                    myloader.cleanupZipTemp();
                }
            } catch (Exception ex) {
                System.out.println("Unable to delete temporary zip archive location: " + myloader.getTempWorkDirFile().getAbsolutePath());
            }

            if (isTest) {
                System.out.println("***End of Test Run***");
            }
        } finally {
            DSIndexer.setBatchProcessingMode(false);
            Date endTime = new Date();
            System.out.println("Started: " + startTime.getTime());
            System.out.println("Ended: " + endTime.getTime());
            System.out.println("Elapsed time: " + ((endTime.getTime() - startTime.getTime()) / 1000) + " secs (" + (endTime.getTime() - startTime.getTime()) + " msecs)");
        }

        System.exit(status);
    }
}

View File

@@ -0,0 +1,25 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemimport.factory;
import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.utils.DSpace;
/**
 * Abstract service locator for the itemimport package.
 * <p>
 * Call {@link #getInstance()} to obtain the concrete factory registered in the
 * DSpace service manager, then ask it for the individual services.
 *
 * @author kevinvandevelde at atmire.com
 */
public abstract class ItemImportServiceFactory {

    /** @return the service responsible for importing items into DSpace */
    public abstract ItemImportService getItemImportService();

    /**
     * Look up the factory implementation registered under the bean name
     * "itemImportServiceFactory" in the DSpace service manager.
     *
     * @return the configured factory instance
     */
    public static ItemImportServiceFactory getInstance() {
        DSpace dspace = new DSpace();
        return dspace.getServiceManager()
                     .getServiceByName("itemImportServiceFactory", ItemImportServiceFactory.class);
    }
}

View File

@@ -0,0 +1,27 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemimport.factory;
import org.dspace.app.itemimport.service.ItemImportService;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Concrete {@link ItemImportServiceFactory} wired up by Spring.
 * <p>
 * Retrieve it via {@code ItemImportServiceFactory.getInstance()} rather than
 * instantiating it directly.
 *
 * @author kevinvandevelde at atmire.com
 */
public class ItemImportServiceFactoryImpl extends ItemImportServiceFactory {

    /** Injected by Spring (by type); the single item import service implementation. */
    @Autowired(required = true)
    private ItemImportService importService;

    @Override
    public ItemImportService getItemImportService() {
        return importService;
    }
}

View File

@@ -0,0 +1,124 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.itemimport.service;
import org.dspace.app.itemimport.BatchUpload;
import org.dspace.content.Collection;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import javax.mail.MessagingException;
import java.io.File;
import java.io.IOException;
import java.util.List;
/**
* Import items into DSpace. The conventional use is upload files by copying
* them. DSpace writes the item's bitstreams into its assetstore. Metadata is
* also loaded to the DSpace database.
* <P>
* A second use assumes the bitstream files already exist in a storage
* resource accessible to DSpace. In this case the bitstreams are 'registered'.
* That is, the metadata is loaded to the DSpace database and DSpace is given
* the location of the file which is subsumed into DSpace.
* <P>
* The distinction is controlled by the format of lines in the 'contents' file.
* See comments in processContentsFile() below.
* <P>
* Modified by David Little, UCSD Libraries 12/21/04 to
* allow the registration of files (bitstreams) into DSpace.
*/
public interface ItemImportService {

    /**
     * Add items from the source directory, rolling the whole batch back if any
     * item fails.
     */
    public void addItemsAtomic(Context c, List<Collection> mycollections, String sourceDir, String mapFile, boolean template) throws Exception;

    /**
     * Add items from the source directory, recording each imported item in the
     * map file as it goes.
     */
    public void addItems(Context c, List<Collection> mycollections,
            String sourceDir, String mapFile, boolean template) throws Exception;

    /** Unzip the given archive into the temporary work directory; returns the extraction path. */
    public String unzip(File zipfile) throws IOException;

    /** Unzip the given archive into destDir; returns the extraction path. */
    public String unzip(File zipfile, String destDir) throws IOException;

    /** Unzip the named archive found under sourcedir; returns the extraction path. */
    public String unzip(String sourcedir, String zipfilename) throws IOException;

    /**
     *
     * Given a public URL to a zip file that has the Simple Archive Format, this method imports the contents to DSpace
     * @param url The public URL of the zip file
     * @param owningCollection The owning collection the items will belong to
     * @param collections The collections the created items will be inserted to, apart from the owning one
     * @param resumeDir In case of a resume request, the directory that contains the old mapfile and data
     * @param context The context
     * @throws Exception
     */
    public void processUIImport(String url, Collection owningCollection, String[] collections, String resumeDir, String inputType, Context context, boolean template) throws Exception;

    /**
     * Since the BTE batch import is done in a new thread we are unable to communicate
     * with calling method about success or failure. We accomplish this
     * communication with email instead. Send a success email once the batch
     * import is complete
     *
     * @param context
     *            - the current Context
     * @param eperson
     *            - eperson to send the email to
     * @param fileName
     *            - the filepath to the mapfile created by the batch import
     * @throws javax.mail.MessagingException
     */
    public void emailSuccessMessage(Context context, EPerson eperson,
            String fileName) throws MessagingException;

    /**
     * Since the BTE batch import is done in a new thread we are unable to communicate
     * with calling method about success or failure. We accomplish this
     * communication with email instead. Send an error email if the batch
     * import fails
     *
     * @param eperson
     *            - EPerson to send the error message to
     * @param error
     *            - the error message
     * @throws MessagingException
     */
    public void emailErrorMessage(EPerson eperson, String error)
            throws MessagingException;

    /** List the batch uploads available to the given eperson. */
    public List<BatchUpload> getImportsAvailable(EPerson eperson)
            throws Exception;

    /** Return the upload directory for the given eperson's batch imports. */
    public String getImportUploadableDirectory(EPerson ePerson)
            throws Exception;

    /** Delete the batch upload identified by uploadId. */
    public void deleteBatchUpload(Context c, String uploadId) throws Exception;

    /** Replace the items listed in the map file with new content from sourcedir. */
    public void replaceItems(Context c, List<Collection> mycollections, String sourcedir, String mapfile, boolean template) throws Exception;

    /** Delete the items listed in the map file. */
    public void deleteItems(Context c, String mapfile) throws Exception;

    /** Add items via the Biblio-Transformation-Engine using the given input type (e.g. ris, csv, bibtex). */
    public void addBTEItems(Context c, List<Collection> mycollections, String sourcedir, String mapfile, boolean template, String bteInputType, String workingDir) throws Exception;

    /** Path of the temporary work directory used for unzipping archives. */
    public String getTempWorkDir();

    /** File handle for the temporary work directory used for unzipping archives. */
    public File getTempWorkDirFile();

    /** Remove the contents of the temporary zip extraction directory. */
    public void cleanupZipTemp();

    /** When true, validate only - do not actually import items. */
    public void setTest(boolean isTest);

    /** When true, resume a previously aborted import using the existing map file. */
    public void setResume(boolean isResume);

    /** When true, route submissions through the destination collection's workflow. */
    public void setUseWorkflow(boolean useWorkflow);

    /** When true, send notification emails for workflow submissions. */
    public void setUseWorkflowSendEmail(boolean useWorkflow);

    /** When true, suppress metadata output during the import. */
    public void setQuiet(boolean isQuiet);
}

View File

@@ -9,13 +9,17 @@ package org.dspace.app.itemmarking;
import java.io.UnsupportedEncodingException;
import java.sql.SQLException;
import java.util.List;
import org.dspace.app.util.Util;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.BundleBitstream;
import org.dspace.content.Item;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
/**
* This is an item marking Strategy class that tries to mark an item availability
@@ -28,6 +32,9 @@ public class ItemMarkingAvailabilityBitstreamStrategy implements ItemMarkingExtr
private String availableImageName;
private String nonAvailableImageName;
@Autowired(required = true)
protected ItemService itemService;
public ItemMarkingAvailabilityBitstreamStrategy() {
@@ -37,27 +44,28 @@ public class ItemMarkingAvailabilityBitstreamStrategy implements ItemMarkingExtr
public ItemMarkingInfo getItemMarkingInfo(Context context, Item item)
throws SQLException {
Bundle[] bundles = item.getBundles("ORIGINAL");
if (bundles.length == 0){
List<Bundle> bundles = itemService.getBundles(item, "ORIGINAL");
if (bundles.size() == 0){
ItemMarkingInfo markInfo = new ItemMarkingInfo();
markInfo.setImageName(nonAvailableImageName);
return markInfo;
}
else {
Bundle originalBundle = bundles[0];
if (originalBundle.getBitstreams().length == 0){
Bundle originalBundle = bundles.iterator().next();
if (originalBundle.getBitstreams().size() == 0){
ItemMarkingInfo markInfo = new ItemMarkingInfo();
markInfo.setImageName(nonAvailableImageName);
return markInfo;
}
else {
Bitstream bitstream = originalBundle.getBitstreams()[0];
ItemMarkingInfo signInfo = new ItemMarkingInfo();
signInfo.setImageName(availableImageName);
signInfo.setTooltip(bitstream.getName());
BundleBitstream bundleBitstream = originalBundle.getBitstreams().get(0);
Bitstream bitstream = bundleBitstream.getBitstream();
ItemMarkingInfo signInfo = new ItemMarkingInfo();
signInfo.setImageName(availableImageName);
signInfo.setTooltip(bitstream.getName());
@@ -88,4 +96,4 @@ public class ItemMarkingAvailabilityBitstreamStrategy implements ItemMarkingExtr
public void setNonAvailableImageName(String nonAvailableImageName) {
this.nonAvailableImageName = nonAvailableImageName;
}
}
}

View File

@@ -9,11 +9,14 @@ package org.dspace.app.itemmarking;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.dspace.content.Item;
import org.dspace.content.Metadatum;
import org.dspace.content.MetadataValue;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
/**
* This is an item marking Strategy class that tries to mark an item
@@ -25,6 +28,9 @@ import org.dspace.core.Context;
*/
public class ItemMarkingMetadataStrategy implements ItemMarkingExtractor {
@Autowired(required = true)
protected ItemService itemService;
private String metadataField;
Map<String, ItemMarkingInfo> mapping = new HashMap<String, ItemMarkingInfo>();
@@ -37,11 +43,11 @@ public class ItemMarkingMetadataStrategy implements ItemMarkingExtractor {
if (metadataField != null && mapping!=null)
{
Metadatum[] vals = item.getMetadataByMetadataString(metadataField);
if (vals.length > 0)
List<MetadataValue> vals = itemService.getMetadataByMetadataString(item, metadataField);
if (vals.size() > 0)
{
for (Metadatum value : vals){
String type = value.value;
for (MetadataValue value : vals){
String type = value.getValue();
if (mapping.containsKey(type)){
return mapping.get(type);
}

View File

@@ -21,7 +21,7 @@ import java.util.Map;
*/
public class ActionManager implements Iterable<UpdateAction> {
private Map<Class<? extends UpdateAction>, UpdateAction> registry
protected Map<Class<? extends UpdateAction>, UpdateAction> registry
= new LinkedHashMap<Class<? extends UpdateAction>, UpdateAction>();
public UpdateAction getUpdateAction(Class<? extends UpdateAction> actionClass)
@@ -53,24 +53,28 @@ public class ActionManager implements Iterable<UpdateAction> {
*
* @return iterator for UpdateActions
*/
public Iterator<UpdateAction> iterator()
@Override
public Iterator<UpdateAction> iterator()
{
return new Iterator<UpdateAction>()
{
private Iterator<Class<? extends UpdateAction>> itr = registry.keySet().iterator();
public boolean hasNext()
@Override
public boolean hasNext()
{
return itr.hasNext();
}
public UpdateAction next()
@Override
public UpdateAction next()
{
return registry.get(itr.next());
}
//not supported
public void remove()
@Override
public void remove()
{
throw new UnsupportedOperationException();
}

View File

@@ -17,17 +17,16 @@ import java.util.ArrayList;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.DCDate;
import org.dspace.content.FormatIdentifier;
import org.dspace.content.InstallItem;
import org.dspace.content.Item;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.*;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamFormatService;
import org.dspace.content.service.InstallItemService;
import org.dspace.core.Context;
import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.GroupService;
/**
* Action to add bitstreams listed in item contents file to the item in DSpace
@@ -36,6 +35,11 @@ import org.dspace.eperson.Group;
*/
public class AddBitstreamsAction extends UpdateBitstreamsAction {
protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
protected BitstreamFormatService bitstreamFormatService = ContentServiceFactory.getInstance().getBitstreamFormatService();
protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService();
protected InstallItemService installItemService = ContentServiceFactory.getInstance().getInstallItemService();
public AddBitstreamsAction()
{
//empty
@@ -54,7 +58,8 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction {
* @throws AuthorizeException
* @throws SQLException
*/
public void execute(Context context, ItemArchive itarch, boolean isTest,
@Override
public void execute(Context context, ItemArchive itarch, boolean isTest,
boolean suppressUndo) throws IllegalArgumentException,
ParseException, IOException, AuthorizeException, SQLException
{
@@ -107,12 +112,12 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction {
String append = ". Added " + Integer.toString(bitstream_bundles_updated)
+ " bitstream(s) on " + DCDate.getCurrent() + " : "
+ InstallItem.getBitstreamProvenanceMessage(item);
MetadataUtilities.appendMetadata(item, dtom, false, append);
+ installItemService.getBitstreamProvenanceMessage(context, item);
MetadataUtilities.appendMetadata(context, item, dtom, false, append);
}
}
private String addBitstream(Context context, ItemArchive itarch, Item item, File dir,
protected String addBitstream(Context context, ItemArchive itarch, Item item, File dir,
ContentsEntry ce, boolean suppressUndo, boolean isTest)
throws IOException, IllegalArgumentException, SQLException, AuthorizeException, ParseException
{
@@ -141,23 +146,24 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction {
if (!isTest)
{
// find the bundle
Bundle[] bundles = item.getBundles(newBundleName);
List<Bundle> bundles = itemService.getBundles(item, newBundleName);
Bundle targetBundle = null;
if (bundles.length < 1)
if (bundles.size() < 1)
{
// not found, create a new one
targetBundle = item.createBundle(newBundleName);
targetBundle = bundleService.create(context, item, newBundleName);
}
else
{
//verify bundle + name are not duplicates
for (Bundle b : bundles)
{
Bitstream[] bitstreams = b.getBitstreams();
for (Bitstream bsm : bitstreams)
List<BundleBitstream> bundleBitstreams = b.getBitstreams();
for (BundleBitstream bundleBitstream : bundleBitstreams)
{
if (bsm.getName().equals(ce.filename))
Bitstream bsm = bundleBitstream.getBitstream();
if (bsm.getName().equals(ce.filename))
{
throw new IllegalArgumentException("Duplicate bundle + filename cannot be added: "
+ b.getName() + " + " + bsm.getName());
@@ -166,39 +172,35 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction {
}
// select first bundle
targetBundle = bundles[0];
targetBundle = bundles.iterator().next();
}
bs = targetBundle.createBitstream(bis);
bs.setName(ce.filename);
bs = bitstreamService.create(context, targetBundle, bis);
bs.setName(context, ce.filename);
// Identify the format
// FIXME - guessing format guesses license.txt incorrectly as a text file format!
BitstreamFormat fmt = FormatIdentifier.guessFormat(context, bs);
bs.setFormat(fmt);
BitstreamFormat fmt = bitstreamFormatService.guessFormat(context, bs);
bitstreamService.setFormat(context, bs, fmt);
if (ce.description != null)
{
bs.setDescription(ce.description);
bs.setDescription(context, ce.description);
}
if ((ce.permissionsActionId != -1) && (ce.permissionsGroupName != null))
{
Group group = Group.findByName(context, ce.permissionsGroupName);
Group group = groupService.findByName(context, ce.permissionsGroupName);
if (group != null)
{
AuthorizeManager.removeAllPolicies(context, bs); // remove the default policy
ResourcePolicy rp = ResourcePolicy.create(context);
rp.setResource(bs);
rp.setAction(ce.permissionsActionId);
rp.setGroup(group);
rp.update();
authorizeService.removeAllPolicies(context, bs); // remove the default policy
authorizeService.createResourcePolicy(context, bs, group, null, ce.permissionsActionId, null);
}
}
//update after all changes are applied
bs.update();
bitstreamService.update(context, bs);
if (!suppressUndo)
{

View File

@@ -8,13 +8,17 @@
package org.dspace.app.itemupdate;
import java.sql.SQLException;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Metadatum;
import org.dspace.content.MetadataValue;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.core.Context;
/**
@@ -22,7 +26,10 @@ import org.dspace.core.Context;
*
*/
public class AddMetadataAction extends UpdateMetadataAction {
protected MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance().getMetadataSchemaService();
protected MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService();
/**
* Adds metadata specified in the source archive
*
@@ -33,7 +40,8 @@ public class AddMetadataAction extends UpdateMetadataAction {
* @throws AuthorizeException
* @throws SQLException
*/
public void execute(Context context, ItemArchive itarch, boolean isTest,
@Override
public void execute(Context context, ItemArchive itarch, boolean isTest,
boolean suppressUndo) throws AuthorizeException, SQLException
{
Item item = itarch.getItem();
@@ -47,13 +55,13 @@ public class AddMetadataAction extends UpdateMetadataAction {
{
// match against metadata for this field/value in repository
// qualifier must be strictly matched, possibly null
Metadatum[] ardcv = null;
ardcv = item.getMetadata(dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
List<MetadataValue> ardcv = null;
ardcv = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
boolean found = false;
for (Metadatum dcv : ardcv)
for (MetadataValue dcv : ardcv)
{
if (dcv.value.equals(dtom.value))
if (dcv.getValue().equals(dtom.value))
{
found = true;
break;
@@ -72,7 +80,7 @@ public class AddMetadataAction extends UpdateMetadataAction {
ItemUpdate.pr("Metadata to add: " + dtom.toString());
//validity tests that would occur in actual processing
// If we're just test the import, let's check that the actual metadata field exists.
MetadataSchema foundSchema = MetadataSchema.find(context, dtom.schema);
MetadataSchema foundSchema = metadataSchemaService.find(context, dtom.schema);
if (foundSchema == null)
{
@@ -81,8 +89,7 @@ public class AddMetadataAction extends UpdateMetadataAction {
}
else
{
int schemaID = foundSchema.getSchemaID();
MetadataField foundField = MetadataField.findByElement(context, schemaID, dtom.element, dtom.qualifier);
MetadataField foundField = metadataFieldService.findByElement(context, foundSchema, dtom.element, dtom.qualifier);
if (foundField == null)
{
@@ -93,7 +100,7 @@ public class AddMetadataAction extends UpdateMetadataAction {
}
else
{
item.addMetadata(dtom.schema, dtom.element, dtom.qualifier, dtom.language, dtom.value);
itemService.addMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, dtom.language, dtom.value);
ItemUpdate.pr("Metadata added: " + dtom.toString());
if (!suppressUndo)
@@ -102,10 +109,12 @@ public class AddMetadataAction extends UpdateMetadataAction {
//ItemUpdate.pr("Undo metadata: " + dtom);
// add all as a replace record to be preceded by delete
for (Metadatum dcval : ardcv)
{
itarch.addUndoMetadataField(DtoMetadata.create(dcval.schema, dcval.element,
dcval.qualifier, dcval.language, dcval.value));
for (MetadataValue dcval : ardcv)
{
MetadataField metadataField = dcval.getMetadataField();
MetadataSchema metadataSchema = metadataField.getMetadataSchema();
itarch.addUndoMetadataField(DtoMetadata.create(metadataSchema.getName(), metadataField.getElement(),
metadataField.getQualifier(), dcval.getLanguage(), dcval.getValue()));
}
}

View File

@@ -8,9 +8,11 @@
package org.dspace.app.itemupdate;
import java.sql.SQLException;
import java.util.List;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.BundleBitstream;
/**
* BitstreamFilter implementation to filter by bundle name
@@ -33,7 +35,8 @@ public class BitstreamFilterByBundleName extends BitstreamFilter {
* @return whether bitstream is in bundle
*
*/
public boolean accept(Bitstream bitstream)
@Override
public boolean accept(Bitstream bitstream)
throws BitstreamFilterException
{
if (bundleName == null)
@@ -47,10 +50,11 @@ public class BitstreamFilterByBundleName extends BitstreamFilter {
try
{
Bundle[] bundles = bitstream.getBundles();
for (Bundle b : bundles)
List<BundleBitstream> bundles = bitstream.getBundles();
for (BundleBitstream bundleBitstream : bundles)
{
if (b.getName().equals(bundleName))
Bundle b = bundleBitstream.getBundle();
if (b.getName().equals(bundleName))
{
return true;
}

View File

@@ -17,8 +17,8 @@ import org.dspace.content.Bitstream;
*/
public class BitstreamFilterByFilename extends BitstreamFilter {
private Pattern pattern;
private String filenameRegex;
protected Pattern pattern;
protected String filenameRegex;
public BitstreamFilterByFilename()
{
@@ -31,7 +31,8 @@ public class BitstreamFilterByFilename extends BitstreamFilter {
*
* @return whether bitstream name matches the regular expression
*/
public boolean accept(Bitstream bitstream) throws BitstreamFilterException
@Override
public boolean accept(Bitstream bitstream) throws BitstreamFilterException
{
if (filenameRegex == null)
{

View File

@@ -41,7 +41,7 @@ public class ContentsEntry
final int permissionsActionId;
final String description;
private ContentsEntry(String filename,
protected ContentsEntry(String filename,
String bundlename,
int permissionsActionId,
String permissionsGroupName,

View File

@@ -14,10 +14,7 @@ import java.text.ParseException;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.DCDate;
import org.dspace.content.Item;
import org.dspace.content.*;
import org.dspace.core.Context;
/**
@@ -43,7 +40,8 @@ public class DeleteBitstreamsAction extends UpdateBitstreamsAction
* @throws AuthorizeException
* @throws SQLException
*/
public void execute(Context context, ItemArchive itarch, boolean isTest,
@Override
public void execute(Context context, ItemArchive itarch, boolean isTest,
boolean suppressUndo) throws IllegalArgumentException, IOException,
SQLException, AuthorizeException, ParseException
{
@@ -54,34 +52,35 @@ public class DeleteBitstreamsAction extends UpdateBitstreamsAction
}
else
{
List<Integer> list = MetadataUtilities.readDeleteContentsFile(f);
List<String> list = MetadataUtilities.readDeleteContentsFile(f);
if (list.isEmpty())
{
ItemUpdate.pr("Warning: empty delete_contents file for item " + itarch.getDirectoryName() );
}
else
{
for (int id : list)
for (String id : list)
{
try
{
Bitstream bs = Bitstream.find(context, id);
Bitstream bs = bitstreamService.findByIdOrLegacyId(context, id);
if (bs == null)
{
ItemUpdate.pr("Bitstream not found by id: " + id);
}
else
{
Bundle[] bundles = bs.getBundles();
for (Bundle b : bundles)
List<BundleBitstream> bundles = bs.getBundles();
for (BundleBitstream bundleBitstream : bundles)
{
if (isTest)
Bundle b = bundleBitstream.getBundle();
if (isTest)
{
ItemUpdate.pr("Delete bitstream with id = " + id);
}
else
{
b.removeBitstream(bs);
bundleService.removeBitstream(context, b, bs);
ItemUpdate.pr("Deleted bitstream with id = " + id);
}
@@ -92,12 +91,12 @@ public class DeleteBitstreamsAction extends UpdateBitstreamsAction
DtoMetadata dtom = DtoMetadata.create("dc.description.provenance", "en", "");
String append = "Bitstream " + bs.getName() + " deleted on " + DCDate.getCurrent() + "; ";
Item item = bundles[0].getItems()[0];
Item item = bundles.iterator().next().getBundle().getItems().iterator().next();
ItemUpdate.pr("Append provenance with: " + append);
if (!isTest)
{
MetadataUtilities.appendMetadata(item, dtom, false, append);
MetadataUtilities.appendMetadata(context, item, dtom, false, append);
}
}
}

View File

@@ -14,10 +14,7 @@ import java.util.ArrayList;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.DCDate;
import org.dspace.content.Item;
import org.dspace.content.*;
import org.dspace.core.Context;
/**
@@ -33,7 +30,7 @@ import org.dspace.core.Context;
*/
public class DeleteBitstreamsByFilterAction extends UpdateBitstreamsAction {
private BitstreamFilter filter;
protected BitstreamFilter filter;
/**
* Set filter
@@ -67,7 +64,8 @@ public class DeleteBitstreamsByFilterAction extends UpdateBitstreamsAction {
* @throws AuthorizeException
* @throws SQLException
*/
public void execute(Context context, ItemArchive itarch, boolean isTest,
@Override
public void execute(Context context, ItemArchive itarch, boolean isTest,
boolean suppressUndo) throws AuthorizeException,
BitstreamFilterException, IOException, ParseException, SQLException
{
@@ -75,16 +73,17 @@ public class DeleteBitstreamsByFilterAction extends UpdateBitstreamsAction {
List<String> deleted = new ArrayList<String>();
Item item = itarch.getItem();
Bundle[] bundles = item.getBundles();
List<Bundle> bundles = item.getBundles();
for (Bundle b : bundles)
{
Bitstream[] bitstreams = b.getBitstreams();
List<BundleBitstream> bitstreams = b.getBitstreams();
String bundleName = b.getName();
for (Bitstream bs : bitstreams)
{
if (filter.accept(bs))
for (BundleBitstream bundleBitstream : bitstreams)
{
Bitstream bs = bundleBitstream.getBitstream();
if (filter.accept(bs))
{
if (isTest)
{
@@ -98,7 +97,7 @@ public class DeleteBitstreamsByFilterAction extends UpdateBitstreamsAction {
{
deleted.add(bs.getName());
}
b.removeBitstream(bs);
bundleService.removeBitstream(context, b, bs);
ItemUpdate.pr("Deleted " + bundleName + " bitstream " + bs.getName()
+ " with id = " + bs.getID());
}
@@ -122,7 +121,7 @@ public class DeleteBitstreamsByFilterAction extends UpdateBitstreamsAction {
if (!isTest)
{
MetadataUtilities.appendMetadata(item, dtom, false, sb.toString());
MetadataUtilities.appendMetadata(context, item, dtom, false, sb.toString());
}
}
}

View File

@@ -7,10 +7,14 @@
*/
package org.dspace.app.itemupdate;
import java.sql.SQLException;
import java.text.ParseException;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Metadatum;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataValue;
import org.dspace.content.Item;
import org.dspace.core.Context;
@@ -31,17 +35,17 @@ public class DeleteMetadataAction extends UpdateMetadataAction {
* @throws ParseException
* @throws AuthorizeException
*/
public void execute(Context context, ItemArchive itarch, boolean isTest,
boolean suppressUndo) throws AuthorizeException, ParseException
{
@Override
public void execute(Context context, ItemArchive itarch, boolean isTest,
boolean suppressUndo) throws AuthorizeException, ParseException, SQLException {
Item item = itarch.getItem();
for (String f : targetFields)
{
DtoMetadata dummy = DtoMetadata.create(f, Item.ANY, "");
Metadatum[] ardcv = item.getMetadataByMetadataString(f);
List<MetadataValue> ardcv = itemService.getMetadataByMetadataString(item, f);
ItemUpdate.pr("Metadata to be deleted: ");
for (Metadatum dcv : ardcv)
for (MetadataValue dcv : ardcv)
{
ItemUpdate.pr(" " + MetadataUtilities.getDCValueString(dcv));
}
@@ -50,14 +54,16 @@ public class DeleteMetadataAction extends UpdateMetadataAction {
{
if (!suppressUndo)
{
for (Metadatum dcv : ardcv)
for (MetadataValue dcv : ardcv)
{
itarch.addUndoMetadataField(DtoMetadata.create(dcv.schema, dcv.element,
dcv.qualifier, dcv.language, dcv.value));
MetadataField metadataField = dcv.getMetadataField();
MetadataSchema metadataSchema = metadataField.getMetadataSchema();
itarch.addUndoMetadataField(DtoMetadata.create(metadataSchema.getName(), metadataField.getElement(),
metadataField.getQualifier(), dcv.getLanguage(), dcv.getValue()));
}
}
item.clearMetadata(dummy.schema, dummy.element, dummy.qualifier, Item.ANY);
itemService.clearMetadata(context, item, dummy.schema, dummy.element, dummy.qualifier, Item.ANY);
}
}
}

View File

@@ -30,7 +30,7 @@ class DtoMetadata
final String language;
final String value;
private DtoMetadata(String schema, String element, String qualifier, String language, String value)
protected DtoMetadata(String schema, String element, String qualifier, String language, String value)
{
this.schema = schema;
this.element = element;

View File

@@ -19,7 +19,9 @@ import java.io.OutputStream;
import java.io.PrintWriter;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
@@ -30,13 +32,15 @@ import javax.xml.transform.TransformerFactory;
import javax.xml.transform.TransformerConfigurationException;
import org.apache.log4j.Logger;
import org.dspace.content.ItemIterator;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.w3c.dom.Document;
@@ -48,24 +52,28 @@ public class ItemArchive {
private static final Logger log = Logger.getLogger(ItemArchive.class);
public static final String DUBLIN_CORE_XML = "dublin_core.xml";
private static DocumentBuilder builder = null;
private static Transformer transformer = null;
private List<DtoMetadata> dtomList = null;
private List<DtoMetadata> undoDtomList = new ArrayList<DtoMetadata>();
private List<Integer> undoAddContents = new ArrayList<Integer>(); // for undo of add
private Item item;
private File dir; // directory name in source archive for this item
private String dirname; //convenience
protected static DocumentBuilder builder = null;
protected Transformer transformer = null;
protected List<DtoMetadata> dtomList = null;
protected List<DtoMetadata> undoDtomList = new ArrayList<DtoMetadata>();
protected List<UUID> undoAddContents = new ArrayList<>(); // for undo of add
protected Item item;
protected File dir; // directory name in source archive for this item
protected String dirname; //convenience
protected HandleService handleService;
protected ItemService itemService;
//constructors
private ItemArchive()
protected ItemArchive()
{
// nothing
}
handleService = HandleServiceFactory.getInstance().getHandleService();
itemService = ContentServiceFactory.getInstance().getItemService();
}
/** factory method
*
@@ -119,7 +127,7 @@ public class ItemArchive {
return itarch;
}
private static DocumentBuilder getDocumentBuilder()
protected static DocumentBuilder getDocumentBuilder()
throws ParserConfigurationException
{
if (builder == null)
@@ -128,8 +136,8 @@ public class ItemArchive {
}
return builder;
}
private static Transformer getTransformer()
protected Transformer getTransformer()
throws TransformerConfigurationException
{
if (transformer == null)
@@ -188,7 +196,7 @@ public class ItemArchive {
* Add bitstream id to delete contents file
* @param bitstreamId
*/
public void addUndoDeleteContents(int bitstreamId)
public void addUndoDeleteContents(UUID bitstreamId)
{
this.undoAddContents.add(bitstreamId);
}
@@ -222,7 +230,7 @@ public class ItemArchive {
String handle = uri.substring(ItemUpdate.HANDLE_PREFIX.length());
DSpaceObject dso = HandleManager.resolveToObject(context, handle);
DSpaceObject dso = handleService.resolveToObject(context, handle);
if (dso instanceof Item)
{
item = (Item) dso;
@@ -260,16 +268,14 @@ public class ItemArchive {
this.addUndoMetadataField(dtom); //seed the undo list with the identifier field
ItemIterator itr = Item.findByMetadataField(context, dtom.schema, dtom.element, dtom.qualifier, dtom.value);
Iterator<Item> itr = itemService.findByMetadataField(context, dtom.schema, dtom.element, dtom.qualifier, dtom.value);
int count = 0;
while (itr.hasNext())
{
item = itr.next();
count++;
}
itr.close();
ItemUpdate.prv("items matching = " + count );
if (count != 1)
@@ -325,7 +331,7 @@ public class ItemArchive {
{
File f = new File(dir, ItemUpdate.DELETE_CONTENTS_FILE);
pw = new PrintWriter(new BufferedWriter(new FileWriter(f)));
for (Integer i : undoAddContents)
for (UUID i : undoAddContents)
{
pw.println(i);
}

View File

@@ -14,12 +14,7 @@ import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.*;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
@@ -28,9 +23,13 @@ import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
/**
*
@@ -73,7 +72,10 @@ public class ItemUpdate {
public static final Map<String, String> filterAliases = new HashMap<String, String>();
public static boolean verbose = false;
protected static final EPersonService epersonService = EPersonServiceFactory.getInstance().getEPersonService();
protected static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
static
{
filterAliases.put("ORIGINAL", "org.dspace.app.itemupdate.OriginalBitstreamFilter");
@@ -85,7 +87,8 @@ public class ItemUpdate {
// File listing filter to check for folders
static FilenameFilter directoryFilter = new FilenameFilter()
{
public boolean accept(File dir, String n)
@Override
public boolean accept(File dir, String n)
{
File f = new File(dir.getAbsolutePath() + File.separatorChar + n);
return f.isDirectory();
@@ -95,7 +98,8 @@ public class ItemUpdate {
// File listing filter to check for files (not directories)
static FilenameFilter fileFilter = new FilenameFilter()
{
public boolean accept(File dir, String n)
@Override
public boolean accept(File dir, String n)
{
File f = new File(dir.getAbsolutePath() + File.separatorChar + n);
return (f.isFile());
@@ -103,9 +107,9 @@ public class ItemUpdate {
};
// instance variables
private ActionManager actionMgr = new ActionManager();
private List<String> undoActionList = new ArrayList<String>();
private String eperson;
protected ActionManager actionMgr = new ActionManager();
protected List<String> undoActionList = new ArrayList<String>();
protected String eperson;
/**
*
@@ -387,8 +391,8 @@ public class ItemUpdate {
}
System.exit(status);
}
private void processArchive(Context context, String sourceDirPath, String itemField,
protected void processArchive(Context context, String sourceDirPath, String itemField,
String metadataIndexName, boolean alterProvenance, boolean isTest)
throws Exception
{
@@ -444,9 +448,7 @@ public class ItemUpdate {
if (!isTest)
{
Item item = itarch.getItem();
item.update(); //need to update before commit
context.commit();
item.decache();
itemService.update(context, item); //need to update before commit
}
ItemUpdate.pr("Item " + dirname + " completed");
successItemCount++;
@@ -512,7 +514,7 @@ public class ItemUpdate {
* @throws FileNotFoundException
* @throws IOException
*/
private File initUndoArchive(File sourceDir)
protected File initUndoArchive(File sourceDir)
throws FileNotFoundException, IOException
{
File parentDir = sourceDir.getCanonicalFile().getParentFile();
@@ -553,7 +555,7 @@ public class ItemUpdate {
//private void write
private void setEPerson(Context context, String eperson)
protected void setEPerson(Context context, String eperson)
throws Exception
{
if (eperson == null)
@@ -567,11 +569,11 @@ public class ItemUpdate {
if (eperson.indexOf('@') != -1)
{
// @ sign, must be an email
myEPerson = EPerson.findByEmail(context, eperson);
myEPerson = epersonService.findByEmail(context, eperson);
}
else
{
myEPerson = EPerson.find(context, Integer.parseInt(eperson));
myEPerson = epersonService.find(context, UUID.fromString(eperson));
}
if (myEPerson == null)

View File

@@ -32,6 +32,10 @@ import javax.xml.transform.stream.StreamResult;
import org.apache.commons.lang.StringUtils;
import org.apache.xpath.XPathAPI;
import org.dspace.content.*;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
@@ -40,9 +44,6 @@ import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Metadatum;
import org.dspace.content.Item;
import org.dspace.content.MetadataSchema;
import org.dspace.core.ConfigurationManager;
@@ -56,7 +57,9 @@ import org.dspace.core.ConfigurationManager;
*
*/
public class MetadataUtilities {
protected static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
/**
*
* Working around Item API to delete a value-specific Metadatum
@@ -72,32 +75,31 @@ public class MetadataUtilities {
*
* @return true if metadata field is found with matching value and was deleted
*/
public static boolean deleteMetadataByValue(Item item, DtoMetadata dtom, boolean isLanguageStrict)
{
Metadatum[] ar = null;
public static boolean deleteMetadataByValue(Context context, Item item, DtoMetadata dtom, boolean isLanguageStrict) throws SQLException {
List<MetadataValue> ar = null;
if (isLanguageStrict)
{ // get all for given type
ar = item.getMetadata(dtom.schema, dtom.element, dtom.qualifier, dtom.language);
ar = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, dtom.language);
}
else
{
ar = item.getMetadata(dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
ar = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
}
boolean found = false;
//build new set minus the one to delete
List<String> vals = new ArrayList<String>();
for (Metadatum dcv : ar)
for (MetadataValue dcv : ar)
{
if (dcv.value.equals(dtom.value))
if (dcv.getValue().equals(dtom.value))
{
found = true;
}
else
{
vals.add(dcv.value);
vals.add(dcv.getValue());
}
}
@@ -105,14 +107,14 @@ public class MetadataUtilities {
{
if (isLanguageStrict)
{
item.clearMetadata(dtom.schema, dtom.element, dtom.qualifier, dtom.language);
itemService.clearMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, dtom.language);
}
else
{
item.clearMetadata(dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
itemService.clearMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
}
item.addMetadata(dtom.schema, dtom.element, dtom.qualifier, dtom.language, vals.toArray(new String[vals.size()]));
itemService.addMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, dtom.language, vals);
}
return found;
}
@@ -126,57 +128,56 @@ public class MetadataUtilities {
* @param textToAppend
* @throws IllegalArgumentException - When target metadata field is not found
*/
public static void appendMetadata(Item item, DtoMetadata dtom, boolean isLanguageStrict,
public static void appendMetadata(Context context, Item item, DtoMetadata dtom, boolean isLanguageStrict,
String textToAppend)
throws IllegalArgumentException
{
Metadatum[] ar = null;
throws IllegalArgumentException, SQLException {
List<MetadataValue> ar = null;
// get all values for given element/qualifier
if (isLanguageStrict) // get all for given element/qualifier
{
ar = item.getMetadata(dtom.schema, dtom.element, dtom.qualifier, dtom.language);
ar = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, dtom.language);
}
else
{
ar = item.getMetadata(dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
ar = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
}
if (ar.length == 0)
if (ar.size() == 0)
{
throw new IllegalArgumentException("Metadata to append to not found");
}
int idx = 0; //index of field to change
if (ar.length > 1) //need to pick one, can't be sure it's the last one
if (ar.size() > 1) //need to pick one, can't be sure it's the last one
{
// TODO maybe get highest id ?
}
//build new set minus the one to delete
List<String> vals = new ArrayList<String>();
for (int i=0; i < ar.length; i++)
for (int i=0; i < ar.size(); i++)
{
if (i == idx)
{
vals.add(ar[i].value + textToAppend);
vals.add(ar.get(i).getValue() + textToAppend);
}
else
{
vals.add(ar[i].value);
vals.add(ar.get(i).getValue());
}
}
if (isLanguageStrict)
{
item.clearMetadata(dtom.schema, dtom.element, dtom.qualifier, dtom.language);
itemService.clearMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, dtom.language);
}
else
{
item.clearMetadata(dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
itemService.clearMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
}
item.addMetadata(dtom.schema, dtom.element, dtom.qualifier, dtom.language, vals.toArray(new String[vals.size()]));
itemService.addMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, dtom.language, vals);
}
/**
@@ -424,10 +425,10 @@ public class MetadataUtilities {
* @throws FileNotFoundException
* @throws IOException
*/
public static List<Integer> readDeleteContentsFile(File f)
public static List<String> readDeleteContentsFile(File f)
throws FileNotFoundException, IOException
{
List<Integer> list = new ArrayList<Integer>();
List<String> list = new ArrayList<>();
BufferedReader in = null;
@@ -444,16 +445,7 @@ public class MetadataUtilities {
continue;
}
int n = 0;
try
{
n = Integer.parseInt(line);
list.add(n);
}
catch(NumberFormatException e)
{
ItemUpdate.pr("Error reading delete contents line:" + e.toString());
}
list.add(line);
}
}
finally
@@ -477,10 +469,12 @@ public class MetadataUtilities {
* @param dcv
* @return string displaying elements of the Metadatum
*/
public static String getDCValueString(Metadatum dcv)
public static String getDCValueString(MetadataValue dcv)
{
return "schema: " + dcv.schema + "; element: " + dcv.element + "; qualifier: " + dcv.qualifier +
"; language: " + dcv.language + "; value: " + dcv.value;
MetadataField metadataField = dcv.getMetadataField();
MetadataSchema metadataSchema = metadataField.getMetadataSchema();
return "schema: " + metadataSchema.getName() + "; element: " + metadataField.getElement() + "; qualifier: " + metadataField.getQualifier() +
"; language: " + dcv.getLanguage() + "; value: " + dcv.getValue();
}
/**

View File

@@ -8,9 +8,11 @@
package org.dspace.app.itemupdate;
import java.sql.SQLException;
import java.util.List;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.BundleBitstream;
/**
* Filter all bitstreams in the ORIGINAL bundle
@@ -31,15 +33,17 @@ public class OriginalBitstreamFilter extends BitstreamFilterByBundleName
*
* @throws BitstreamFilterException
*/
public boolean accept(Bitstream bitstream)
@Override
public boolean accept(Bitstream bitstream)
throws BitstreamFilterException
{
try
{
Bundle[] bundles = bitstream.getBundles();
for (Bundle b : bundles)
List<BundleBitstream> bundles = bitstream.getBundles();
for (BundleBitstream bundleBitstream : bundles)
{
if (b.getName().equals("ORIGINAL"))
Bundle bundle = bundleBitstream.getBundle();
if (bundle.getName().equals("ORIGINAL"))
{
return true;
}

View File

@@ -8,8 +8,11 @@
package org.dspace.app.itemupdate;
import java.sql.SQLException;
import java.util.List;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.BundleBitstream;
/**
* Filter all bitstreams in the ORIGINAL bundle
@@ -18,7 +21,7 @@ import org.dspace.content.Bundle;
*/
public class OriginalWithDerivativesBitstreamFilter extends BitstreamFilter
{
private String[] bundlesToEmpty = { "ORIGINAL", "TEXT", "THUMBNAIL" };
protected String[] bundlesToEmpty = { "ORIGINAL", "TEXT", "THUMBNAIL" };
public OriginalWithDerivativesBitstreamFilter()
{
@@ -32,15 +35,17 @@ public class OriginalWithDerivativesBitstreamFilter extends BitstreamFilter
* @throws BitstreamFilterException
* @return true if bitstream is in specified bundles
*/
public boolean accept(Bitstream bitstream)
@Override
public boolean accept(Bitstream bitstream)
throws BitstreamFilterException
{
try
{
Bundle[] bundles = bitstream.getBundles();
for (Bundle b : bundles)
List<BundleBitstream> bundles = bitstream.getBundles();
for (BundleBitstream bundleBitstream : bundles)
{
for (String bn : bundlesToEmpty)
Bitstream b = bundleBitstream.getBitstream();
for (String bn : bundlesToEmpty)
{
if (b.getName().equals(bn))
{

View File

@@ -7,6 +7,8 @@
*/
package org.dspace.app.itemupdate;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
/**
@@ -15,6 +17,9 @@ import org.dspace.core.Context;
*/
public interface UpdateAction
{
public ItemService itemService = ContentServiceFactory.getInstance().getItemService();
/**
* Action to update item
*

View File

@@ -7,6 +7,10 @@
*/
package org.dspace.app.itemupdate;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.BundleService;
/**
* Base class for Bitstream actions
*
@@ -16,6 +20,10 @@ public abstract class UpdateBitstreamsAction implements UpdateAction {
protected boolean alterProvenance = true;
protected BundleService bundleService = ContentServiceFactory.getInstance().getBundleService();
protected BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();
/**
* Set variable to indicate that the dc.description.provenance field may
* be changed as a result of Bitstream changes by ItemUpdate

View File

@@ -19,6 +19,7 @@ import java.io.InputStream;
import javax.imageio.ImageIO;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
/**
@@ -30,6 +31,7 @@ import org.dspace.core.ConfigurationManager;
*/
public class BrandedPreviewJPEGFilter extends MediaFilter
{
@Override
public String getFilteredName(String oldFilename)
{
return oldFilename + ".preview.jpg";
@@ -39,6 +41,7 @@ public class BrandedPreviewJPEGFilter extends MediaFilter
* @return String bundle name
*
*/
@Override
public String getBundleName()
{
return "BRANDED_PREVIEW";
@@ -47,6 +50,7 @@ public class BrandedPreviewJPEGFilter extends MediaFilter
/**
* @return String bitstreamformat
*/
@Override
public String getFormatString()
{
return "JPEG";
@@ -55,6 +59,7 @@ public class BrandedPreviewJPEGFilter extends MediaFilter
/**
* @return String description
*/
@Override
public String getDescription()
{
return "Generated Branded Preview";
@@ -67,7 +72,8 @@ public class BrandedPreviewJPEGFilter extends MediaFilter
*
* @return InputStream the resulting input stream
*/
public InputStream getDestinationStream(InputStream source)
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception
{
// read in bitstream's image
@@ -92,7 +98,7 @@ public class BrandedPreviewJPEGFilter extends MediaFilter
// if verbose flag is set, print out dimensions
// to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("original size: " + xsize + "," + ysize);
}
@@ -105,7 +111,7 @@ public class BrandedPreviewJPEGFilter extends MediaFilter
// if verbose flag is set, print out extracted text
// to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("x scale factor: " + scaleFactor);
}
@@ -117,7 +123,7 @@ public class BrandedPreviewJPEGFilter extends MediaFilter
// if verbose flag is set, print out extracted text
// to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("new size: " + xsize + "," + ysize);
}
@@ -135,7 +141,7 @@ public class BrandedPreviewJPEGFilter extends MediaFilter
}
// if verbose flag is set, print details to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("created thumbnail size: " + xsize + ", "
+ ysize);
@@ -170,7 +176,7 @@ public class BrandedPreviewJPEGFilter extends MediaFilter
Brand brand = new Brand((int) xsize, brandHeight, new Font(brandFont, Font.PLAIN, brandFontPoint), 5);
BufferedImage brandImage = brand.create(ConfigurationManager.getProperty("webui.preview.brand"),
ConfigurationManager.getProperty("webui.preview.brand.abbrev"),
MediaFilterManager.getCurrentItem() == null ? "" : "hdl:" + MediaFilterManager.getCurrentItem().getHandle());
currentItem == null ? "" : "hdl:" + currentItem.getHandle());
g2d.drawImage(brandImage, (int)0, (int)ysize, (int) xsize, (int) 20, null);

View File

@@ -55,7 +55,7 @@ public interface FormatFilter
*
* @return result of filter's transformation, written out to a bitstream
*/
public InputStream getDestinationStream(InputStream source)
public InputStream getDestinationStream(Item item, InputStream source, boolean verbose)
throws Exception;
/**
@@ -77,7 +77,7 @@ public interface FormatFilter
* @return true if bitstream processing should continue,
* false if this bitstream should be skipped
*/
public boolean preProcessBitstream(Context c, Item item, Bitstream source)
public boolean preProcessBitstream(Context c, Item item, Bitstream source, boolean verbose)
throws Exception;
/**

View File

@@ -7,6 +7,8 @@
*/
package org.dspace.app.mediafilter;
import org.dspace.content.Item;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
@@ -22,6 +24,7 @@ import javax.swing.text.html.HTMLEditorKit;
public class HTMLFilter extends MediaFilter
{
@Override
public String getFilteredName(String oldFilename)
{
return oldFilename + ".txt";
@@ -31,6 +34,7 @@ public class HTMLFilter extends MediaFilter
* @return String bundle name
*
*/
@Override
public String getBundleName()
{
return "TEXT";
@@ -39,6 +43,7 @@ public class HTMLFilter extends MediaFilter
/**
* @return String bitstreamformat
*/
@Override
public String getFormatString()
{
return "Text";
@@ -47,6 +52,7 @@ public class HTMLFilter extends MediaFilter
/**
* @return String description
*/
@Override
public String getDescription()
{
return "Extracted text";
@@ -58,7 +64,8 @@ public class HTMLFilter extends MediaFilter
*
* @return InputStream the resulting input stream
*/
public InputStream getDestinationStream(InputStream source)
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception
{
// try and read the document - set to ignore character set directive,

View File

@@ -7,6 +7,8 @@
*/
package org.dspace.app.mediafilter;
import org.dspace.content.Item;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.InputStream;
@@ -27,14 +29,15 @@ public class ImageMagickImageThumbnailFilter extends ImageMagickThumbnailFilter
*
* @return InputStream the resulting input stream
*/
public InputStream getDestinationStream(InputStream source)
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception
{
File f = inputStreamToTempFile(source, "imthumb", ".tmp");
File f2 = null;
try
{
f2 = getThumbnailFile(f);
f2 = getThumbnailFile(f, verbose);
byte[] bytes = Files.readAllBytes(f2.toPath());
return new ByteArrayInputStream(bytes);
}

View File

@@ -7,13 +7,16 @@
*/
package org.dspace.app.mediafilter;
import org.dspace.content.Item;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.InputStream;
import java.nio.file.Files;
public class ImageMagickPdfThumbnailFilter extends ImageMagickThumbnailFilter {
public InputStream getDestinationStream(InputStream source)
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception
{
File f = inputStreamToTempFile(source, "impdfthumb", ".pdf");
@@ -21,8 +24,8 @@ public class ImageMagickPdfThumbnailFilter extends ImageMagickThumbnailFilter {
File f3 = null;
try
{
f2 = getImageFile(f, 0);
f3 = getThumbnailFile(f2);
f2 = getImageFile(f, 0, verbose);
f3 = getThumbnailFile(f2, verbose);
byte[] bytes = Files.readAllBytes(f3.toPath());
return new ByteArrayInputStream(bytes);
}
@@ -44,6 +47,7 @@ public class ImageMagickPdfThumbnailFilter extends ImageMagickThumbnailFilter {
}
public static final String[] PDF = {"Adobe PDF"};
@Override
public String[] getInputMIMETypes()
{
return PDF;

View File

@@ -16,11 +16,12 @@ import java.util.regex.PatternSyntaxException;
import javax.imageio.ImageIO;
import org.dspace.app.mediafilter.MediaFilter;
import org.dspace.app.mediafilter.SelfRegisterInputFormats;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.BundleBitstream;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.im4java.core.ConvertCmd;
import org.im4java.core.IM4JavaException;
@@ -36,12 +37,13 @@ import org.dspace.core.ConfigurationManager;
*/
public abstract class ImageMagickThumbnailFilter extends MediaFilter implements SelfRegisterInputFormats
{
private static int width = 180;
private static int height = 120;
private static boolean flatten = true;
protected static int width = 180;
protected static int height = 120;
private static boolean flatten = true;
static String bitstreamDescription = "IM Thumbnail";
static final String defaultPattern = "Generated Thumbnail";
static Pattern replaceRegex = Pattern.compile(defaultPattern);
protected final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
static {
String pre = ImageMagickThumbnailFilter.class.getName();
@@ -60,13 +62,13 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
} catch(PatternSyntaxException e) {
System.err.println("Invalid thumbnail replacement pattern: "+e.getMessage());
}
}
public ImageMagickThumbnailFilter() {
}
@Override
public String getFilteredName(String oldFilename)
{
return oldFilename + ".jpg";
@@ -76,6 +78,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
* @return String bundle name
*
*/
@Override
public String getBundleName()
{
return "THUMBNAIL";
@@ -84,6 +87,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
/**
* @return String bitstreamformat
*/
@Override
public String getFormatString()
{
return "JPEG";
@@ -92,12 +96,13 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
/**
* @return String bitstreamDescription
*/
@Override
public String getDescription()
{
return bitstreamDescription;
}
public static File inputStreamToTempFile(InputStream source, String prefix, String suffix) throws IOException {
public File inputStreamToTempFile(InputStream source, String prefix, String suffix) throws IOException {
File f = File.createTempFile(prefix, suffix);
f.deleteOnExit();
FileOutputStream fos = new FileOutputStream(f);
@@ -112,7 +117,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
return f;
}
public static File getThumbnailFile(File f) throws IOException, InterruptedException, IM4JavaException {
public File getThumbnailFile(File f, boolean verbose) throws IOException, InterruptedException, IM4JavaException {
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
@@ -120,14 +125,14 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
op.addImage(f.getAbsolutePath());
op.thumbnail(width, height);
op.addImage(f2.getAbsolutePath());
if (MediaFilterManager.isVerbose) {
if (verbose) {
System.out.println("IM Thumbnail Param: "+op);
}
cmd.run(op);
return f2;
}
public static File getImageFile(File f, int page) throws IOException, InterruptedException, IM4JavaException {
public File getImageFile(File f, int page, boolean verbose) throws IOException, InterruptedException, IM4JavaException {
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
@@ -139,20 +144,22 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
op.flatten();
}
op.addImage(f2.getAbsolutePath());
if (MediaFilterManager.isVerbose) {
if (verbose) {
System.out.println("IM Image Param: "+op);
}
cmd.run(op);
return f2;
}
public boolean preProcessBitstream(Context c, Item item, Bitstream source)
@Override
public boolean preProcessBitstream(Context c, Item item, Bitstream source, boolean verbose)
throws Exception
{
String nsrc = source.getName();
for(Bundle b: item.getBundles("THUMBNAIL")) {
for(Bitstream bit: b.getBitstreams()) {
String n = bit.getName();
for(Bundle b: itemService.getBundles(item, "THUMBNAIL")) {
for(BundleBitstream bundleBitstream: b.getBitstreams()) {
Bitstream bit = bundleBitstream.getBitstream();
String n = bit.getName();
if (n != null) {
if (nsrc != null) {
if (!n.startsWith(nsrc)) continue;
@@ -162,13 +169,13 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
//If anything other than a generated thumbnail is found, halt processing
if (description != null) {
if (replaceRegex.matcher(description).matches()) {
if (MediaFilterManager.isVerbose) {
if (verbose) {
System.out.println(description + " " + nsrc + " matches pattern and is replacable.");
}
continue;
}
if (description.equals(bitstreamDescription)) {
if (MediaFilterManager.isVerbose) {
if (verbose) {
System.out.println(bitstreamDescription + " " + nsrc + " is replacable.");
}
continue;
@@ -183,16 +190,19 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter implements
return true; //assume that the thumbnail is a custom one
}
@Override
public String[] getInputMIMETypes()
{
return ImageIO.getReaderMIMETypes();
}
@Override
public String[] getInputDescriptions()
{
return null;
}
@Override
public String[] getInputExtensions()
{
return ImageIO.getReaderFileSuffixes();

View File

@@ -18,6 +18,7 @@ import java.io.InputStream;
import javax.imageio.ImageIO;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
/**
@@ -29,6 +30,7 @@ import org.dspace.core.ConfigurationManager;
*/
public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
{
@Override
public String getFilteredName(String oldFilename)
{
return oldFilename + ".jpg";
@@ -38,6 +40,7 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
* @return String bundle name
*
*/
@Override
public String getBundleName()
{
return "THUMBNAIL";
@@ -46,6 +49,7 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
/**
* @return String bitstreamformat
*/
@Override
public String getFormatString()
{
return "JPEG";
@@ -54,6 +58,7 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
/**
* @return String description
*/
@Override
public String getDescription()
{
return "Generated Thumbnail";
@@ -65,7 +70,8 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
*
* @return InputStream the resulting input stream
*/
public InputStream getDestinationStream(InputStream source)
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception
{
// read in bitstream's image
@@ -87,7 +93,7 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
// if verbose flag is set, print out dimensions
// to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("original size: " + xsize + "," + ysize);
}
@@ -100,7 +106,7 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
// if verbose flag is set, print out extracted text
// to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("x scale factor: " + scale_factor);
}
@@ -112,7 +118,7 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
// if verbose flag is set, print out extracted text
// to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("new size: " + xsize + "," + ysize);
}
@@ -130,7 +136,7 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
}
// if verbose flag is set, print details to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("created thumbnail size: " + xsize + ", "
+ ysize);
@@ -173,16 +179,19 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
}
@Override
public String[] getInputMIMETypes()
{
return ImageIO.getReaderMIMETypes();
}
@Override
public String[] getInputDescriptions()
{
return null;
}
@Override
public String[] getInputExtensions()
{
// Temporarily disabled as JDK 1.6 only

View File

@@ -39,7 +39,8 @@ public abstract class MediaFilter implements FormatFilter
* @return true if bitstream processing should continue,
* false if this bitstream should be skipped
*/
public boolean preProcessBitstream(Context c, Item item, Bitstream source)
@Override
public boolean preProcessBitstream(Context c, Item item, Bitstream source, boolean verbose)
throws Exception
{
return true; //default to no pre-processing
@@ -62,6 +63,7 @@ public abstract class MediaFilter implements FormatFilter
* the bitstream which was generated by
* this filter.
*/
@Override
public void postProcessBitstream(Context c, Item item, Bitstream generatedBitstream)
throws Exception
{

View File

@@ -0,0 +1,332 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import org.apache.commons.cli.*;
import org.dspace.app.mediafilter.factory.MediaFilterServiceFactory;
import org.dspace.app.mediafilter.service.MediaFilterService;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.*;
import org.dspace.handle.factory.HandleServiceFactory;
import java.util.*;
/**
* MediaFilterManager is the class that invokes the media/format filters over the
* repository's content. A few command line flags affect the operation of the
* MFM: -v verbose outputs all extracted text to STDOUT; -f force forces all
* bitstreams to be processed, even if they have been before; -n noindex does not
* recreate index after processing bitstreams; -i [identifier] limits processing
* scope to a community, collection or item; and -m [max] limits processing to a
* maximum number of items.
*/
public class MediaFilterCLITool {

    /** Key (in dspace.cfg) which lists all enabled filters by name. */
    private static final String MEDIA_FILTER_PLUGINS_KEY = "filter.plugins";

    /** Prefix (in dspace.cfg) for all filter properties. */
    private static final String FILTER_PREFIX = "filter";

    /** Suffix (in dspace.cfg) for input formats supported by each filter. */
    private static final String INPUT_FORMATS_SUFFIX = "inputFormats";

    /**
     * Command-line entry point: parses the options described in the class
     * Javadoc, configures a {@link MediaFilterService} accordingly, and applies
     * the enabled format filters over the whole repository or the scope given
     * by the -i identifier.
     *
     * @param argv command-line arguments (see class Javadoc for the flags)
     * @throws Exception if option parsing or filter application fails in a way
     *         not handled below (the method otherwise exits via System.exit)
     */
    public static void main(String[] argv) throws Exception
    {
        // set headless for non-gui workstations (image filters may touch AWT)
        System.setProperty("java.awt.headless", "true");

        // create an options object and populate it
        CommandLineParser parser = new PosixParser();

        int status = 0;

        Options options = new Options();

        options.addOption("v", "verbose", false,
                "print all extracted text and other details to STDOUT");
        options.addOption("q", "quiet", false,
                "do not print anything except in the event of errors.");
        options.addOption("f", "force", false,
                "force all bitstreams to be processed");
        options.addOption("i", "identifier", true,
                "ONLY process bitstreams belonging to identifier");
        options.addOption("m", "maximum", true,
                "process no more than maximum items");
        options.addOption("h", "help", false, "help");

        // create a "plugin" option (to specify specific MediaFilter plugins to run)
        OptionBuilder.withLongOpt("plugins");
        OptionBuilder.withValueSeparator(',');
        OptionBuilder.withDescription(
                "ONLY run the specified Media Filter plugin(s)\n" +
                "listed from '" + MEDIA_FILTER_PLUGINS_KEY + "' in dspace.cfg.\n" +
                "Separate multiple with a comma (,)\n" +
                "(e.g. MediaFilterManager -p \n\"Word Text Extractor\",\"PDF Text Extractor\")");
        Option pluginOption = OptionBuilder.create('p');
        pluginOption.setArgs(Option.UNLIMITED_VALUES); //unlimited number of args
        options.addOption(pluginOption);

        // create a "skip" option (to specify communities/collections/items to skip)
        OptionBuilder.withLongOpt("skip");
        OptionBuilder.withValueSeparator(',');
        OptionBuilder.withDescription(
                "SKIP the bitstreams belonging to identifier\n" +
                "Separate multiple identifiers with a comma (,)\n" +
                "(e.g. MediaFilterManager -s \n 123456789/34,123456789/323)");
        Option skipOption = OptionBuilder.create('s');
        skipOption.setArgs(Option.UNLIMITED_VALUES); //unlimited number of args
        options.addOption(skipOption);

        boolean isVerbose = false;
        boolean isQuiet = false;
        boolean isForce = false; // default to not forced
        String identifier = null; // object scope limiter
        int max2Process = Integer.MAX_VALUE;
        Map<String, List<String>> filterFormats = new HashMap<>();

        CommandLine line = null;
        try
        {
            line = parser.parse(options, argv);
        }
        catch(MissingArgumentException e)
        {
            System.out.println("ERROR: " + e.getMessage());
            HelpFormatter myhelp = new HelpFormatter();
            myhelp.printHelp("MediaFilterManager\n", options);
            System.exit(1);
        }

        if (line.hasOption('h'))
        {
            HelpFormatter myhelp = new HelpFormatter();
            myhelp.printHelp("MediaFilterManager\n", options);
            System.exit(0);
        }

        if (line.hasOption('v'))
        {
            isVerbose = true;
        }

        isQuiet = line.hasOption('q');

        if (line.hasOption('f'))
        {
            isForce = true;
        }

        if (line.hasOption('i'))
        {
            identifier = line.getOptionValue('i');
        }

        if (line.hasOption('m'))
        {
            max2Process = Integer.parseInt(line.getOptionValue('m'));
            // NOTE(review): values <= 1 (including 1) are rejected as invalid;
            // this matches the historical behavior but means "-m 1" is ignored.
            if (max2Process <= 1)
            {
                System.out.println("Invalid maximum value '" +
                        line.getOptionValue('m') + "' - ignoring");
                max2Process = Integer.MAX_VALUE;
            }
        }

        String filterNames[] = null;
        if(line.hasOption('p'))
        {
            //specified which media filter plugins we are using
            filterNames = line.getOptionValues('p');

            if(filterNames==null || filterNames.length==0)
            {   //display error, since no plugins specified
                System.err.println("\nERROR: -p (-plugin) option requires at least one plugin to be specified.\n" +
                        "(e.g. MediaFilterManager -p \"Word Text Extractor\",\"PDF Text Extractor\")\n");
                HelpFormatter myhelp = new HelpFormatter();
                myhelp.printHelp("MediaFilterManager\n", options);
                System.exit(1);
            }
        }
        else
        {
            //retrieve list of all enabled media filter plugins!
            String enabledPlugins = ConfigurationManager.getProperty(MEDIA_FILTER_PLUGINS_KEY);
            // Guard against an NPE on split() when the key is absent from dspace.cfg
            if (enabledPlugins == null)
            {
                System.err.println("\nERROR: No media filter plugins are configured. " +
                        "Please set '" + MEDIA_FILTER_PLUGINS_KEY + "' in dspace.cfg.");
                System.exit(1);
            }
            filterNames = enabledPlugins.split(",\\s*");
        }

        MediaFilterService mediaFilterService = MediaFilterServiceFactory.getInstance().getMediaFilterService();
        mediaFilterService.setForce(isForce);
        mediaFilterService.setQuiet(isQuiet);
        mediaFilterService.setVerbose(isVerbose);
        mediaFilterService.setMax2Process(max2Process);

        //initialize an array of our enabled filters
        List<FormatFilter> filterList = new ArrayList<FormatFilter>();

        //set up each filter
        for(int i=0; i< filterNames.length; i++)
        {
            //get filter of this name & add to list of filters
            FormatFilter filter = (FormatFilter) PluginManager.getNamedPlugin(FormatFilter.class, filterNames[i]);
            if(filter==null)
            {
                System.err.println("\nERROR: Unknown MediaFilter specified (either from command-line or in dspace.cfg): '" + filterNames[i] + "'");
                System.exit(1);
            }
            else
            {
                filterList.add(filter);

                String filterClassName = filter.getClass().getName();

                String pluginName = null;

                //If this filter is a SelfNamedPlugin,
                //then the input formats it accepts may differ for
                //each "named" plugin that it defines.
                //So, we have to look for every key that fits the
                //following format: filter.<class-name>.<plugin-name>.inputFormats
                if( SelfNamedPlugin.class.isAssignableFrom(filter.getClass()) )
                {
                    //Get the plugin instance name for this class
                    pluginName = ((SelfNamedPlugin) filter).getPluginInstanceName();
                }

                //Retrieve our list of supported formats from dspace.cfg
                //For SelfNamedPlugins, format of key is:
                //  filter.<class-name>.<plugin-name>.inputFormats
                //For other MediaFilters, format of key is:
                //  filter.<class-name>.inputFormats
                String formats = ConfigurationManager.getProperty(
                        FILTER_PREFIX + "." + filterClassName +
                        (pluginName!=null ? "." + pluginName : "") +
                        "." + INPUT_FORMATS_SUFFIX);

                //add to internal map of filters to supported formats
                if (formats != null)
                {
                    //For SelfNamedPlugins, map key is:
                    //  <class-name><separator><plugin-name>
                    //For other MediaFilters, map key is just:
                    //  <class-name>
                    filterFormats.put(filterClassName +
                            (pluginName!=null ? MediaFilterService.FILTER_PLUGIN_SEPARATOR + pluginName : ""),
                            Arrays.asList(formats.split(",[\\s]*")));
                }
            }//end if filter!=null
        }//end for

        //If verbose, print out loaded mediafilter info
        if(isVerbose)
        {
            System.out.println("The following MediaFilters are enabled: ");
            Iterator<String> i = filterFormats.keySet().iterator();
            while(i.hasNext())
            {
                String filterName = i.next();
                System.out.println("Full Filter Name: " + filterName);
                String pluginName = null;
                if(filterName.contains(MediaFilterService.FILTER_PLUGIN_SEPARATOR))
                {
                    String[] fields = filterName.split(MediaFilterService.FILTER_PLUGIN_SEPARATOR);
                    filterName=fields[0];
                    pluginName=fields[1];
                }

                System.out.println(filterName +
                        (pluginName!=null? " (Plugin: " + pluginName + ")": ""));
            }
        }

        mediaFilterService.setFilterFormats(filterFormats);
        //store our filter list into an internal array
        mediaFilterService.setFilterClasses(filterList);

        //Retrieve list of identifiers to skip (if any)
        String skipIds[] = null;
        if(line.hasOption('s'))
        {
            //specified which identifiers to skip when processing
            skipIds = line.getOptionValues('s');

            if(skipIds==null || skipIds.length==0)
            {   //display error, since no identifiers specified to skip
                System.err.println("\nERROR: -s (-skip) option requires at least one identifier to SKIP.\n" +
                        "Make sure to separate multiple identifiers with a comma!\n" +
                        "(e.g. MediaFilterManager -s 123456789/34,123456789/323)\n");
                HelpFormatter myhelp = new HelpFormatter();
                myhelp.printHelp("MediaFilterManager\n", options);
                System.exit(0);
            }

            //save to a global skip list
            mediaFilterService.setSkipList(Arrays.asList(skipIds));
        }

        Context c = null;
        try
        {
            c = new Context();

            // have to be super-user to do the filtering
            c.turnOffAuthorisationSystem();

            // now apply the filters
            if (identifier == null)
            {
                mediaFilterService.applyFiltersAllItems(c);
            }
            else  // restrict application scope to identifier
            {
                DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(c, identifier);
                if (dso == null)
                {
                    throw new IllegalArgumentException("Cannot resolve "
                            + identifier + " to a DSpace object");
                }

                switch (dso.getType())
                {
                    case Constants.COMMUNITY:
                        mediaFilterService.applyFiltersCommunity(c, (Community) dso);
                        break;
                    case Constants.COLLECTION:
                        mediaFilterService.applyFiltersCollection(c, (Collection) dso);
                        break;
                    case Constants.ITEM:
                        mediaFilterService.applyFiltersItem(c, (Item) dso);
                        break;
                }
            }

            c.complete();
            c = null;
        }
        catch (Exception e)
        {
            // Report the failure instead of silently swallowing it; the
            // non-zero exit status alone gives the operator nothing to act on.
            System.err.println("ERROR: " + e.getMessage());
            e.printStackTrace(System.err);
            status = 1;
        }
        finally
        {
            // Abort (roll back) the context if it was not completed above
            if (c != null)
            {
                c.abort();
            }
        }
        System.exit(status);
    }
}

View File

@@ -1,850 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.MissingArgumentException;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang.ArrayUtils;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DCDate;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.ItemIterator;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.PluginManager;
import org.dspace.core.SelfNamedPlugin;
import org.dspace.eperson.Group;
import org.dspace.handle.HandleManager;
import org.dspace.search.DSIndexer;
/**
 * MediaFilterManager is the class that invokes the media/format filters over the
 * repository's content. A few command line flags affect the operation of the
 * MFM: -v verbose outputs all extracted text to STDOUT; -f force forces all
 * bitstreams to be processed, even if they have been before; -i [identifier]
 * limits processing scope to a community, collection or item; and -m [max]
 * limits processing to a maximum number of items.
 */
public class MediaFilterManager
{
private static final Logger log = Logger.getLogger(MediaFilterManager.class);
//key (in dspace.cfg) which lists all enabled filters by name
public static final String MEDIA_FILTER_PLUGINS_KEY = "filter.plugins";
//prefix (in dspace.cfg) for all filter properties
public static final String FILTER_PREFIX = "filter";
//suffix (in dspace.cfg) for input formats supported by each filter
public static final String INPUT_FORMATS_SUFFIX = "inputFormats";
static boolean isVerbose = false; // default to not verbose
static boolean isQuiet = false; // default is noisy
static boolean isForce = false; // default to not forced
static String identifier = null; // object scope limiter
static int max2Process = Integer.MAX_VALUE; // maximum number items to process
static int processed = 0; // number items processed
private static Item currentItem = null; // current item being processed
private static FormatFilter[] filterClasses = null;
private static final Map<String, List<String>> filterFormats = new HashMap<>();
private static List<String> skipList = null; //list of identifiers to skip during processing
private static final List<String> publicFiltersClasses = new ArrayList<>();
//separator in filterFormats Map between a filter class name and a plugin name,
//for MediaFilters which extend SelfNamedPlugin (\034 is "file separator" char)
public static final String FILTER_PLUGIN_SEPARATOR = "\034";
    // Load the list of filter classes whose derivative bitstreams should be made
    // publicly readable instead of inheriting the source bitstream's policies.
    // Configured as a comma-separated list of simple class names; whitespace
    // around each entry is trimmed. If the property is absent, the list stays empty.
    static {
        String publicPermissionFilters = ConfigurationManager.getProperty("filter.org.dspace.app.mediafilter.publicPermission");
        if(publicPermissionFilters != null) {
            String[] publicPermisionFiltersArray = publicPermissionFilters.split(",");
            for(String filter : publicPermisionFiltersArray) {
                publicFiltersClasses.add(filter.trim());
            }
        }
    }
public static void main(String[] argv) throws Exception
{
// set headless for non-gui workstations
System.setProperty("java.awt.headless", "true");
// create an options object and populate it
CommandLineParser parser = new PosixParser();
int status = 0;
Options options = new Options();
options.addOption("v", "verbose", false,
"print all extracted text and other details to STDOUT");
options.addOption("q", "quiet", false,
"do not print anything except in the event of errors.");
options.addOption("f", "force", false,
"force all bitstreams to be processed");
options.addOption("i", "identifier", true,
"ONLY process bitstreams belonging to identifier");
options.addOption("m", "maximum", true,
"process no more than maximum items");
options.addOption("h", "help", false, "help");
//create a "plugin" option (to specify specific MediaFilter plugins to run)
OptionBuilder.withLongOpt("plugins");
OptionBuilder.withValueSeparator(',');
OptionBuilder.withDescription(
"ONLY run the specified Media Filter plugin(s)\n" +
"listed from '" + MEDIA_FILTER_PLUGINS_KEY + "' in dspace.cfg.\n" +
"Separate multiple with a comma (,)\n" +
"(e.g. MediaFilterManager -p \n\"Word Text Extractor\",\"PDF Text Extractor\")");
Option pluginOption = OptionBuilder.create('p');
pluginOption.setArgs(Option.UNLIMITED_VALUES); //unlimited number of args
options.addOption(pluginOption);
//create a "skip" option (to specify communities/collections/items to skip)
OptionBuilder.withLongOpt("skip");
OptionBuilder.withValueSeparator(',');
OptionBuilder.withDescription(
"SKIP the bitstreams belonging to identifier\n" +
"Separate multiple identifiers with a comma (,)\n" +
"(e.g. MediaFilterManager -s \n 123456789/34,123456789/323)");
Option skipOption = OptionBuilder.create('s');
skipOption.setArgs(Option.UNLIMITED_VALUES); //unlimited number of args
options.addOption(skipOption);
CommandLine line = null;
try
{
line = parser.parse(options, argv);
}
catch(MissingArgumentException e)
{
System.out.println("ERROR: " + e.getMessage());
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("MediaFilterManager\n", options);
System.exit(1);
}
if (line.hasOption('h'))
{
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("MediaFilterManager\n", options);
System.exit(0);
}
if (line.hasOption('v'))
{
isVerbose = true;
}
isQuiet = line.hasOption('q');
if (line.hasOption('f'))
{
isForce = true;
}
if (line.hasOption('i'))
{
identifier = line.getOptionValue('i');
}
if (line.hasOption('m'))
{
max2Process = Integer.parseInt(line.getOptionValue('m'));
if (max2Process <= 1)
{
System.out.println("Invalid maximum value '" +
line.getOptionValue('m') + "' - ignoring");
max2Process = Integer.MAX_VALUE;
}
}
String filterNames[] = null;
if(line.hasOption('p'))
{
//specified which media filter plugins we are using
filterNames = line.getOptionValues('p');
if(filterNames==null || filterNames.length==0)
{ //display error, since no plugins specified
System.err.println("\nERROR: -p (-plugin) option requires at least one plugin to be specified.\n" +
"(e.g. MediaFilterManager -p \"Word Text Extractor\",\"PDF Text Extractor\")\n");
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("MediaFilterManager\n", options);
System.exit(1);
}
}
else
{
//retrieve list of all enabled media filter plugins!
String enabledPlugins = ConfigurationManager.getProperty(MEDIA_FILTER_PLUGINS_KEY);
filterNames = enabledPlugins.split(",\\s*");
}
//initialize an array of our enabled filters
List<FormatFilter> filterList = new ArrayList<FormatFilter>();
//set up each filter
for(int i=0; i< filterNames.length; i++)
{
//get filter of this name & add to list of filters
FormatFilter filter = (FormatFilter) PluginManager.getNamedPlugin(FormatFilter.class, filterNames[i]);
if(filter==null)
{
System.err.println("\nERROR: Unknown MediaFilter specified (either from command-line or in dspace.cfg): '" + filterNames[i] + "'");
System.exit(1);
}
else
{
filterList.add(filter);
String filterClassName = filter.getClass().getName();
String pluginName = null;
//If this filter is a SelfNamedPlugin,
//then the input formats it accepts may differ for
//each "named" plugin that it defines.
//So, we have to look for every key that fits the
//following format: filter.<class-name>.<plugin-name>.inputFormats
if( SelfNamedPlugin.class.isAssignableFrom(filter.getClass()) )
{
//Get the plugin instance name for this class
pluginName = ((SelfNamedPlugin) filter).getPluginInstanceName();
}
//Retrieve our list of supported formats from dspace.cfg
//For SelfNamedPlugins, format of key is:
// filter.<class-name>.<plugin-name>.inputFormats
//For other MediaFilters, format of key is:
// filter.<class-name>.inputFormats
String formats = ConfigurationManager.getProperty(
FILTER_PREFIX + "." + filterClassName +
(pluginName!=null ? "." + pluginName : "") +
"." + INPUT_FORMATS_SUFFIX);
//add to internal map of filters to supported formats
if (formats != null)
{
//For SelfNamedPlugins, map key is:
// <class-name><separator><plugin-name>
//For other MediaFilters, map key is just:
// <class-name>
filterFormats.put(filterClassName +
(pluginName!=null ? FILTER_PLUGIN_SEPARATOR + pluginName : ""),
Arrays.asList(formats.split(",[\\s]*")));
}
}//end if filter!=null
}//end for
//If verbose, print out loaded mediafilter info
if(isVerbose)
{
System.out.println("The following MediaFilters are enabled: ");
Iterator<String> i = filterFormats.keySet().iterator();
while(i.hasNext())
{
String filterName = i.next();
System.out.println("Full Filter Name: " + filterName);
String pluginName = null;
if(filterName.contains(FILTER_PLUGIN_SEPARATOR))
{
String[] fields = filterName.split(FILTER_PLUGIN_SEPARATOR);
filterName=fields[0];
pluginName=fields[1];
}
System.out.println(filterName +
(pluginName!=null? " (Plugin: " + pluginName + ")": ""));
}
}
//store our filter list into an internal array
filterClasses = (FormatFilter[]) filterList.toArray(new FormatFilter[filterList.size()]);
//Retrieve list of identifiers to skip (if any)
String skipIds[] = null;
if(line.hasOption('s'))
{
//specified which identifiers to skip when processing
skipIds = line.getOptionValues('s');
if(skipIds==null || skipIds.length==0)
{ //display error, since no identifiers specified to skip
System.err.println("\nERROR: -s (-skip) option requires at least one identifier to SKIP.\n" +
"Make sure to separate multiple identifiers with a comma!\n" +
"(e.g. MediaFilterManager -s 123456789/34,123456789/323)\n");
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("MediaFilterManager\n", options);
System.exit(0);
}
//save to a global skip list
skipList = Arrays.asList(skipIds);
}
Context c = null;
try
{
c = new Context();
// have to be super-user to do the filtering
c.turnOffAuthorisationSystem();
// now apply the filters
if (identifier == null)
{
applyFiltersAllItems(c);
}
else // restrict application scope to identifier
{
DSpaceObject dso = HandleManager.resolveToObject(c, identifier);
if (dso == null)
{
throw new IllegalArgumentException("Cannot resolve "
+ identifier + " to a DSpace object");
}
switch (dso.getType())
{
case Constants.COMMUNITY:
applyFiltersCommunity(c, (Community)dso);
break;
case Constants.COLLECTION:
applyFiltersCollection(c, (Collection)dso);
break;
case Constants.ITEM:
applyFiltersItem(c, (Item)dso);
break;
}
}
c.complete();
c = null;
}
catch (Exception e)
{
status = 1;
}
finally
{
if (c != null)
{
c.abort();
}
}
System.exit(status);
}
public static void applyFiltersAllItems(Context c) throws Exception
{
if(skipList!=null)
{
//if a skip-list exists, we need to filter community-by-community
//so we can respect what is in the skip-list
Community[] topLevelCommunities = Community.findAllTop(c);
for(int i=0; i<topLevelCommunities.length; i++)
{
applyFiltersCommunity(c, topLevelCommunities[i]);
}
}
else
{
//otherwise, just find every item and process
ItemIterator i = Item.findAll(c);
try
{
while (i.hasNext() && processed < max2Process)
{
applyFiltersItem(c, i.next());
}
}
finally
{
if (i != null)
{
i.close();
}
}
}
}
public static void applyFiltersCommunity(Context c, Community community)
throws Exception
{ //only apply filters if community not in skip-list
if(!inSkipList(community.getHandle()))
{
Community[] subcommunities = community.getSubcommunities();
for (int i = 0; i < subcommunities.length; i++)
{
applyFiltersCommunity(c, subcommunities[i]);
}
Collection[] collections = community.getCollections();
for (int j = 0; j < collections.length; j++)
{
applyFiltersCollection(c, collections[j]);
}
}
}
public static void applyFiltersCollection(Context c, Collection collection)
throws Exception
{
//only apply filters if collection not in skip-list
if(!inSkipList(collection.getHandle()))
{
ItemIterator i = collection.getItems();
try
{
while (i.hasNext() && processed < max2Process)
{
applyFiltersItem(c, i.next());
}
}
finally
{
if (i != null)
{
i.close();
}
}
}
}
    /**
     * Apply all loaded filters to a single item, unless its handle is on the
     * skip-list. Commits the context after each item that actually produced a
     * filtered bitstream, so earlier work survives a later failure, and then
     * decaches the item so long runs don't accumulate cached objects.
     */
    public static void applyFiltersItem(Context c, Item item) throws Exception
    {
        //only apply filters if item not in skip-list
        if(!inSkipList(item.getHandle()))
        {
            //cache this item in MediaFilterManager
            //so it can be accessed by MediaFilters as necessary
            currentItem = item;

            if (filterItem(c, item))
            {
                // commit changes after each filtered item
                c.commit();
                // increment processed count
                ++processed;
            }

            // clear item objects from context cache and internal cache
            item.decache();
            currentItem = null;
        }
    }
/**
* Iterate through the item's bitstreams in the ORIGINAL bundle, applying
* filters if possible.
*
* @return true if any bitstreams processed,
* false if none
*/
public static boolean filterItem(Context c, Item myItem) throws Exception
{
// get 'original' bundles
Bundle[] myBundles = myItem.getBundles("ORIGINAL");
boolean done = false;
for (int i = 0; i < myBundles.length; i++)
{
// now look at all of the bitstreams
Bitstream[] myBitstreams = myBundles[i].getBitstreams();
for (int k = 0; k < myBitstreams.length; k++)
{
done |= filterBitstream(c, myItem, myBitstreams[k]);
}
}
return done;
}
/**
* Attempt to filter a bitstream.
*
* An exception will be thrown if the media filter class cannot be
* instantiated. Exceptions from filtering will be logged to STDOUT and
* swallowed.
*
* @return true if bitstream processed,
* false if no applicable filter or already processed
*/
public static boolean filterBitstream(Context c, Item myItem,
Bitstream myBitstream) throws Exception
{
boolean filtered = false;
// iterate through filter classes. A single format may be actioned
// by more than one filter
for (int i = 0; i < filterClasses.length; i++)
{
//List fmts = (List)filterFormats.get(filterClasses[i].getClass().getName());
String pluginName = null;
//if this filter class is a SelfNamedPlugin,
//its list of supported formats is different for
//differently named "plugin"
if( SelfNamedPlugin.class.isAssignableFrom(filterClasses[i].getClass()) )
{
//get plugin instance name for this media filter
pluginName = ((SelfNamedPlugin)filterClasses[i]).getPluginInstanceName();
}
//Get list of supported formats for the filter (and possibly named plugin)
//For SelfNamedPlugins, map key is:
// <class-name><separator><plugin-name>
//For other MediaFilters, map key is just:
// <class-name>
List<String> fmts = filterFormats.get(filterClasses[i].getClass().getName() +
(pluginName!=null ? FILTER_PLUGIN_SEPARATOR + pluginName : ""));
if (fmts.contains(myBitstream.getFormat().getShortDescription()))
{
try
{
// only update item if bitstream not skipped
if (processBitstream(c, myItem, myBitstream, filterClasses[i]))
{
myItem.update(); // Make sure new bitstream has a sequence
// number
filtered = true;
}
}
catch (Exception e)
{
String handle = myItem.getHandle();
Bundle[] bundles = myBitstream.getBundles();
long size = myBitstream.getSize();
String checksum = myBitstream.getChecksum() + " ("+myBitstream.getChecksumAlgorithm()+")";
int assetstore = myBitstream.getStoreNumber();
// Printout helpful information to find the errored bitstream.
System.out.println("ERROR filtering, skipping bitstream:\n");
System.out.println("\tItem Handle: "+ handle);
for (Bundle bundle : bundles)
{
System.out.println("\tBundle Name: " + bundle.getName());
}
System.out.println("\tFile Size: " + size);
System.out.println("\tChecksum: " + checksum);
System.out.println("\tAsset Store: " + assetstore);
System.out.println(e);
e.printStackTrace();
}
}
else if (filterClasses[i] instanceof SelfRegisterInputFormats)
{
// Filter implements self registration, so check to see if it should be applied
// given the formats it claims to support
SelfRegisterInputFormats srif = (SelfRegisterInputFormats)filterClasses[i];
boolean applyFilter = false;
// Check MIME type
String[] mimeTypes = srif.getInputMIMETypes();
if (mimeTypes != null)
{
for (String mimeType : mimeTypes)
{
if (mimeType.equalsIgnoreCase(myBitstream.getFormat().getMIMEType()))
{
applyFilter = true;
}
}
}
// Check description
if (!applyFilter)
{
String[] descriptions = srif.getInputDescriptions();
if (descriptions != null)
{
for (String desc : descriptions)
{
if (desc.equalsIgnoreCase(myBitstream.getFormat().getShortDescription()))
{
applyFilter = true;
}
}
}
}
// Check extensions
if (!applyFilter)
{
String[] extensions = srif.getInputExtensions();
if (extensions != null)
{
for (String ext : extensions)
{
String[] formatExtensions = myBitstream.getFormat().getExtensions();
if (formatExtensions != null && ArrayUtils.contains(formatExtensions, ext))
{
applyFilter = true;
}
}
}
}
// Filter claims to handle this type of file, so attempt to apply it
if (applyFilter)
{
try
{
// only update item if bitstream not skipped
if (processBitstream(c, myItem, myBitstream, filterClasses[i]))
{
myItem.update(); // Make sure new bitstream has a sequence
// number
filtered = true;
}
}
catch (Exception e)
{
System.out.println("ERROR filtering, skipping bitstream #"
+ myBitstream.getID() + " " + e);
e.printStackTrace();
}
}
}
}
return filtered;
}
    /**
     * A utility method that calls the virtual methods
     * from the given MediaFilter class.
     * It scans the bitstreams in an item, and decides if a bitstream has
     * already been filtered, and if not or if overWrite (force) is set,
     * invokes the filter and stores the derivative in the filter's bundle.
     *
     * @param c
     *            context
     * @param item
     *            item containing bitstream to process
     * @param source
     *            source bitstream to process
     * @param formatFilter
     *            FormatFilter to perform filtering
     *
     * @return true if new rendition is created, false if rendition already
     *         exists and overWrite is not set
     * @throws Exception from the filter's pre/post-processing or storage calls
     */
    public static boolean processBitstream(Context c, Item item, Bitstream source, FormatFilter formatFilter)
            throws Exception
    {
        //do pre-processing of this bitstream, and if it fails, skip this bitstream!
        if(!formatFilter.preProcessBitstream(c, item, source))
        {
            return false;
        }

        boolean overWrite = MediaFilterManager.isForce;

        // get bitstream filename, calculate destination filename
        String newName = formatFilter.getFilteredName(source.getName());

        Bitstream existingBitstream = null; // is there an existing rendition?
        Bundle targetBundle = null; // bundle we're modifying

        Bundle[] bundles = item.getBundles(formatFilter.getBundleName());

        // check if destination bitstream exists in the filter's target bundle
        if (bundles.length > 0)
        {
            // only finds the last match (FIXME?)
            for (int i = 0; i < bundles.length; i++)
            {
                Bitstream[] bitstreams = bundles[i].getBitstreams();

                for (int j = 0; j < bitstreams.length; j++)
                {
                    if (bitstreams[j].getName().equals(newName))
                    {
                        targetBundle = bundles[i];
                        existingBitstream = bitstreams[j];
                    }
                }
            }
        }

        // if exists and overwrite = false, exit without re-filtering
        if (!overWrite && (existingBitstream != null))
        {
            if (!isQuiet)
            {
                System.out.println("SKIPPED: bitstream " + source.getID()
                        + " (item: " + item.getHandle() + ") because '" + newName + "' already exists");
            }

            return false;
        }

        if(isVerbose) {
            System.out.println("PROCESSING: bitstream " + source.getID()
                    + " (item: " + item.getHandle() + ")");
        }

        // run the actual transformation; a null stream means the filter declined
        InputStream destStream;
        try {
            System.out.println("File: " + newName);
            destStream = formatFilter.getDestinationStream(source.retrieve());
            if (destStream == null)
            {
                if (!isQuiet)
                {
                    System.out.println("SKIPPED: bitstream " + source.getID()
                            + " (item: " + item.getHandle() + ") because filtering was unsuccessful");
                }

                return false;
            }
        }
        catch (OutOfMemoryError oome)
        {
            // treat memory exhaustion like a failed filter rather than dying
            System.out.println("!!! OutOfMemoryError !!!");
            return false;
        }

        // create new bundle if needed
        if (bundles.length < 1)
        {
            targetBundle = item.createBundle(formatFilter.getBundleName());
        }
        else
        {
            // take the first match
            targetBundle = bundles[0];
        }

        Bitstream b = targetBundle.createBitstream(destStream);

        // Now set the format and name of the bitstream
        b.setName(newName);
        b.setSource("Written by FormatFilter " + formatFilter.getClass().getName() +
                " on " + DCDate.getCurrent() + " (GMT).");
        b.setDescription(formatFilter.getDescription());

        // Find the proper format
        BitstreamFormat bf = BitstreamFormat.findByShortDescription(c,
                formatFilter.getFormatString());
        b.setFormat(bf);
        b.update();

        //Set permissions on the derivative bitstream
        //- First remove any existing policies
        AuthorizeManager.removeAllPolicies(c, b);

        //- Determine if this is a public-derivative format
        if(publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) {
            //- Set derivative bitstream to be publicly accessible
            // NOTE(review): group id 0 is presumably the built-in Anonymous group — confirm
            Group anonymous = Group.find(c, 0);
            AuthorizeManager.addPolicy(c, b, Constants.READ, anonymous);
        } else {
            //- Inherit policies from the source bitstream
            AuthorizeManager.inheritPolicies(c, source, b);
        }

        // fixme - set date?
        // we are overwriting, so remove old bitstream
        if (existingBitstream != null)
        {
            targetBundle.removeBitstream(existingBitstream);
        }

        if (!isQuiet)
        {
            System.out.println("FILTERED: bitstream " + source.getID()
                    + " (item: " + item.getHandle() + ") and created '" + newName + "'");
        }

        //do post-processing of the generated bitstream
        formatFilter.postProcessBitstream(c, item, b);

        return true;
    }
/**
* Return the item that is currently being processed/filtered
* by the MediaFilterManager.
* <p>
* This allows FormatFilters to retrieve the Item object
* in case they need access to item-level information for their format
* transformations/conversions.
*
* @return current Item being processed by MediaFilterManager
*/
    public static Item getCurrentItem()
    {
        // may be null when no item is currently being filtered
        return currentItem;
    }
/**
* Check whether or not to skip processing the given identifier.
*
* @param identifier
* identifier (handle) of a community, collection or item
*
* @return true if this community, collection or item should be skipped
* during processing. Otherwise, return false.
*/
public static boolean inSkipList(String identifier)
{
if(skipList!=null && skipList.contains(identifier))
{
if (!isQuiet)
{
System.out.println("SKIP-LIST: skipped bitstreams within identifier " + identifier);
}
return true;
}
else
{
return false;
}
}
}

View File

@@ -0,0 +1,481 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.InputStream;
import java.util.*;
import org.apache.log4j.Logger;
import org.dspace.app.mediafilter.service.MediaFilterService;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.*;
import org.dspace.content.Collection;
import org.dspace.content.service.*;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.SelfNamedPlugin;
import org.dspace.eperson.Group;
import org.dspace.eperson.service.GroupService;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * MediaFilterServiceImpl applies the configured media/format filters over the
 * repository's content — either across all items, or restricted to a single
 * community, collection or item. Behaviour such as verbosity, forced
 * re-processing, the maximum number of items to process, and the skip-list is
 * configured through the setters declared on {@link MediaFilterService}
 * (typically by the command-line launcher).
 */
public class MediaFilterServiceImpl implements MediaFilterService, InitializingBean
{
@Autowired(required = true)
protected AuthorizeService authorizeService;
@Autowired(required = true)
protected BitstreamFormatService bitstreamFormatService;
@Autowired(required = true)
protected BitstreamService bitstreamService;
@Autowired(required = true)
protected BundleService bundleService;
@Autowired(required = true)
protected CollectionService collectionService;
@Autowired(required = true)
protected CommunityService communityService;
@Autowired(required = true)
protected GroupService groupService;
@Autowired(required = true)
protected ItemService itemService;
protected int max2Process = Integer.MAX_VALUE; // maximum number items to process
protected int processed = 0; // number items processed
protected Item currentItem = null; // current item being processed
protected List<FormatFilter> filterClasses = null;
protected Map<String, List<String>> filterFormats = new HashMap<>();
protected List<String> skipList = null; //list of identifiers to skip during processing
protected final List<String> publicFiltersClasses = new ArrayList<>();
protected boolean isVerbose = false;
protected boolean isQuiet = false;
protected boolean isForce = false; // default to not forced
@Override
public void afterPropertiesSet() throws Exception {
String publicPermissionFilters = ConfigurationManager.getProperty("filter.org.dspace.app.mediafilter.publicPermission");
if(publicPermissionFilters != null) {
String[] publicPermisionFiltersArray = publicPermissionFilters.split(",");
for(String filter : publicPermisionFiltersArray) {
publicFiltersClasses.add(filter.trim());
}
}
}
@Override
public void applyFiltersAllItems(Context context) throws Exception
{
if(skipList!=null)
{
//if a skip-list exists, we need to filter community-by-community
//so we can respect what is in the skip-list
List<Community> topLevelCommunities = communityService.findAllTop(context);
for (Community topLevelCommunity : topLevelCommunities) {
applyFiltersCommunity(context, topLevelCommunity);
}
}
else
{
//otherwise, just find every item and process
Iterator<Item> itemIterator = itemService.findAll(context);
while (itemIterator.hasNext() && processed < max2Process)
{
applyFiltersItem(context, itemIterator.next());
}
}
}
@Override
public void applyFiltersCommunity(Context context, Community community)
throws Exception
{ //only apply filters if community not in skip-list
if(!inSkipList(community.getHandle()))
{
List<Community> subcommunities = community.getSubcommunities();
for (Community subcommunity : subcommunities) {
applyFiltersCommunity(context, subcommunity);
}
List<Collection> collections = community.getCollections();
for (Collection collection : collections) {
applyFiltersCollection(context, collection);
}
}
}
@Override
public void applyFiltersCollection(Context context, Collection collection)
throws Exception
{
//only apply filters if collection not in skip-list
if(!inSkipList(collection.getHandle()))
{
Iterator<Item> itemIterator = itemService.findAllByCollection(context, collection);
while (itemIterator.hasNext() && processed < max2Process)
{
applyFiltersItem(context, itemIterator.next());
}
}
}
    /**
     * Apply all configured filters to a single item, unless its handle is on
     * the skip-list. The item is exposed via the currentItem field for the
     * duration of filtering so individual MediaFilters can access it.
     */
    @Override
    public void applyFiltersItem(Context c, Item item) throws Exception
    {
        //only apply filters if item not in skip-list
        if(!inSkipList(item.getHandle()))
        {
            //cache this item on the service
            //so it can be accessed by MediaFilters as necessary
            currentItem = item;

            if (filterItem(c, item))
            {
                // increment processed count
                ++processed;
            }
            // clear the internal current-item cache
            currentItem = null;
        }
    }
@Override
public boolean filterItem(Context context, Item myItem) throws Exception
{
// get 'original' bundles
List<Bundle> myBundles = itemService.getBundles(myItem, "ORIGINAL");
boolean done = false;
for (Bundle myBundle : myBundles) {
// now look at all of the bitstreams
List<BundleBitstream> myBitstreams = myBundle.getBitstreams();
for (BundleBitstream myBitstream : myBitstreams) {
done |= filterBitstream(context, myItem, myBitstream.getBitstream());
}
}
return done;
}
@Override
public boolean filterBitstream(Context context, Item myItem,
Bitstream myBitstream) throws Exception
{
boolean filtered = false;
// iterate through filter classes. A single format may be actioned
// by more than one filter
for (FormatFilter filterClass : filterClasses) {
//List fmts = (List)filterFormats.get(filterClasses[i].getClass().getName());
String pluginName = null;
//if this filter class is a SelfNamedPlugin,
//its list of supported formats is different for
//differently named "plugin"
if (SelfNamedPlugin.class.isAssignableFrom(filterClass.getClass())) {
//get plugin instance name for this media filter
pluginName = ((SelfNamedPlugin) filterClass).getPluginInstanceName();
}
//Get list of supported formats for the filter (and possibly named plugin)
//For SelfNamedPlugins, map key is:
// <class-name><separator><plugin-name>
//For other MediaFilters, map key is just:
// <class-name>
List<String> fmts = filterFormats.get(filterClass.getClass().getName() +
(pluginName != null ? FILTER_PLUGIN_SEPARATOR + pluginName : ""));
if (fmts.contains(myBitstream.getFormat(context).getShortDescription())) {
try {
// only update item if bitstream not skipped
if (processBitstream(context, myItem, myBitstream, filterClass)) {
itemService.update(context, myItem); // Make sure new bitstream has a sequence
// number
filtered = true;
}
} catch (Exception e) {
String handle = myItem.getHandle();
List<BundleBitstream> bundles = myBitstream.getBundles();
long size = myBitstream.getSize();
String checksum = myBitstream.getChecksum() + " (" + myBitstream.getChecksumAlgorithm() + ")";
int assetstore = myBitstream.getStoreNumber();
// Printout helpful information to find the errored bitstream.
System.out.println("ERROR filtering, skipping bitstream:\n");
System.out.println("\tItem Handle: " + handle);
for (BundleBitstream bundle : bundles) {
System.out.println("\tBundle Name: " + bundle.getBundle().getName());
}
System.out.println("\tFile Size: " + size);
System.out.println("\tChecksum: " + checksum);
System.out.println("\tAsset Store: " + assetstore);
System.out.println(e);
e.printStackTrace();
}
} else if (filterClass instanceof SelfRegisterInputFormats) {
// Filter implements self registration, so check to see if it should be applied
// given the formats it claims to support
SelfRegisterInputFormats srif = (SelfRegisterInputFormats) filterClass;
boolean applyFilter = false;
// Check MIME type
String[] mimeTypes = srif.getInputMIMETypes();
if (mimeTypes != null) {
for (String mimeType : mimeTypes) {
if (mimeType.equalsIgnoreCase(myBitstream.getFormat(context).getMIMEType())) {
applyFilter = true;
}
}
}
// Check description
if (!applyFilter) {
String[] descriptions = srif.getInputDescriptions();
if (descriptions != null) {
for (String desc : descriptions) {
if (desc.equalsIgnoreCase(myBitstream.getFormat(context).getShortDescription())) {
applyFilter = true;
}
}
}
}
// Check extensions
if (!applyFilter) {
String[] extensions = srif.getInputExtensions();
if (extensions != null) {
for (String ext : extensions) {
List<String> formatExtensions = myBitstream.getFormat(context).getExtensions();
if (formatExtensions != null && formatExtensions.contains(ext)) {
applyFilter = true;
}
}
}
}
// Filter claims to handle this type of file, so attempt to apply it
if (applyFilter) {
try {
// only update item if bitstream not skipped
if (processBitstream(context, myItem, myBitstream, filterClass)) {
itemService.update(context, myItem); // Make sure new bitstream has a sequence
// number
filtered = true;
}
} catch (Exception e) {
System.out.println("ERROR filtering, skipping bitstream #"
+ myBitstream.getID() + " " + e);
e.printStackTrace();
}
}
}
}
return filtered;
}
@Override
public boolean processBitstream(Context context, Item item, Bitstream source, FormatFilter formatFilter)
    throws Exception
{
    // Give the filter a chance to veto or prepare; a false return means
    // "skip this bitstream" (e.g. filter-specific preconditions not met).
    //do pre-processing of this bitstream, and if it fails, skip this bitstream!
    if(!formatFilter.preProcessBitstream(context, item, source, isVerbose))
    {
        return false;
    }
    boolean overWrite = isForce;
    // get bitstream filename, calculate destination filename
    String newName = formatFilter.getFilteredName(source.getName());
    Bitstream existingBitstream = null; // is there an existing rendition?
    Bundle targetBundle = null; // bundle we're modifying
    List<Bundle> bundles = itemService.getBundles(item, formatFilter.getBundleName());
    // check if destination bitstream exists
    if (bundles.size() > 0)
    {
        // only finds the last match (FIXME?)
        // NOTE(review): targetBundle set here is later overwritten by
        // bundles.get(0) below, so only existingBitstream from this search
        // is actually used when bundles is non-empty.
        for (Bundle bundle : bundles) {
            List<BundleBitstream> bitstreams = bundle.getBitstreams();
            for (BundleBitstream bundleBitstream : bitstreams) {
                if (bundleBitstream.getBitstream().getName().equals(newName)) {
                    targetBundle = bundle;
                    existingBitstream = bundleBitstream.getBitstream();
                }
            }
        }
    }
    // if exists and overwrite = false, exit
    if (!overWrite && (existingBitstream != null))
    {
        if (!isQuiet)
        {
            System.out.println("SKIPPED: bitstream " + source.getID()
                + " (item: " + item.getHandle() + ") because '" + newName + "' already exists");
        }
        return false;
    }
    if(isVerbose) {
        System.out.println("PROCESSING: bitstream " + source.getID()
            + " (item: " + item.getHandle() + ")");
    }
    // Run the actual media transformation; a null stream means the filter
    // declined/failed without throwing, and an OutOfMemoryError (e.g. huge
    // PDFs) is caught so one bad file does not abort the whole run.
    // NOTE(review): destStream does not appear to be closed explicitly here;
    // presumably bitstreamService.create() consumes/closes it — confirm.
    InputStream destStream;
    try {
        System.out.println("File: " + newName);
        destStream = formatFilter.getDestinationStream(item, bitstreamService.retrieve(context, source), isVerbose);
        if (destStream == null) {
            if (!isQuiet) {
                System.out.println("SKIPPED: bitstream " + source.getID()
                    + " (item: " + item.getHandle() + ") because filtering was unsuccessful");
            }
            return false;
        }
    } catch (OutOfMemoryError oome) {
        System.out.println("!!! OutOfMemoryError !!!");
        return false;
    }
    // create new bundle if needed
    if (bundles.size() < 1)
    {
        targetBundle = bundleService.create(context, item, formatFilter.getBundleName());
    }
    else
    {
        // take the first match
        targetBundle = bundles.get(0);
    }
    Bitstream b = bitstreamService.create(context, targetBundle, destStream);
    // Now set the format and name of the bitstream
    b.setName(context, newName);
    b.setSource(context, "Written by FormatFilter " + formatFilter.getClass().getName() +
        " on " + DCDate.getCurrent() + " (GMT).");
    b.setDescription(context, formatFilter.getDescription());
    // Find the proper format
    BitstreamFormat bf = bitstreamFormatService.findByShortDescription(context,
        formatFilter.getFormatString());
    bitstreamService.setFormat(context, b, bf);
    bitstreamService.update(context, b);
    //Set permissions on the derivative bitstream
    //- First remove any existing policies
    authorizeService.removeAllPolicies(context, b);
    //- Determine if this is a public-derivative format
    if(publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) {
        //- Set derivative bitstream to be publicly accessible
        Group anonymous = groupService.findByName(context, Group.ANONYMOUS);
        authorizeService.addPolicy(context, b, Constants.READ, anonymous);
    } else {
        //- Inherit policies from the source bitstream
        authorizeService.inheritPolicies(context, source, b);
    }
    // fixme - set date?
    // we are overwriting, so remove old bitstream
    if (existingBitstream != null)
    {
        bundleService.removeBitstream(context, targetBundle, existingBitstream);
    }
    if (!isQuiet)
    {
        System.out.println("FILTERED: bitstream " + source.getID()
            + " (item: " + item.getHandle() + ") and created '" + newName + "'");
    }
    //do post-processing of the generated bitstream
    formatFilter.postProcessBitstream(context, item, b);
    return true;
}
@Override
public Item getCurrentItem()
{
    // Item currently being filtered; exposed so FormatFilter implementations
    // can consult item-level metadata during a conversion.
    return currentItem;
}
@Override
public boolean inSkipList(String identifier)
{
    // Guard clause: with no skip-list configured, or the identifier absent
    // from it, the object is eligible for processing.
    if (skipList == null || !skipList.contains(identifier))
    {
        return false;
    }
    if (!isQuiet)
    {
        System.out.println("SKIP-LIST: skipped bitstreams within identifier " + identifier);
    }
    return true;
}
@Override
public void setVerbose(boolean isVerbose) {
    // Verbose mode: filters print processing detail / extracted text to STDOUT.
    this.isVerbose = isVerbose;
}
@Override
public void setQuiet(boolean isQuiet) {
    // Quiet mode: suppresses the SKIPPED/FILTERED console messages.
    this.isQuiet = isQuiet;
}
@Override
public void setForce(boolean isForce) {
    // Force mode: overwrite an already-existing derivative rendition.
    this.isForce = isForce;
}
@Override
public void setMax2Process(int max2Process) {
    // Upper bound on the number of items to process in this run (-m flag).
    this.max2Process = max2Process;
}
@Override
public void setFilterClasses(List<FormatFilter> filterClasses) {
    // The FormatFilter instances that will be applied to each bitstream.
    this.filterClasses = filterClasses;
}
@Override
public void setSkipList(List<String> skipList) {
    // Identifiers (handles) of communities/collections/items to skip;
    // consulted by inSkipList().
    this.skipList = skipList;
}
@Override
public void setFilterFormats(Map<String, List<String>> filterFormats) {
    // Map from filter class name (plus FILTER_PLUGIN_SEPARATOR + plugin name
    // for SelfNamedPlugins) to the list of supported format short descriptions.
    this.filterFormats = filterFormats;
}
}

View File

@@ -19,6 +19,7 @@ import java.io.Writer;
import org.apache.log4j.Logger;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.util.PDFTextStripper;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
/*
@@ -32,6 +33,7 @@ public class PDFFilter extends MediaFilter
private static Logger log = Logger.getLogger(PDFFilter.class);
@Override
public String getFilteredName(String oldFilename)
{
return oldFilename + ".txt";
@@ -41,6 +43,7 @@ public class PDFFilter extends MediaFilter
* @return String bundle name
*
*/
@Override
public String getBundleName()
{
return "TEXT";
@@ -49,6 +52,7 @@ public class PDFFilter extends MediaFilter
/**
* @return String bitstreamformat
*/
@Override
public String getFormatString()
{
return "Text";
@@ -57,6 +61,7 @@ public class PDFFilter extends MediaFilter
/**
* @return String description
*/
@Override
public String getDescription()
{
return "Extracted text";
@@ -68,7 +73,8 @@ public class PDFFilter extends MediaFilter
*
* @return InputStream the resulting input stream
*/
public InputStream getDestinationStream(InputStream source)
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception
{
try

View File

@@ -16,6 +16,7 @@ import org.apache.poi.hslf.extractor.PowerPointExtractor;
import org.apache.poi.POITextExtractor;
import org.apache.log4j.Logger;
import org.dspace.content.Item;
/*
* TODO: Allow user to configure extraction of only text or only notes
@@ -26,6 +27,7 @@ public class PowerPointFilter extends MediaFilter
private static Logger log = Logger.getLogger(PowerPointFilter.class);
@Override
public String getFilteredName(String oldFilename)
{
return oldFilename + ".txt";
@@ -35,6 +37,7 @@ public class PowerPointFilter extends MediaFilter
* @return String bundle name
*
*/
@Override
public String getBundleName()
{
return "TEXT";
@@ -45,6 +48,7 @@ public class PowerPointFilter extends MediaFilter
*
* TODO: Check that this is correct
*/
@Override
public String getFormatString()
{
return "Text";
@@ -53,6 +57,7 @@ public class PowerPointFilter extends MediaFilter
/**
* @return String description
*/
@Override
public String getDescription()
{
return "Extracted text";
@@ -64,7 +69,8 @@ public class PowerPointFilter extends MediaFilter
*
* @return InputStream the resulting input stream
*/
public InputStream getDestinationStream(InputStream source)
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception
{
@@ -101,7 +107,7 @@ public class PowerPointFilter extends MediaFilter
{
// if verbose flag is set, print out extracted text
// to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println(extractedText);
}

View File

@@ -13,6 +13,7 @@ import java.io.IOException;
import org.apache.log4j.Logger;
import org.dspace.content.Item;
import org.textmining.extraction.TextExtractor;
import org.textmining.extraction.word.WordTextExtractorFactory;
@@ -27,6 +28,7 @@ public class WordFilter extends MediaFilter
private static Logger log = Logger.getLogger(WordFilter.class);
@Override
public String getFilteredName(String oldFilename)
{
return oldFilename + ".txt";
@@ -36,6 +38,7 @@ public class WordFilter extends MediaFilter
* @return String bundle name
*
*/
@Override
public String getBundleName()
{
return "TEXT";
@@ -44,6 +47,7 @@ public class WordFilter extends MediaFilter
/**
* @return String bitstreamformat
*/
@Override
public String getFormatString()
{
return "Text";
@@ -52,6 +56,7 @@ public class WordFilter extends MediaFilter
/**
* @return String description
*/
@Override
public String getDescription()
{
return "Extracted text";
@@ -63,7 +68,8 @@ public class WordFilter extends MediaFilter
*
* @return InputStream the resulting input stream
*/
public InputStream getDestinationStream(InputStream source)
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception
{
// get input stream from bitstream
@@ -76,7 +82,7 @@ public class WordFilter extends MediaFilter
// if verbose flag is set, print out extracted text
// to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println(extractedText);
}

View File

@@ -18,6 +18,7 @@ import java.io.OutputStream;
import java.util.Arrays;
import org.apache.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Utils;
@@ -48,7 +49,7 @@ public class XPDF2Text extends MediaFilter
private static Logger log = Logger.getLogger(XPDF2Text.class);
// Command to get text from pdf; @infile@, @COMMAND@ are placeholders
private static final String XPDF_PDFTOTEXT_COMMAND[] =
protected static final String XPDF_PDFTOTEXT_COMMAND[] =
{
"@COMMAND@", "-q", "-enc", "UTF-8", "@infile@", "-"
};
@@ -57,27 +58,32 @@ public class XPDF2Text extends MediaFilter
// executable path that comes from DSpace config at runtime.
private String pdftotextPath = null;
@Override
public String getFilteredName(String oldFilename)
{
return oldFilename + ".txt";
}
@Override
public String getBundleName()
{
return "TEXT";
}
@Override
public String getFormatString()
{
return "Text";
}
@Override
public String getDescription()
{
return "Extracted Text";
}
public InputStream getDestinationStream(InputStream sourceStream)
@Override
public InputStream getDestinationStream(Item currentItem, InputStream sourceStream, boolean verbose)
throws Exception
{
// get configured value for path to XPDF command:

View File

@@ -29,6 +29,7 @@ import java.util.regex.Pattern;
import javax.imageio.ImageIO;
import org.apache.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Utils;
@@ -62,62 +63,67 @@ public class XPDF2Thumbnail extends MediaFilter
private static Logger log = Logger.getLogger(XPDF2Thumbnail.class);
// maximum size of either preview image dimension
private static final int MAX_PX = 800;
protected static final int MAX_PX = 800;
// maxium DPI - use common screen res, 100dpi.
private static final int MAX_DPI = 100;
protected static final int MAX_DPI = 100;
// command to get image from PDF; @FILE@, @OUTPUT@ are placeholders
private static final String XPDF_PDFTOPPM_COMMAND[] =
protected static final String XPDF_PDFTOPPM_COMMAND[] =
{
"@COMMAND@", "-q", "-f", "1", "-l", "1",
"-r", "@DPI@", "@FILE@", "@OUTPUTFILE@"
};
// command to get image from PDF; @FILE@, @OUTPUT@ are placeholders
private static final String XPDF_PDFINFO_COMMAND[] =
protected static final String XPDF_PDFINFO_COMMAND[] =
{
"@COMMAND@", "-f", "1", "-l", "1", "-box", "@FILE@"
};
// executable path for "pdftoppm", comes from DSpace config at runtime.
private String pdftoppmPath = null;
protected String pdftoppmPath = null;
// executable path for "pdfinfo", comes from DSpace config at runtime.
private String pdfinfoPath = null;
protected String pdfinfoPath = null;
// match line in pdfinfo output that describes file's MediaBox
private static final Pattern MEDIABOX_PATT = Pattern.compile(
protected static final Pattern MEDIABOX_PATT = Pattern.compile(
"^Page\\s+\\d+\\s+MediaBox:\\s+([\\.\\d-]+)\\s+([\\.\\d-]+)\\s+([\\.\\d-]+)\\s+([\\.\\d-]+)");
// also from thumbnail.maxwidth in config
private int xmax = 0;
protected int xmax = 0;
// backup default for size, on the large side.
private static final int DEFAULT_XMAX = 500;
protected static final int DEFAULT_XMAX = 500;
@Override
public String getFilteredName(String oldFilename)
{
return oldFilename + ".jpg";
}
@Override
public String getBundleName()
{
return "THUMBNAIL";
}
@Override
public String getFormatString()
{
return "JPEG";
}
@Override
public String getDescription()
{
return "Generated Thumbnail";
}
// canonical MediaFilter method to generate the thumbnail as stream.
public InputStream getDestinationStream(InputStream sourceStream)
@Override
public InputStream getDestinationStream(Item currentItem, InputStream sourceStream, boolean verbose)
throws Exception
{
// get config params
@@ -301,7 +307,7 @@ public class XPDF2Thumbnail extends MediaFilter
// if verbose flag is set, print out dimensions
// to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("original size: " + xsize + "," + ysize);
}
@@ -314,7 +320,7 @@ public class XPDF2Thumbnail extends MediaFilter
// if verbose flag is set, print out extracted text
// to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("x scale factor: " + scale_factor);
}
@@ -326,7 +332,7 @@ public class XPDF2Thumbnail extends MediaFilter
// if verbose flag is set, print out extracted text
// to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("new size: " + xsize + "," + ysize);
}
@@ -344,7 +350,7 @@ public class XPDF2Thumbnail extends MediaFilter
}
// if verbose flag is set, print details to STDOUT
if (MediaFilterManager.isVerbose)
if (verbose)
{
System.out.println("created thumbnail size: " + xsize + ", "
+ ysize);

View File

@@ -0,0 +1,25 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter.factory;
import org.dspace.app.mediafilter.service.MediaFilterService;
import org.dspace.utils.DSpace;
/**
* Abstract factory to get services for the mediafilter package, use MediaFilterServiceFactory.getInstance() to retrieve an implementation
*
* @author kevinvandevelde at atmire.com
*/
public abstract class MediaFilterServiceFactory {

    /** @return the MediaFilterService implementation wired into this factory */
    public abstract MediaFilterService getMediaFilterService();

    /**
     * Look up the factory bean registered under the name
     * "mediaFilterServiceFactory" in the DSpace service manager.
     *
     * @return the configured MediaFilterServiceFactory instance
     */
    public static MediaFilterServiceFactory getInstance(){
        return new DSpace().getServiceManager().getServiceByName("mediaFilterServiceFactory", MediaFilterServiceFactory.class);
    }
}

View File

@@ -0,0 +1,27 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter.factory;
import org.dspace.app.mediafilter.service.MediaFilterService;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Factory implementation to get services for the mediafilter package, use MediaFilterServiceFactory.getInstance() to retrieve an implementation
*
* @author kevinvandevelde at atmire.com
*/
public class MediaFilterServiceFactoryImpl extends MediaFilterServiceFactory {

    // Injected by Spring; required=true makes wiring failures fail fast at
    // context startup rather than surfacing as a null service at runtime.
    @Autowired(required = true)
    private MediaFilterService mediaFilterService;

    /** @return the Spring-injected MediaFilterService bean */
    @Override
    public MediaFilterService getMediaFilterService() {
        return mediaFilterService;
    }
}

View File

@@ -0,0 +1,126 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter.service;
import org.dspace.app.mediafilter.FormatFilter;
import org.dspace.content.Bitstream;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.core.Context;
import java.util.List;
import java.util.Map;
/**
 * MediaFilterService is the service that invokes the media/format filters over the
 * repository's content. A few command line flags affect its operation:
 * -v verbose outputs all extracted text to STDOUT; -f force forces all
 * bitstreams to be processed, even if they have been before; -n noindex does not
 * recreate the index after processing bitstreams; -i [identifier] limits processing
 * scope to a community, collection or item; and -m [max] limits processing to a
 * maximum number of items.
 */
public interface MediaFilterService {

    //separator in filterFormats Map between a filter class name and a plugin name,
    //for MediaFilters which extend SelfNamedPlugin (\034 is "file separator" char)
    public static final String FILTER_PLUGIN_SEPARATOR = "\034";

    /** Apply the configured filters to every item in the repository. */
    public void applyFiltersAllItems(Context context) throws Exception;

    /** Apply the configured filters to all items within the given community. */
    public void applyFiltersCommunity(Context context, Community community)
        throws Exception;

    /** Apply the configured filters to all items within the given collection. */
    public void applyFiltersCollection(Context context, Collection collection)
        throws Exception;

    /** Apply the configured filters to a single item. */
    public void applyFiltersItem(Context c, Item item) throws Exception;

    /**
     * Iterate through the item's bitstreams in the ORIGINAL bundle, applying
     * filters if possible.
     *
     * @return true if any bitstreams processed,
     *         false if none
     */
    public boolean filterItem(Context context, Item myItem) throws Exception;

    /**
     * Attempt to filter a bitstream.
     *
     * An exception will be thrown if the media filter class cannot be
     * instantiated. Exceptions from filtering will be logged to STDOUT and
     * swallowed.
     *
     * @return true if bitstream processed,
     *         false if no applicable filter or already processed
     */
    public boolean filterBitstream(Context c, Item myItem, Bitstream myBitstream) throws Exception;

    /**
     * A utility method that calls the virtual methods
     * from the current MediaFilter class.
     * It scans the bitstreams in an item, and decides if a bitstream has
     * already been filtered, and if not or if overWrite is set, invokes the
     * filter.
     *
     * @param context
     *            context
     * @param item
     *            item containing bitstream to process
     * @param source
     *            source bitstream to process
     * @param formatFilter
     *            FormatFilter to perform filtering
     *
     * @return true if new rendition is created, false if rendition already
     *         exists and overWrite is not set
     */
    public boolean processBitstream(Context context, Item item, Bitstream source, FormatFilter formatFilter)
        throws Exception;

    /**
     * Return the item that is currently being processed/filtered
     * by the MediaFilterManager.
     * <p>
     * This allows FormatFilters to retrieve the Item object
     * in case they need access to item-level information for their format
     * transformations/conversions.
     *
     * @return current Item being processed by MediaFilterManager
     */
    public Item getCurrentItem();

    /**
     * Check whether or not to skip processing the given identifier.
     *
     * @param identifier
     *            identifier (handle) of a community, collection or item
     *
     * @return true if this community, collection or item should be skipped
     *         during processing. Otherwise, return false.
     */
    public boolean inSkipList(String identifier);

    /** Enable/disable verbose output (extracted text printed to STDOUT). */
    public void setVerbose(boolean isVerbose);

    /** Enable/disable quiet mode (suppresses per-bitstream console messages). */
    public void setQuiet(boolean isQuiet);

    /** Enable/disable forced re-filtering of already-processed bitstreams. */
    public void setForce(boolean isForce);

    /** Limit the number of items processed in a single run. */
    public void setMax2Process(int max2Process);

    /** Configure the FormatFilter instances to apply. */
    public void setFilterClasses(List<FormatFilter> filterClasses);

    /** Configure identifiers (handles) to skip during processing. */
    public void setSkipList(List<String> skipList);

    /**
     * Configure the map from filter class name (plus FILTER_PLUGIN_SEPARATOR
     * and plugin name for SelfNamedPlugins) to supported format descriptions.
     */
    public void setFilterFormats(Map<String, List<String>> filterFormats);
}

View File

@@ -31,7 +31,9 @@ import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.PluginManager;
import org.dspace.eperson.EPerson;
import org.dspace.handle.HandleManager;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.workflow.WorkflowException;
/**
* Command-line interface to the Packager plugin.
@@ -113,12 +115,12 @@ import org.dspace.handle.HandleManager;
public class Packager
{
/* Various private global settings/options */
private String packageType = null;
private boolean submit = true;
private boolean userInteractionEnabled = true;
protected String packageType = null;
protected boolean submit = true;
protected boolean userInteractionEnabled = true;
// die from illegal command line
private static void usageError(String msg)
protected static void usageError(String msg)
{
System.out.println(msg);
System.out.println(" (run with -h flag for details)");
@@ -316,7 +318,7 @@ public class Packager
// find the EPerson, assign to context
Context context = new Context();
EPerson myEPerson = null;
myEPerson = EPerson.findByEmail(context, eperson);
myEPerson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, eperson);
if (myEPerson == null)
{
usageError("Error, eperson cannot be found: " + eperson);
@@ -339,7 +341,7 @@ public class Packager
//if a specific identifier was specified, make sure it is valid
if(identifier!=null && identifier.length()>0)
{
objToReplace = HandleManager.resolveToObject(context, identifier);
objToReplace = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, identifier);
if (objToReplace == null)
{
throw new IllegalArgumentException("Bad identifier/handle -- "
@@ -407,7 +409,7 @@ public class Packager
for (int i = 0; i < parents.length; i++)
{
// sanity check: did handle resolve?
parentObjs[i] = HandleManager.resolveToObject(context,
parentObjs[i] = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context,
parents[i]);
if (parentObjs[i] == null)
{
@@ -449,7 +451,7 @@ public class Packager
usageError("Error, Unknown package type: " + myPackager.packageType);
}
DSpaceObject dso = HandleManager.resolveToObject(context, identifier);
DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, identifier);
if (dso == null)
{
throw new IllegalArgumentException("Bad identifier/handle -- "
@@ -504,101 +506,96 @@ public class Packager
//NOTE: at this point, Parent may be null -- in which case it is up to the PackageIngester
// to either determine the Parent (from package contents) or throw an error.
try{
//If we are doing a recursive ingest, call ingestAll()
if(pkgParams.recursiveModeEnabled()) {
System.out.println("\nAlso ingesting all referenced packages (recursive mode)..");
System.out.println("This may take a while, please check your logs for ongoing status while we process each package.");
//If we are doing a recursive ingest, call ingestAll()
if(pkgParams.recursiveModeEnabled())
{
System.out.println("\nAlso ingesting all referenced packages (recursive mode)..");
System.out.println("This may take a while, please check your logs for ongoing status while we process each package.");
//ingest first package & recursively ingest anything else that package references (child packages, etc)
List<String> hdlResults = sip.ingestAll(context, parent, pkgFile, pkgParams, null);
//ingest first package & recursively ingest anything else that package references (child packages, etc)
List<String> hdlResults = sip.ingestAll(context, parent, pkgFile, pkgParams, null);
if (hdlResults != null) {
//Report total objects created
System.out.println("\nCREATED a total of " + hdlResults.size() + " DSpace Objects.");
if(hdlResults!=null)
{
//Report total objects created
System.out.println("\nCREATED a total of " + hdlResults.size() + " DSpace Objects.");
//ingest first package & recursively ingest anything else that package references (child packages, etc)
List<String> dsoResults = sip.ingestAll(context, parent, pkgFile, pkgParams, null);
String choiceString = null;
//Ask if user wants full list printed to command line, as this may be rather long.
if(this.userInteractionEnabled)
{
BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
System.out.print("\nWould you like to view a list of all objects that were created? [y/n]: ");
choiceString = input.readLine();
}
else
{
// user interaction disabled -- default answer to 'yes', as
// we want to provide user with as detailed a report as possible.
choiceString = "y";
}
if (dsoResults != null) {
//Report total objects created
System.out.println("\nCREATED a total of " + dsoResults.size() + " DSpace Objects.");
// Provide detailed report if user answered 'yes'
if (choiceString.equalsIgnoreCase("y"))
{
System.out.println("\n\n");
for(String result : hdlResults)
{
DSpaceObject dso = HandleManager.resolveToObject(context, result);
if(dso!=null)
String choiceString = null;
//Ask if user wants full list printed to command line, as this may be rather long.
if (this.userInteractionEnabled) {
BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
System.out.print("\nWould you like to view a list of all objects that were created? [y/n]: ");
choiceString = input.readLine();
} else {
// user interaction disabled -- default answer to 'yes', as
// we want to provide user with as detailed a report as possible.
choiceString = "y";
}
// Provide detailed report if user answered 'yes'
if (choiceString.equalsIgnoreCase("y"))
{
if(pkgParams.restoreModeEnabled())
System.out.println("\n\n");
for (String result : hdlResults)
{
System.out.println("RESTORED DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + ", dbID=" + dso.getID() + " ] ");
}
else
{
System.out.println("CREATED new DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + ", dbID=" + dso.getID() + " ] ");
DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, result);
if(dso!=null)
{
if (pkgParams.restoreModeEnabled()) {
System.out.println("RESTORED DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + ", dbID=" + dso.getID() + " ] ");
} else {
System.out.println("CREATED new DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + ", dbID=" + dso.getID() + " ] ");
}
}
}
}
}
}
} else {
}
}
else
{
//otherwise, just one package to ingest
try {
System.out.println("\n\n");
for (String result : hdlResults) {
DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, result);
//otherwise, just one package to ingest
try
{
if (dso != null) {
if (pkgParams.restoreModeEnabled()) {
System.out.println("RESTORED DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + ", dbID=" + dso.getID() + " ] ");
} else {
System.out.println("CREATED new DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + ", dbID=" + dso.getID() + " ] ");
}
}
}
} catch (IllegalStateException ie) {
// NOTE: if we encounter an IllegalStateException, this means the
// handle is already in use and this object already exists.
DSpaceObject dso = sip.ingest(context, parent, pkgFile, pkgParams, null);
if(dso!=null)
{
if(pkgParams.restoreModeEnabled())
{
System.out.println("RESTORED DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + ", dbID=" + dso.getID() + " ] ");
}
else
{
System.out.println("CREATED new DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + ", dbID=" + dso.getID() + " ] ");
//if we are skipping over (i.e. keeping) existing objects
if (pkgParams.keepExistingModeEnabled()) {
System.out.println("\nSKIPPED processing package '" + pkgFile + "', as an Object already exists with this handle.");
} else // Pass this exception on -- which essentially causes a full rollback of all changes (this is the default)
{
throw ie;
}
}
}
}
catch(IllegalStateException ie)
{
// NOTE: if we encounter an IllegalStateException, this means the
// handle is already in use and this object already exists.
//if we are skipping over (i.e. keeping) existing objects
if(pkgParams.keepExistingModeEnabled())
{
System.out.println("\nSKIPPED processing package '" + pkgFile + "', as an Object already exists with this handle.");
}
else // Pass this exception on -- which essentially causes a full rollback of all changes (this is the default)
{
throw ie;
}
}
} catch (WorkflowException e) {
throw new PackageException(e);
}
}
@@ -726,60 +723,53 @@ public class Packager
//If we are doing a recursive replace, call replaceAll()
if(pkgParams.recursiveModeEnabled())
{
//ingest first object using package & recursively replace anything else that package references (child objects, etc)
List<String> hdlResults = sip.replaceAll(context, objToReplace, pkgFile, pkgParams);
try {
if (pkgParams.recursiveModeEnabled()) {
//ingest first object using package & recursively replace anything else that package references (child objects, etc)
List<String> hdlResults = sip.replaceAll(context, objToReplace, pkgFile, pkgParams);
if(hdlResults!=null)
{
//Report total objects replaced
System.out.println("\nREPLACED a total of " + hdlResults.size() + " DSpace Objects.");
if (hdlResults != null) {
//Report total objects replaced
System.out.println("\nREPLACED a total of " + hdlResults.size() + " DSpace Objects.");
String choiceString = null;
//Ask if user wants full list printed to command line, as this may be rather long.
if(this.userInteractionEnabled)
{
BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
System.out.print("\nWould you like to view a list of all objects that were replaced? [y/n]: ");
choiceString = input.readLine();
}
else
{
// user interaction disabled -- default answer to 'yes', as
// we want to provide user with as detailed a report as possible.
choiceString = "y";
}
String choiceString = null;
//Ask if user wants full list printed to command line, as this may be rather long.
if (this.userInteractionEnabled) {
BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
System.out.print("\nWould you like to view a list of all objects that were replaced? [y/n]: ");
choiceString = input.readLine();
} else {
// user interaction disabled -- default answer to 'yes', as
// we want to provide user with as detailed a report as possible.
choiceString = "y";
}
// Provide detailed report if user answered 'yes'
if (choiceString.equalsIgnoreCase("y"))
{
System.out.println("\n\n");
for(String result : hdlResults)
{
DSpaceObject dso = HandleManager.resolveToObject(context, result);
if(dso!=null)
{
System.out.println("REPLACED DSpace " + Constants.typeText[dso.getType()] +
// Provide detailed report if user answered 'yes'
if (choiceString.equalsIgnoreCase("y")) {
System.out.println("\n\n");
for (String result : hdlResults) {
DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, result);
if (dso != null) {
System.out.println("REPLACED DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + " ] ");
}
}
}
}
} else {
//otherwise, just one object to replace
DSpaceObject dso = sip.replace(context, objToReplace, pkgFile, pkgParams);
}
}
else
{
//otherwise, just one object to replace
DSpaceObject dso = sip.replace(context, objToReplace, pkgFile, pkgParams);
if(dso!=null)
{
System.out.println("REPLACED DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + " ] ");
if (dso != null) {
System.out.println("REPLACED DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + " ] ");
}
}
} catch (WorkflowException e) {
throw new PackageException(e);
}
}

View File

@@ -7,138 +7,127 @@
*/
package org.dspace.app.requestitem;
import org.apache.log4j.Logger;
import org.dspace.core.Context;
import org.dspace.core.Utils;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.content.Bitstream;
import org.dspace.content.Item;
import java.sql.SQLException;
import javax.persistence.*;
import java.util.Date;
/**
* Object representing an Item Request
*/
@Entity
@Table(name="requestitem", schema = "public")
public class RequestItem {
private static Logger log = Logger.getLogger(RequestItem.class);
private int bitstreamId, itemID;
@Id
@Column(name="requestitem_id")
@GeneratedValue(strategy = GenerationType.SEQUENCE ,generator="requestitem_seq")
@SequenceGenerator(name="requestitem_seq", sequenceName="requestitem_seq", allocationSize = 1)
private int requestitem_id;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "bitstream_id")
private Bitstream bitstream;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "item_id")
private Item item;
@Column(name = "request_email", length = 64)
private String reqEmail;
@Column(name = "request_name", length = 64)
private String reqName;
// @Column(name = "request_message")
// @Lob
@Column(name="request_message", columnDefinition = "text")
private String reqMessage;
@Column(name = "token", unique = true, length = 48)
private String token;
@Column(name = "allfiles")
private boolean allfiles;
private Date decision_date;
private boolean accept_request;
public RequestItem(int itemID, int bitstreamId, String reqEmail, String reqName, String reqMessage, boolean allfiles){
this.itemID = itemID;
this.bitstreamId = bitstreamId;
this.reqEmail = reqEmail;
this.reqName = reqName;
this.reqMessage = reqMessage;
@Column(name = "decision_date")
@Temporal(TemporalType.TIMESTAMP)
private Date decision_date = null;
@Column(name = "expires")
@Temporal(TemporalType.TIMESTAMP)
private Date expires = null;
@Column(name = "request_date")
@Temporal(TemporalType.TIMESTAMP)
private Date request_date = null;
@Column(name = "accept_request")
private Boolean accept_request = null;
public RequestItem() {
}
public int getID() {
return requestitem_id;
}
void setAllfiles(boolean allfiles) {
this.allfiles = allfiles;
}
private RequestItem(TableRow record) {
this.itemID = record.getIntColumn("item_id");
this.bitstreamId = record.getIntColumn("bitstream_id");
this.token = record.getStringColumn("token");
this.reqEmail = record.getStringColumn("request_email");
this.reqName = record.getStringColumn("request_name");
this.reqMessage = record.getStringColumn("request_message");
this.allfiles = record.getBooleanColumn("allfiles");
this.decision_date = record.getDateColumn("decision_date");
this.accept_request = record.getBooleanColumn("accept_request");
}
public static RequestItem findByToken(Context context, String token) {
try {
TableRow requestItem = DatabaseManager.findByUnique(context, "requestitem", "token", token);
return new RequestItem(requestItem);
} catch (SQLException e) {
log.error(e.getMessage());
return null;
}
}
/**
* Save updates to the record. Only accept_request, and decision_date are set-able.
* @param context
*/
public void update(Context context) {
try {
TableRow record = DatabaseManager.findByUnique(context, "requestitem", "token", token);
record.setColumn("accept_request", accept_request);
record.setColumn("decision_date", decision_date);
DatabaseManager.update(context, record);
} catch (SQLException e) {
log.error(e.getMessage());
}
}
/**
* Generate a unique id of the request and put it into the DB
* @param context
* @return
* @throws java.sql.SQLException
*/
public String getNewToken(Context context) throws SQLException
{
TableRow record = DatabaseManager.create(context, "requestitem");
record.setColumn("token", Utils.generateHexKey());
record.setColumn("bitstream_id", bitstreamId);
record.setColumn("item_id", itemID);
record.setColumn("allfiles", allfiles);
record.setColumn("request_email", reqEmail);
record.setColumn("request_name", reqName);
record.setColumn("request_message", reqMessage);
record.setColumnNull("accept_request");
record.setColumn("request_date", new Date());
record.setColumnNull("decision_date");
record.setColumnNull("expires");
DatabaseManager.update(context, record);
if (log.isDebugEnabled())
{
log.debug("Created requestitem_token " + record.getIntColumn("requestitem_id")
+ " with token " + record.getStringColumn("token") + "\"");
}
return record.getStringColumn("token");
}
public boolean isAllfiles() {
return allfiles;
}
void setReqMessage(String reqMessage) {
this.reqMessage = reqMessage;
}
public String getReqMessage() {
return reqMessage;
}
void setReqName(String reqName) {
this.reqName = reqName;
}
public String getReqName() {
return reqName;
}
void setReqEmail(String reqEmail) {
this.reqEmail = reqEmail;
}
public String getReqEmail() {
return reqEmail;
}
void setToken(String token) {
this.token = token;
}
public String getToken() {
return token;
}
public int getItemID() {
return itemID;
void setItem(Item item) {
this.item = item;
}
public int getBitstreamId() {
return bitstreamId;
public Item getItem() {
return item;
}
void setBitstream(Bitstream bitstream) {
this.bitstream = bitstream;
}
public Bitstream getBitstream() {
return bitstream;
}
public Date getDecision_date() {
@@ -156,4 +145,20 @@ public class RequestItem {
public void setAccept_request(boolean accept_request) {
this.accept_request = accept_request;
}
public Date getExpires() {
return expires;
}
void setExpires(Date expires) {
this.expires = expires;
}
public Date getRequest_date() {
return request_date;
}
void setRequest_date(Date request_date) {
this.request_date = request_date;
}
}

View File

@@ -15,6 +15,8 @@ import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.service.EPersonService;
import org.springframework.beans.factory.annotation.Autowired;
import java.sql.SQLException;
@@ -29,7 +31,10 @@ import java.sql.SQLException;
*/
public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
private static Logger log = Logger.getLogger(RequestItemHelpdeskStrategy.class);
private Logger log = Logger.getLogger(RequestItemHelpdeskStrategy.class);
@Autowired(required = true)
protected EPersonService ePersonService;
public RequestItemHelpdeskStrategy() {}
@@ -57,13 +62,9 @@ public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
public RequestItemAuthor getHelpDeskPerson(Context context, String helpDeskEmail) throws SQLException{
EPerson helpdeskEPerson = null;
try {
context.turnOffAuthorisationSystem();
helpdeskEPerson = EPerson.findByEmail(context, helpDeskEmail);
context.restoreAuthSystemState();
} catch (AuthorizeException e) {
log.error(e.getMessage());
}
context.turnOffAuthorisationSystem();
helpdeskEPerson = ePersonService.findByEmail(context, helpDeskEmail);
context.restoreAuthSystemState();
if(helpdeskEPerson != null) {
return new RequestItemAuthor(helpdeskEPerson);

View File

@@ -8,12 +8,15 @@
package org.dspace.app.requestitem;
import java.sql.SQLException;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.dspace.content.Metadatum;
import org.dspace.content.MetadataValue;
import org.dspace.content.Item;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Try to look to an item metadata for the corresponding author name and email.
@@ -24,8 +27,11 @@ import org.dspace.core.I18nUtil;
*/
public class RequestItemMetadataStrategy extends RequestItemSubmitterStrategy {
private String emailMetadata;
private String fullNameMatadata;
protected String emailMetadata;
protected String fullNameMatadata;
@Autowired(required = true)
protected ItemService itemService;
public RequestItemMetadataStrategy() {
}
@@ -35,17 +41,17 @@ public class RequestItemMetadataStrategy extends RequestItemSubmitterStrategy {
throws SQLException {
if (emailMetadata != null)
{
Metadatum[] vals = item.getMetadataByMetadataString(emailMetadata);
if (vals.length > 0)
List<MetadataValue> vals = itemService.getMetadataByMetadataString(item, emailMetadata);
if (vals.size() > 0)
{
String email = vals[0].value;
String email = vals.iterator().next().getValue();
String fullname = null;
if (fullNameMatadata != null)
{
Metadatum[] nameVals = item.getMetadataByMetadataString(fullNameMatadata);
if (nameVals.length > 0)
List<MetadataValue> nameVals = itemService.getMetadataByMetadataString(item, fullNameMatadata);
if (nameVals.size() > 0)
{
fullname = nameVals[0].value;
fullname = nameVals.iterator().next().getValue();
}
}

View File

@@ -0,0 +1,77 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.requestitem;
import org.apache.log4j.Logger;
import org.dspace.app.requestitem.dao.RequestItemDAO;
import org.dspace.app.requestitem.service.RequestItemService;
import org.dspace.content.Bitstream;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.core.Utils;
import org.springframework.beans.factory.annotation.Autowired;
import java.sql.SQLException;
import java.util.Date;
/**
* Service implementation for the RequestItem object.
* This class is responsible for all business logic calls for the RequestItem object and is autowired by spring.
* This class should never be accessed directly.
*
* @author kevinvandevelde at atmire.com
*/
public class RequestItemServiceImpl implements RequestItemService {

    private final Logger log = Logger.getLogger(RequestItemServiceImpl.class);

    @Autowired(required = true)
    protected RequestItemDAO requestItemDAO;

    /**
     * Persist a new item request and hand back its unique token.
     * The row is created first (so the DB assigns an id), then populated and saved.
     */
    @Override
    public String createRequest(Context context, Bitstream bitstream, Item item, boolean allFiles, String reqEmail, String reqName, String reqMessage) throws SQLException {
        RequestItem request = requestItemDAO.create(context, new RequestItem());
        request.setToken(Utils.generateHexKey());
        request.setBitstream(bitstream);
        request.setItem(item);
        request.setAllfiles(allFiles);
        request.setReqEmail(reqEmail);
        request.setReqName(reqName);
        request.setReqMessage(reqMessage);
        request.setRequest_date(new Date());
        requestItemDAO.save(context, request);

        if (log.isDebugEnabled()) {
            log.debug("Created requestitem_token " + request.getID()
                    + " with token " + request.getToken() + "\"");
        }
        return request.getToken();
    }

    /**
     * Look up a request by its unique token.
     *
     * @return the matching request, or {@code null} when none exists or a DB error occurred
     */
    @Override
    public RequestItem findByToken(Context context, String token) {
        RequestItem found = null;
        try {
            found = requestItemDAO.findByToken(context, token);
        } catch (SQLException e) {
            // Legacy contract: swallow the SQL error, log it, and report "not found".
            log.error(e.getMessage());
        }
        return found;
    }

    /**
     * Persist changes made to an existing request.
     * DB errors are logged rather than propagated (legacy contract).
     */
    @Override
    public void update(Context context, RequestItem requestItem) {
        try {
            requestItemDAO.save(context, requestItem);
        } catch (SQLException e) {
            log.error(e.getMessage());
        }
    }
}

View File

@@ -0,0 +1,26 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.requestitem.dao;
import org.dspace.app.requestitem.RequestItem;
import org.dspace.core.Context;
import org.dspace.core.GenericDAO;
import java.sql.SQLException;
/**
* Database Access Object interface class for the RequestItem object.
* The implementation of this class is responsible for all database calls for the RequestItem object and is autowired by spring
* This class should only be accessed from a single service & should never be exposed outside of the API
*
* @author kevinvandevelde at atmire.com
*/
public interface RequestItemDAO extends GenericDAO<RequestItem> {

    /**
     * Fetch the request carrying the given unique access token.
     *
     * @param context the current DSpace context
     * @param token the token identifying the request (unique per request)
     * @return the matching request, or null when no row carries this token
     * @throws SQLException if a database error occurs
     */
    public RequestItem findByToken(Context context, String token) throws SQLException;
}

View File

@@ -0,0 +1,36 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.requestitem.dao.impl;
import org.dspace.app.requestitem.RequestItem;
import org.dspace.app.requestitem.dao.RequestItemDAO;
import org.dspace.core.Context;
import org.dspace.core.AbstractHibernateDAO;
import org.hibernate.Criteria;
import org.hibernate.criterion.Restrictions;
import java.sql.SQLException;
/**
* Hibernate implementation of the Database Access Object interface class for the RequestItem object.
* This class is responsible for all database calls for the RequestItem object and is autowired by spring
* This class should never be accessed directly.
*
* @author kevinvandevelde at atmire.com
*/
public class RequestItemDAOImpl extends AbstractHibernateDAO<RequestItem> implements RequestItemDAO
{
    /**
     * Fetch the request carrying the given unique access token.
     *
     * @param context the current DSpace context
     * @param token the token identifying the request
     * @return the matching request, or null when no row carries this token
     * @throws SQLException if a database error occurs
     */
    @Override
    public RequestItem findByToken(Context context, String token) throws SQLException {
        // The token column is declared unique on the entity, so at most one row matches.
        Criteria criteria = createCriteria(context, RequestItem.class);
        criteria.add(Restrictions.eq("token", token));
        return uniqueResult(criteria);
    }
}

View File

@@ -0,0 +1,27 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.requestitem.factory;
import org.dspace.app.requestitem.service.RequestItemService;
import org.dspace.handle.service.HandleService;
import org.dspace.utils.DSpace;
/**
* Abstract factory to get services for the requestitem package, use RequestItemServiceFactory.getInstance() to retrieve an implementation
*
* @author kevinvandevelde at atmire.com
*/
public abstract class RequestItemServiceFactory {

    /**
     * Locate the spring-configured factory through the DSpace service manager.
     *
     * @return the singleton factory implementation registered under "requestItemServiceFactory"
     */
    public static RequestItemServiceFactory getInstance() {
        return new DSpace().getServiceManager()
                .getServiceByName("requestItemServiceFactory", RequestItemServiceFactory.class);
    }

    /** @return the service handling all RequestItem business logic */
    public abstract RequestItemService getRequestItemService();
}

View File

@@ -0,0 +1,28 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.requestitem.factory;
import org.dspace.app.requestitem.service.RequestItemService;
import org.dspace.handle.service.HandleService;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Factory implementation to get services for the requestitem package, use RequestItemServiceFactory.getInstance() to retrieve an implementation
*
* @author kevinvandevelde at atmire.com
*/
public class RequestItemServiceFactoryImpl extends RequestItemServiceFactory {

    // Injected by spring; never null once the context is wired (required = true).
    @Autowired(required = true)
    private RequestItemService requestItemService;

    /** @return the spring-wired RequestItem service. */
    @Override
    public RequestItemService getRequestItemService() {
        return requestItemService;
    }
}

View File

@@ -0,0 +1,44 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.requestitem.service;
import org.dspace.app.requestitem.RequestItem;
import org.dspace.content.Bitstream;
import org.dspace.content.Item;
import org.dspace.core.Context;
import java.sql.SQLException;
/**
* Service interface class for the RequestItem object.
* The implementation of this class is responsible for all business logic calls for the RequestItem object and is autowired by spring
*
* @author kevinvandevelde at atmire.com
*/
public interface RequestItemService {

    /**
     * Create a new item request, generate its unique token, and persist it.
     *
     * @param context the current DSpace context
     * @param bitstream the single requested bitstream (ignored when allFiles is true)
     * @param item the item whose file(s) are being requested
     * @param allFiles true when the request covers every bitstream of the item
     * @param reqEmail e-mail address of the requester
     * @param reqName name of the requester
     * @param reqMessage free-text message accompanying the request
     * @return the generated unique token identifying the new request
     * @throws java.sql.SQLException if a database error occurs
     */
    public String createRequest(Context context, Bitstream bitstream, Item item, boolean allFiles, String reqEmail, String reqName, String reqMessage)
            throws SQLException;

    /**
     * Look up a request by its unique token.
     *
     * @param context the current DSpace context
     * @param token the token identifying the request
     * @return the matching request, or null when none is found (or on DB error)
     */
    public RequestItem findByToken(Context context, String token);

    /**
     * Save updates to the record. Only accept_request and decision_date are set-able.
     *
     * @param context the current DSpace context
     * @param requestItem the request to persist
     */
    public void update(Context context, RequestItem requestItem);
}

View File

@@ -1,340 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.sfx;
import java.io.File;
import java.io.IOException;
import java.net.URLEncoder;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.dspace.content.DCPersonName;
import org.dspace.content.Metadatum;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.ParserConfigurationException;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
* XML configuration file reader for DSpace metadata fields (DC) mapping
* to OpenURL parameters.
* <p>
* This class reads the [dspace]/config/sfx.xml configuration file, which
* contains pairs of DSpace item metadata values and OpenURL parameter names.
* Then it takes an item and constructs an OpenURL for it with values of
* parameters filled in from the paired metadata fields.
* </p>
*
* @author Stuart Lewis
* @author Graham Triggs
* @version $Revision$
*/
public class SFXFileReader {

    /**
     * The SFX configuration document, parsed once and cached for the JVM lifetime.
     * NOTE(review): lazy init in loadSFXFile is not synchronized — assumes the
     * first call happens single-threaded; confirm before concurrent use.
     */
    private static Document doc;

    /** log4j logger */
    private static final Logger log = Logger.getLogger(SFXFileReader.class);

    /**
     * Loads the SFX configuration file and builds the OpenURL query for the item.
     *
     * @param fileName The name of the SFX configuration file
     * @param item The item to process, from which metadata values will be taken
     * @return the SFX string
     * @throws IOException if a metadata value cannot be URL-encoded
     */
    public static String loadSFXFile(String fileName, Item item) throws IOException
    {
        // Parse XML file -> XML document will be built (first call only; cached afterwards)
        if (doc == null)
        {
            doc = parseFile(fileName);
        }
        // Return final sfx Query String
        return doNodes(doc, item);
    }

    /**
     * Parses XML file and returns XML document.
     *
     * @param fileName XML file to parse
     * @return XML document or <B>null</B> if error occurred. The error is caught and logged.
     */
    public static Document parseFile(String fileName) {
        log.info("Parsing XML file... " + fileName);
        DocumentBuilder docBuilder;
        Document doc = null;
        DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
        docBuilderFactory.setIgnoringElementContentWhitespace(true);
        try {
            docBuilder = docBuilderFactory.newDocumentBuilder();
        }
        catch (ParserConfigurationException e) {
            log.error("Wrong parser configuration: " + e.getMessage());
            return null;
        }
        File sourceFile = new File(fileName);
        try {
            doc = docBuilder.parse(sourceFile);
        }
        catch (SAXException e) {
            log.error("Wrong XML file structure: " + e.getMessage());
            return null;
        }
        catch (IOException e) {
            // Legacy behaviour preserved: log and fall through (doc stays null).
            log.error("Could not read source file: " + e.getMessage());
        }
        log.info("XML file parsed");
        return doc;
    }

    /**
     * Walks the document root looking for the query-pairs element and maps the
     * item's metadata fields onto OpenURL parameters.
     *
     * @param node DOM node of the mapping pair in the XML file (field element)
     * @param item The item to process, from which metadata values will be taken
     * @return processed fields, or null when the node is missing.
     * @throws IOException if a metadata value cannot be URL-encoded
     */
    public static String doNodes(Node node, Item item) throws IOException
    {
        if (node == null)
        {
            log.error (" Empty Node ");
            return null;
        }
        Node e = getElement(node);
        NodeList nl = e.getChildNodes();
        int len = nl.getLength();
        String sfxfield = "";
        int i = 0;
        // Stop at the first query-pairs element yielding a non-empty query.
        while ((i < len) && StringUtils.isEmpty(sfxfield))
        {
            Node nd = nl.item(i);
            if ((nd == null) || isEmptyTextNode(nd))
            {
                i++;
                continue;
            }
            if ("query-pairs".equals(nd.getNodeName()))
            {
                sfxfield = processFields(nd, item);
            }
            i++;
        }
        log.info("Process fields : " + sfxfield);
        return sfxfield;
    }

    /**
     * Process the field nodes, mapping each metadata field defined in the
     * configuration file to an OpenURL parameter.
     *
     * @param e DOM node of the mapping pair in the XML file (field element)
     * @param item The item to process, from which metadata values will be taken
     * @return assembled OpenURL query.
     * @throws IOException if a metadata value cannot be URL-encoded
     */
    private static String processFields(Node e, Item item) throws IOException
    {
        NodeList cl = e.getChildNodes();
        int lench = cl.getLength();
        String myquery = "";
        for (int j = 0; j < lench; j++)
        {
            Node nch = cl.item(j);
            String querystring = "";
            String schema = "";
            String qualifier = "";
            String element = "";
            if ("field".equals(nch.getNodeName()))
            {
                NodeList pl = nch.getChildNodes();
                int plen = pl.getLength();
                int finish = 0;
                for (int k = 0; k < plen; k++)
                {
                    Node vn = pl.item(k);
                    String vName = vn.getNodeName();
                    if ("querystring".equals(vName))
                    {
                        querystring = getValue(vn);
                        finish++;
                    }
                    else if ("dc-schema".equals(vName))
                    {
                        schema = getValue(vn);
                        finish++;
                    }
                    else if ("dc-element".equals(vName))
                    {
                        element = getValue(vn);
                        finish++;
                    }
                    else if ("dc-qualifier".equals(vName))
                    {
                        qualifier = getValue(vn);
                        finish++;
                        if (StringUtils.isEmpty(qualifier))
                        {
                            // An empty dc-qualifier means "unqualified" for the lookup below.
                            qualifier = null;
                        }
                    }
                    if (finish == 4)
                    {
                        Metadatum[] dcvalue = item.getMetadata(schema, element, qualifier, Item.ANY);
                        if (dcvalue.length > 0)
                        {
                            // FIX: constant-first comparisons — 'qualifier' may legitimately be
                            // null here (empty dc-qualifier), which previously caused an NPE.
                            if ("date".equals(element) && "issued".equals(qualifier))
                            {
                                // Issued date: day is the greatest granularity SFX accepts.
                                String fullDate = dcvalue[0].value;
                                if (fullDate.length() > 10)
                                {
                                    fullDate = fullDate.substring(0, 10);
                                }
                                myquery = appendParam(myquery, querystring, fullDate);
                            }
                            else if ("contributor".equals(element) && "author".equals(qualifier))
                            {
                                // Split the person name when the parameter asks for last/first only.
                                DCPersonName dpn = new DCPersonName(dcvalue[0].value);
                                String dpnName = dcvalue[0].value;
                                if (querystring.endsWith("aulast="))
                                {
                                    dpnName = dpn.getLastName();
                                }
                                else if (querystring.endsWith("aufirst="))
                                {
                                    dpnName = dpn.getFirstNames();
                                }
                                myquery = appendParam(myquery, querystring, dpnName);
                            }
                            else
                            {
                                myquery = appendParam(myquery, querystring, dcvalue[0].value);
                            }
                        } // if dc.length > 0
                        // Reset the accumulator state for the next field definition.
                        finish = 0;
                        querystring = "";
                        schema = "";
                        element = "";
                        qualifier = "";
                    } // if finish == 4
                } //for k
            } // if field
        } // for j
        return myquery;
    }

    /**
     * Append one URL-encoded OpenURL parameter to the query assembled so far.
     *
     * @param query the query built up to now (may be empty, never null)
     * @param querystring the parameter prefix, e.g. "rft.date="
     * @param rawValue the raw metadata value to encode
     * @return the extended query string
     * @throws IOException if the value cannot be URL-encoded
     */
    private static String appendParam(String query, String querystring, String rawValue) throws IOException
    {
        String encoded = URLEncoder.encode(rawValue, Constants.DEFAULT_ENCODING);
        if (query.equals(""))
        {
            return querystring + encoded;
        }
        return query + "&" + querystring + encoded;
    }

    /**
     * Returns the first element node among the node's children.
     *
     * @param node element (it is XML tag)
     * @return Element node otherwise null
     */
    public static Node getElement(Node node)
    {
        NodeList child = node.getChildNodes();
        int length = child.getLength();
        for (int i = 0; i < length; i++)
        {
            Node kid = child.item(i);
            if (kid.getNodeType() == Node.ELEMENT_NODE)
            {
                return kid;
            }
        }
        return null;
    }

    /** @return true when the node is a text node containing only whitespace. */
    public static boolean isEmptyTextNode(Node nd)
    {
        boolean isEmpty = false;
        if (nd.getNodeType() == Node.TEXT_NODE)
        {
            String text = nd.getNodeValue().trim();
            if (text.length() == 0)
            {
                isEmpty = true;
            }
        }
        return isEmpty;
    }

    /**
     * Returns the value of the node's attribute named &lt;name&gt;.
     *
     * @param e the node carrying attributes
     * @param name the attribute name to look up
     * @return the trimmed attribute value, or null when absent
     */
    public static String getAttribute(Node e, String name)
    {
        NamedNodeMap attrs = e.getAttributes();
        int len = attrs.getLength();
        if (len > 0)
        {
            for (int i = 0; i < len; i++)
            {
                Node attr = attrs.item(i);
                if (name.equals(attr.getNodeName()))
                {
                    return attr.getNodeValue().trim();
                }
            }
        }
        //no such attribute
        return null;
    }

    /**
     * Returns the value found in the first Text child (if any) of the node.
     *
     * @param node the node whose text content is wanted
     * @return the trimmed text, or null when the node has no text child
     */
    public static String getValue(Node node)
    {
        NodeList child = node.getChildNodes();
        for (int i = 0; i < child.getLength(); i++)
        {
            Node kid = child.item(i);
            short type = kid.getNodeType();
            if (type == Node.TEXT_NODE)
            {
                return kid.getNodeValue().trim();
            }
        }
        // Didn't find a text node
        return null;
    }
}

View File

@@ -0,0 +1,290 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.sfx;
import java.io.File;
import java.io.IOException;
import java.net.URLEncoder;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.app.sfx.service.SFXFileReaderService;
import org.dspace.content.MetadataValue;
import org.dspace.content.service.ItemService;
import org.springframework.beans.factory.annotation.Autowired;
import org.w3c.dom.Document;
import org.dspace.content.DCPersonName;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.ParserConfigurationException;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
* XML configuration file reader for DSpace metadata fields (DC) mapping
* to OpenURL parameters.
* <p>
* This class reads the [dspace]/config/sfx.xml configuration file, which
* contains pairs of DSpace item metadata values and OpenURL parameter names.
* Then it takes an item and constructs an OpenURL for it with values of
* parameters filled in from the paired metadata fields.
* </p>
*
* @author Stuart Lewis
* @author Graham Triggs
* @version $Revision$
*/
public class SFXFileReaderServiceImpl implements SFXFileReaderService {

    /**
     * The SFX configuration file, parsed once and cached.
     * NOTE(review): lazy initialisation in loadSFXFile is not synchronized —
     * assumes the first call happens single-threaded; confirm before concurrent use.
     */
    protected Document doc;

    @Autowired(required = true)
    protected ItemService itemService;

    /** log4j logger */
    private final Logger log = Logger.getLogger(SFXFileReaderServiceImpl.class);

    protected SFXFileReaderServiceImpl() {
    }

    @Override
    public String loadSFXFile(String fileName, Item item) throws IOException {
        // Parse XML file -> XML document will be built (first call only; cached afterwards)
        if (doc == null) {
            doc = parseFile(fileName);
        }
        // Return final sfx Query String
        return doNodes(doc, item);
    }

    @Override
    public Document parseFile(String fileName)
    {
        log.info("Parsing XML file... " + fileName);
        DocumentBuilder docBuilder;
        Document doc = null;
        DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
        docBuilderFactory.setIgnoringElementContentWhitespace(true);
        try {
            docBuilder = docBuilderFactory.newDocumentBuilder();
        } catch (ParserConfigurationException e) {
            log.error("Wrong parser configuration: " + e.getMessage());
            return null;
        }
        File sourceFile = new File(fileName);
        try {
            doc = docBuilder.parse(sourceFile);
        } catch (SAXException e) {
            log.error("Wrong XML file structure: " + e.getMessage());
            return null;
        } catch (IOException e) {
            // Legacy behaviour preserved: log and fall through (doc stays null).
            log.error("Could not read source file: " + e.getMessage());
        }
        log.info("XML file parsed");
        return doc;
    }

    @Override
    public String doNodes(Node node, Item item) throws IOException {
        if (node == null) {
            log.error(" Empty Node ");
            return null;
        }
        Node e = getElement(node);
        NodeList nl = e.getChildNodes();
        int len = nl.getLength();
        String sfxfield = "";
        int i = 0;
        // Stop at the first query-pairs element yielding a non-empty query.
        while ((i < len) && StringUtils.isEmpty(sfxfield)) {
            Node nd = nl.item(i);
            if ((nd == null) || isEmptyTextNode(nd)) {
                i++;
                continue;
            }
            if ("query-pairs".equals(nd.getNodeName())) {
                sfxfield = processFields(nd, item);
            }
            i++;
        }
        log.info("Process fields : " + sfxfield);
        return sfxfield;
    }

    /**
     * Process the field nodes, mapping each metadata field defined in the
     * configuration file to an OpenURL parameter.
     *
     * @param e    DOM node of the mapping pair in the XML file (field element)
     * @param item The item to process, from which metadata values will be taken
     * @return assembled OpenURL query.
     * @throws IOException if a metadata value cannot be URL-encoded
     */
    protected String processFields(Node e, Item item) throws IOException {
        NodeList cl = e.getChildNodes();
        int lench = cl.getLength();
        String myquery = "";
        for (int j = 0; j < lench; j++) {
            Node nch = cl.item(j);
            String querystring = "";
            String schema = "";
            String qualifier = "";
            String element = "";
            if ("field".equals(nch.getNodeName())) {
                NodeList pl = nch.getChildNodes();
                int plen = pl.getLength();
                int finish = 0;
                for (int k = 0; k < plen; k++) {
                    Node vn = pl.item(k);
                    String vName = vn.getNodeName();
                    if ("querystring".equals(vName)) {
                        querystring = getValue(vn);
                        finish++;
                    } else if ("dc-schema".equals(vName)) {
                        schema = getValue(vn);
                        finish++;
                    } else if ("dc-element".equals(vName)) {
                        element = getValue(vn);
                        finish++;
                    } else if ("dc-qualifier".equals(vName)) {
                        qualifier = getValue(vn);
                        finish++;
                        if (StringUtils.isEmpty(qualifier)) {
                            // An empty dc-qualifier means "unqualified" for the lookup below.
                            qualifier = null;
                        }
                    }
                    if (finish == 4) {
                        List<MetadataValue> dcvalue = itemService.getMetadata(item, schema, element, qualifier, Item.ANY);
                        if (!dcvalue.isEmpty()) {
                            String firstValue = dcvalue.get(0).getValue();
                            // FIX: constant-first comparisons — 'qualifier' may legitimately be
                            // null here (empty dc-qualifier), which previously caused an NPE.
                            if ("date".equals(element) && "issued".equals(qualifier)) {
                                // Issued date: day is the greatest granularity SFX accepts.
                                String fullDate = firstValue;
                                if (fullDate.length() > 10) {
                                    fullDate = fullDate.substring(0, 10);
                                }
                                myquery = appendParam(myquery, querystring, fullDate);
                            } else if ("contributor".equals(element) && "author".equals(qualifier)) {
                                // Split the person name when the parameter asks for last/first only.
                                DCPersonName dpn = new DCPersonName(firstValue);
                                String dpnName = firstValue;
                                if (querystring.endsWith("aulast=")) {
                                    dpnName = dpn.getLastName();
                                } else if (querystring.endsWith("aufirst=")) {
                                    dpnName = dpn.getFirstNames();
                                }
                                myquery = appendParam(myquery, querystring, dpnName);
                            } else {
                                myquery = appendParam(myquery, querystring, firstValue);
                            }
                        } // if dc.length > 0
                        // Reset the accumulator state for the next field definition.
                        finish = 0;
                        querystring = "";
                        schema = "";
                        element = "";
                        qualifier = "";
                    } // if finish == 4
                } //for k
            } // if field
        } // for j
        return myquery;
    }

    /**
     * Append one URL-encoded OpenURL parameter to the query assembled so far.
     *
     * @param query the query built up to now (may be empty, never null)
     * @param querystring the parameter prefix, e.g. "rft.date="
     * @param rawValue the raw metadata value to encode
     * @return the extended query string
     * @throws IOException if the value cannot be URL-encoded
     */
    private String appendParam(String query, String querystring, String rawValue) throws IOException {
        String encoded = URLEncoder.encode(rawValue, Constants.DEFAULT_ENCODING);
        if (query.equals("")) {
            return querystring + encoded;
        }
        return query + "&" + querystring + encoded;
    }

    @Override
    public Node getElement(Node node) {
        // Return the first element-typed child, skipping text/comment nodes.
        NodeList child = node.getChildNodes();
        int length = child.getLength();
        for (int i = 0; i < length; i++) {
            Node kid = child.item(i);
            if (kid.getNodeType() == Node.ELEMENT_NODE) {
                return kid;
            }
        }
        return null;
    }

    @Override
    public boolean isEmptyTextNode(Node nd) {
        // True only for text nodes that contain nothing but whitespace.
        boolean isEmpty = false;
        if (nd.getNodeType() == Node.TEXT_NODE) {
            String text = nd.getNodeValue().trim();
            if (text.length() == 0) {
                isEmpty = true;
            }
        }
        return isEmpty;
    }

    @Override
    public String getAttribute(Node e, String name) {
        NamedNodeMap attrs = e.getAttributes();
        int len = attrs.getLength();
        if (len > 0) {
            for (int i = 0; i < len; i++) {
                Node attr = attrs.item(i);
                if (name.equals(attr.getNodeName())) {
                    return attr.getNodeValue().trim();
                }
            }
        }
        //no such attribute
        return null;
    }

    @Override
    public String getValue(Node node) {
        // Return the trimmed content of the first text child, if any.
        NodeList child = node.getChildNodes();
        for (int i = 0; i < child.getLength(); i++) {
            Node kid = child.item(i);
            short type = kid.getNodeType();
            if (type == Node.TEXT_NODE) {
                return kid.getNodeValue().trim();
            }
        }
        // Didn't find a text node
        return null;
    }
}

View File

@@ -0,0 +1,25 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.sfx.factory;

import org.dspace.app.sfx.service.SFXFileReaderService;
import org.dspace.utils.DSpace;

/**
 * Abstract factory to get services for the sfx package, use
 * {@code SfxServiceFactory.getInstance()} to retrieve an implementation.
 *
 * @author kevinvandevelde at atmire.com
 */
public abstract class SfxServiceFactory {

    /**
     * @return the service responsible for reading the SFX configuration file
     */
    public abstract SFXFileReaderService getSfxFileReaderService();

    /**
     * Look up the concrete factory registered with the DSpace service manager.
     *
     * @return the {@code SfxServiceFactory} implementation bean
     */
    public static SfxServiceFactory getInstance() {
        return new DSpace().getServiceManager()
                .getServiceByName("sfxServiceFactory", SfxServiceFactory.class);
    }
}

View File

@@ -0,0 +1,27 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.sfx.factory;

import org.dspace.app.sfx.service.SFXFileReaderService;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * Factory implementation to get services for the sfx package, use
 * {@code SfxServiceFactory.getInstance()} to retrieve an implementation.
 *
 * @author kevinvandevelde at atmire.com
 */
public class SfxServiceFactoryImpl extends SfxServiceFactory {

    /** Injected by Spring; the single SFX file-reader service bean. */
    @Autowired(required = true)
    private SFXFileReaderService sfxFileReaderService;

    /**
     * @return the wired {@link SFXFileReaderService} instance
     */
    @Override
    public SFXFileReaderService getSfxFileReaderService() {
        return sfxFileReaderService;
    }
}

View File

@@ -0,0 +1,83 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.sfx.service;

import org.dspace.content.Item;
import org.w3c.dom.Document;
import org.w3c.dom.Node;

import java.io.IOException;

/**
 * XML configuration file reader for DSpace metadata fields (DC) mapping
 * to OpenURL parameters.
 * <p>
 * This class reads the [dspace]/config/sfx.xml configuration file, which
 * contains pairs of DSpace item metadata values and OpenURL parameter names.
 * Then it takes an item and constructs an OpenURL for it with values of
 * parameters filled in from the paired metadata fields.
 * </p>
 *
 * @author Stuart Lewis
 * @author Graham Triggs
 * @version $Revision$
 */
public interface SFXFileReaderService {

    /**
     * Loads the SFX configuration file and builds the OpenURL query string
     * for the given item.
     *
     * @param fileName The name of the SFX configuration file
     * @param item The item to process, from which metadata values will be taken
     *
     * @return the SFX string
     * @throws java.io.IOException if the configuration file cannot be read
     */
    public String loadSFXFile(String fileName, Item item) throws IOException;

    /**
     * Parses XML file and returns XML document.
     *
     * @param fileName XML file to parse
     * @return XML document or {@code null} if an error occurred. The error is
     *         caught and logged.
     */
    public Document parseFile(String fileName);

    /**
     * Process the item, mapping each of its metadata fields defined in the
     * configuration file to an OpenURL parameter
     *
     * @param node DOM node of the mapping pair in the XML file (field element)
     * @param item The item to process, from which metadata values will be taken
     * @return processed fields.
     * @throws IOException if a metadata value cannot be encoded
     */
    public String doNodes(Node node, Item item) throws IOException;

    /**
     * Returns the first child of the given node that is an element node.
     *
     * @param node element (it is XML tag)
     * @return Element node, otherwise {@code null}
     */
    public Node getElement(Node node);

    /**
     * Tests whether a node is a text node containing only whitespace.
     *
     * @param nd the node to inspect
     * @return {@code true} if {@code nd} is an empty text node
     */
    public boolean isEmptyTextNode(Node nd);

    /**
     * Returns the value of the node's attribute named {@code name}.
     *
     * @param e the node whose attributes are searched
     * @param name the attribute name to look up
     * @return the trimmed attribute value, or {@code null} if absent
     */
    public String getAttribute(Node e, String name);

    /**
     * Returns the value found in the Text node (if any) in the
     * node list that's passed in.
     *
     * @param node the node whose children are searched
     * @return the trimmed text value, or {@code null} if no text node exists
     */
    public String getValue(Node node);
}

View File

@@ -10,7 +10,10 @@ package org.dspace.app.sherpa.submit;
import java.util.List;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
public interface ISSNItemExtractor
{

View File

@@ -10,12 +10,17 @@ package org.dspace.app.sherpa.submit;
import java.util.ArrayList;
import java.util.List;
import org.dspace.content.Metadatum;
import org.dspace.content.MetadataValue;
import org.dspace.content.Item;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
public class MetadataAuthorityISSNExtractor implements ISSNItemExtractor
{
@Autowired(required = true)
public ItemService itemService;
private List<String> metadataList;
public void setMetadataList(List<String> metadataList)
@@ -29,10 +34,10 @@ public class MetadataAuthorityISSNExtractor implements ISSNItemExtractor
List<String> values = new ArrayList<String>();
for (String metadata : metadataList)
{
Metadatum[] dcvalues = item.getMetadataByMetadataString(metadata);
for (Metadatum dcvalue : dcvalues)
List<MetadataValue> dcvalues = itemService.getMetadataByMetadataString(item, metadata);
for (MetadataValue dcvalue : dcvalues)
{
values.add(dcvalue.authority);
values.add(dcvalue.getAuthority());
}
}
return values;

View File

@@ -10,12 +10,17 @@ package org.dspace.app.sherpa.submit;
import java.util.ArrayList;
import java.util.List;
import org.dspace.content.Metadatum;
import org.dspace.content.MetadataValue;
import org.dspace.content.Item;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
public class MetadataValueISSNExtractor implements ISSNItemExtractor
{
@Autowired(required = true)
public ItemService itemService;
private List<String> metadataList;
public void setMetadataList(List<String> metadataList)
@@ -29,10 +34,10 @@ public class MetadataValueISSNExtractor implements ISSNItemExtractor
List<String> values = new ArrayList<String>();
for (String metadata : metadataList)
{
Metadatum[] dcvalues = item.getMetadataByMetadataString(metadata);
for (Metadatum dcvalue : dcvalues)
List<MetadataValue> dcvalues = itemService.getMetadataByMetadataString(item, metadata);
for (MetadataValue dcvalue : dcvalues)
{
values.add(dcvalue.value);
values.add(dcvalue.getValue());
}
}
return values;

View File

@@ -18,6 +18,8 @@ import java.net.URL;
import java.net.URLEncoder;
import java.sql.SQLException;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
@@ -30,7 +32,10 @@ import org.apache.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.content.ItemIterator;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
@@ -46,6 +51,10 @@ public class GenerateSitemaps
/** Logger */
private static Logger log = Logger.getLogger(GenerateSitemaps.class);
private static final CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
private static final CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
private static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
public static void main(String[] args) throws Exception
{
final String usage = GenerateSitemaps.class.getCanonicalName();
@@ -178,88 +187,71 @@ public class GenerateSitemaps
Context c = new Context();
Community[] comms = Community.findAll(c);
List<Community> comms = communityService.findAll(c);
for (int i = 0; i < comms.length; i++)
{
String url = handleURLStem + comms[i].getHandle();
for (Community comm : comms) {
String url = handleURLStem + comm.getHandle();
if (makeHTMLMap)
{
if (makeHTMLMap) {
html.addURL(url, null);
}
if (makeSitemapOrg)
{
if (makeSitemapOrg) {
sitemapsOrg.addURL(url, null);
}
}
Collection[] colls = Collection.findAll(c);
List<Collection> colls = collectionService.findAll(c);
for (int i = 0; i < colls.length; i++)
{
String url = handleURLStem + colls[i].getHandle();
for (Collection coll : colls) {
String url = handleURLStem + coll.getHandle();
if (makeHTMLMap)
{
if (makeHTMLMap) {
html.addURL(url, null);
}
if (makeSitemapOrg)
{
if (makeSitemapOrg) {
sitemapsOrg.addURL(url, null);
}
}
ItemIterator allItems = Item.findAll(c);
try
Iterator<Item> allItems = itemService.findAll(c);
int itemCount = 0;
while (allItems.hasNext())
{
int itemCount = 0;
while (allItems.hasNext())
{
Item i = allItems.next();
String url = handleURLStem + i.getHandle();
Date lastMod = i.getLastModified();
if (makeHTMLMap)
{
html.addURL(url, lastMod);
}
if (makeSitemapOrg)
{
sitemapsOrg.addURL(url, lastMod);
}
i.decache();
itemCount++;
}
Item i = allItems.next();
String url = handleURLStem + i.getHandle();
Date lastMod = i.getLastModified();
if (makeHTMLMap)
{
int files = html.finish();
log.info(LogManager.getHeader(c, "write_sitemap",
"type=html,num_files=" + files + ",communities="
+ comms.length + ",collections=" + colls.length
+ ",items=" + itemCount));
html.addURL(url, lastMod);
}
if (makeSitemapOrg)
{
int files = sitemapsOrg.finish();
log.info(LogManager.getHeader(c, "write_sitemap",
"type=html,num_files=" + files + ",communities="
+ comms.length + ",collections=" + colls.length
+ ",items=" + itemCount));
sitemapsOrg.addURL(url, lastMod);
}
itemCount++;
}
finally
if (makeHTMLMap)
{
if (allItems != null)
{
allItems.close();
}
int files = html.finish();
log.info(LogManager.getHeader(c, "write_sitemap",
"type=html,num_files=" + files + ",communities="
+ comms.size() + ",collections=" + colls.size()
+ ",items=" + itemCount));
}
if (makeSitemapOrg)
{
int files = sitemapsOrg.finish();
log.info(LogManager.getHeader(c, "write_sitemap",
"type=html,num_files=" + files + ",communities="
+ comms.size() + ",collections=" + colls.size()
+ ",items=" + itemCount));
}
c.abort();
}

View File

@@ -23,10 +23,10 @@ import java.util.Date;
public class HTMLSitemapGenerator extends AbstractGenerator
{
/** Stem of URLs sitemaps will eventually appear at */
private String indexURLStem;
protected String indexURLStem;
/** Tail of URLs sitemaps will eventually appear at */
private String indexURLTail;
protected String indexURLTail;
/**
* Construct an HTML sitemap generator, writing files to the given
@@ -50,33 +50,39 @@ public class HTMLSitemapGenerator extends AbstractGenerator
indexURLTail = (urlTail == null ? "" : urlTail);
}
@Override
public String getFilename(int number)
{
return "sitemap" + number + ".html";
}
@Override
public String getLeadingBoilerPlate()
{
return "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01//EN\" \"http://www.w3.org/TR/html4/strict.dtd\">\n"
+ "<html><head><title>URL List</title></head><body><ul>";
}
@Override
public int getMaxSize()
{
// 50k
return 51200;
}
@Override
public int getMaxURLs()
{
return 1000;
}
@Override
public String getTrailingBoilerPlate()
{
return "</ul></body></html>\n";
}
@Override
public String getURLText(String url, Date lastMod)
{
StringBuffer urlText = new StringBuffer();
@@ -87,16 +93,19 @@ public class HTMLSitemapGenerator extends AbstractGenerator
return urlText.toString();
}
@Override
public boolean useCompression()
{
return false;
}
@Override
public String getIndexFilename()
{
return "sitemap_index.html";
}
@Override
public void writeIndex(PrintStream output, int sitemapCount)
throws IOException
{

View File

@@ -25,13 +25,13 @@ import java.util.Date;
public class SitemapsOrgGenerator extends AbstractGenerator
{
/** Stem of URLs sitemaps will eventually appear at */
private String indexURLStem;
protected String indexURLStem;
/** Tail of URLs sitemaps will eventually appear at */
private String indexURLTail;
protected String indexURLTail;
/** The correct date format */
private DateFormat w3dtfFormat = new SimpleDateFormat(
protected DateFormat w3dtfFormat = new SimpleDateFormat(
"yyyy-MM-dd'T'HH:mm:ss'Z'");
/**
@@ -56,33 +56,39 @@ public class SitemapsOrgGenerator extends AbstractGenerator
indexURLTail = (urlTail == null ? "" : urlTail);
}
@Override
public String getFilename(int number)
{
return "sitemap" + number + ".xml.gz";
}
@Override
public String getLeadingBoilerPlate()
{
return "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
+ "<urlset xmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\">";
}
@Override
public int getMaxSize()
{
// 10 Mb
return 10485760;
}
@Override
public int getMaxURLs()
{
return 50000;
}
@Override
public String getTrailingBoilerPlate()
{
return "</urlset>";
}
@Override
public String getURLText(String url, Date lastMod)
{
StringBuffer urlText = new StringBuffer();
@@ -98,16 +104,19 @@ public class SitemapsOrgGenerator extends AbstractGenerator
return urlText.toString();
}
@Override
public boolean useCompression()
{
return true;
}
@Override
public String getIndexFilename()
{
return "sitemap_index.xml.gz";
}
@Override
public void writeIndex(PrintStream output, int sitemapCount)
throws IOException
{

View File

@@ -69,6 +69,7 @@ public class HTMLReport implements Report
*
* @return the HTML report
*/
@Override
public String render()
{
StringBuffer frag = new StringBuffer();
@@ -165,6 +166,7 @@ public class HTMLReport implements Report
*
* @param stat the statistics object to be added to the report
*/
@Override
public void addBlock(Statistics stat)
{
blocks.add(stat);
@@ -177,6 +179,7 @@ public class HTMLReport implements Report
*
* @param start the start date for the report
*/
@Override
public void setStartDate(Date start)
{
this.start = (start == null ? null : new Date(start.getTime()));
@@ -188,6 +191,7 @@ public class HTMLReport implements Report
*
* @param end the end date for the report
*/
@Override
public void setEndDate(Date end)
{
this.end = (end == null ? null : new Date(end.getTime()));
@@ -200,6 +204,7 @@ public class HTMLReport implements Report
*
* @return a string containing date range information
*/
@Override
public String dateRange()
{
StringBuffer frag = new StringBuffer();
@@ -238,6 +243,7 @@ public class HTMLReport implements Report
*
* @return a string containing the title of the report
*/
@Override
public String mainTitle()
{
return "<div class=\"reportTitle\"><a name=\"top\">" + mainTitle + "</a></div>\n\n";
@@ -250,6 +256,7 @@ public class HTMLReport implements Report
* @param name the name of the service
* @param serverName the name of the server
*/
@Override
public void setMainTitle(String name, String serverName)
{
mainTitle = "Statistics for " + name + " on " + serverName;
@@ -266,6 +273,7 @@ public class HTMLReport implements Report
*
* @return a string containing the header for the report
*/
@Override
public String header()
{
return header("");
@@ -277,6 +285,7 @@ public class HTMLReport implements Report
*
* @param title the title of the item being headered
*/
@Override
public String header(String title)
{
// FIXME: this need to be figured out to integrate nicely into the
@@ -309,6 +318,7 @@ public class HTMLReport implements Report
*
* @return a string containing the section title HTML formatted
*/
@Override
public String sectionHeader(String title)
{
// prepare the title to be an <a name="#title"> style link
@@ -332,6 +342,7 @@ public class HTMLReport implements Report
*
* @return a string containing the statistics block HTML formatted
*/
@Override
public String statBlock(Statistics content)
{
StringBuffer frag = new StringBuffer();
@@ -417,6 +428,7 @@ public class HTMLReport implements Report
*
* @return a string containing floor information HTML formatted
*/
@Override
public String floorInfo(int floor)
{
if (floor > 0)
@@ -440,6 +452,7 @@ public class HTMLReport implements Report
*
* @return a string containing an explanaton HTML formatted
*/
@Override
public String blockExplanation(String explanation)
{
if (explanation != null)
@@ -461,6 +474,7 @@ public class HTMLReport implements Report
*
* @return a string containing the report footer
*/
@Override
public String footer()
{
return "";

View File

@@ -7,13 +7,14 @@
*/
package org.dspace.app.statistics;
import org.dspace.content.MetadataSchema;
import org.apache.commons.lang3.StringUtils;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.SearchUtils;
import java.sql.SQLException;
@@ -292,8 +293,7 @@ public class LogAnalyser
String myFileTemplate, String myConfigFile,
String myOutFile, Date myStartDate,
Date myEndDate, boolean myLookUp)
throws IOException, SQLException
{
throws IOException, SQLException, SearchServiceException {
// FIXME: perhaps we should have all parameters and aggregators put
// together in a single aggregating object
@@ -1142,132 +1142,42 @@ public class LogAnalyser
* @return an integer containing the relevant count
*/
public static Integer getNumItems(Context context, String type)
throws SQLException
{
boolean oracle = DatabaseManager.isOracle();
throws SQLException, SearchServiceException {
// FIXME: this method is clearly not optimised
// FIXME: we don't yet collect total statistics, such as number of items
// withdrawn, number in process of submission etc. We should probably do
// that
// start the type constraint
String typeQuery = null;
if (type != null)
DiscoverQuery discoverQuery = new DiscoverQuery();
if(StringUtils.isNotBlank(type))
{
typeQuery = "SELECT resource_id " +
"FROM metadatavalue " +
"WHERE text_value LIKE '%" + type + "%' " + " AND resource_type_id="+ Constants.ITEM +
" AND metadata_field_id = (" +
" SELECT metadata_field_id " +
" FROM metadatafieldregistry " +
" WHERE metadata_schema_id = (" +
" SELECT metadata_schema_id" +
" FROM MetadataSchemaRegistry" +
" WHERE short_id = '" + MetadataSchema.DC_SCHEMA + "')" +
" AND element = 'type' " +
" AND qualifier IS NULL) ";
discoverQuery.addFilterQueries("dc.type=" + type +"*");
}
// start the date constraint query buffer
StringBuffer dateQuery = new StringBuffer();
if (oracle)
StringBuilder accessionedQuery = new StringBuilder();
accessionedQuery.append("dc.date.accessioned_dt:[");
if(startDate != null)
{
dateQuery.append("SELECT /*+ ORDERED_PREDICATES */ resource_id ");
accessionedQuery.append(unParseDate(startDate));
}
else
{
dateQuery.append("SELECT resource_id ");
accessionedQuery.append("*");
}
dateQuery.append("FROM metadatavalue " +
"WHERE " + "resource_type_id="+ Constants.ITEM + " AND metadata_field_id = (" +
" SELECT metadata_field_id " +
" FROM metadatafieldregistry " +
" WHERE metadata_schema_id = (" +
" SELECT metadata_schema_id" +
" FROM MetadataSchemaRegistry" +
" WHERE short_id = '" + MetadataSchema.DC_SCHEMA + "')" +
" AND element = 'date' " +
" AND qualifier = 'accessioned') ");
// Verifies that the metadata contains a valid date, otherwise the
// postgres queries blow up when doing the ::timestamp cast.
if (!oracle && (startDate != null || endDate != null)) {
dateQuery.append(" AND text_value LIKE '____-__-__T__:__:__Z' ");
}
if (startDate != null)
accessionedQuery.append(" TO ");
if(endDate != null)
{
if (oracle)
{
dateQuery.append(" AND TO_TIMESTAMP( TO_CHAR(text_value), "+
"'yyyy-mm-dd\"T\"hh24:mi:ss\"Z\"' ) >= TO_DATE('" +
unParseDate(startDate) + "', 'yyyy-MM-dd\"T\"hh24:mi:ss\"Z\"') ");
}
else
{
dateQuery.append(" AND text_value::timestamp >= '" +
unParseDate(startDate) + "'::timestamp ");
}
}
if (endDate != null)
{
// adjust end date to account for timestamp comparison
GregorianCalendar realEndDate = new GregorianCalendar();
realEndDate.setTime(endDate);
realEndDate.add(Calendar.DAY_OF_MONTH, 1);
Date queryEndDate = realEndDate.getTime();
if (oracle)
{
dateQuery.append(" AND TO_TIMESTAMP( TO_CHAR(text_value), "+
"'yyyy-mm-dd\"T\"hh24:mi:ss\"Z\"' ) < TO_DATE('" +
unParseDate(queryEndDate) + "', 'yyyy-MM-dd\"T\"hh24:mi:ss\"Z\"') ");
}
else
{
dateQuery.append(" AND text_value::timestamp < '" +
unParseDate(queryEndDate) + "'::timestamp ");
}
}
// build the final query
StringBuffer query = new StringBuffer();
query.append("SELECT COUNT(*) AS num " +
"FROM item " +
"WHERE in_archive = " + (oracle ? "1 " : "true ") +
"AND withdrawn = " + (oracle ? "0 " : "false "));
if (startDate != null || endDate != null)
{
query.append(" AND item_id IN ( " +
dateQuery.toString() + ") ");
}
if (type != null)
{
query.append(" AND item_id IN ( " +
typeQuery + ") ");
}
TableRow row = DatabaseManager.querySingle(context, query.toString());
Integer numItems;
if (oracle)
{
numItems = Integer.valueOf(row.getIntColumn("num"));
accessionedQuery.append(unParseDate(endDate));
}
else
{
// for some reason the number column is of "long" data type!
Long count = Long.valueOf(row.getLongColumn("num"));
numItems = Integer.valueOf(count.intValue());
accessionedQuery.append("*");
}
return numItems;
discoverQuery.addFilterQueries(accessionedQuery.toString());
discoverQuery.addFilterQueries("withdrawn: false");
discoverQuery.addFilterQueries("archived: true");
return SearchUtils.getSearchService().search(context, discoverQuery).getMaxResults();
}
@@ -1281,8 +1191,7 @@ public class LogAnalyser
* archive
*/
public static Integer getNumItems(Context context)
throws SQLException
{
throws SQLException, SearchServiceException {
return getNumItems(context, null);
}

View File

@@ -27,11 +27,15 @@ import java.util.StringTokenizer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.dspace.content.Metadatum;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataValue;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
/**
* This class performs the action of coordinating a usage report being
@@ -140,7 +144,10 @@ public class ReportGenerator
/** the log file action to human readable action map */
private static String map = ConfigurationManager.getProperty("dspace.dir") +
File.separator + "config" + File.separator + "dstat.map";
private static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
private static final HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
/**
* main method to be run from command line. See usage information for
@@ -785,7 +792,7 @@ public class ReportGenerator
// ensure that the handle exists
try
{
item = (Item) HandleManager.resolveToObject(context, handle);
item = (Item) handleService.resolveToObject(context, handle);
}
catch (Exception e)
{
@@ -801,24 +808,24 @@ public class ReportGenerator
// build the referece
// FIXME: here we have blurred the line between content and presentation
// and it should probably be un-blurred
Metadatum[] title = item.getDC("title", null, Item.ANY);
Metadatum[] author = item.getDC("contributor", "author", Item.ANY);
List<MetadataValue> title = itemService.getMetadata(item, MetadataSchema.DC_SCHEMA, "title", null, Item.ANY);
List<MetadataValue> author = itemService.getMetadata(item, MetadataSchema.DC_SCHEMA, "contributor", "author", Item.ANY);
StringBuffer authors = new StringBuffer();
if (author.length > 0)
if (author.size() > 0)
{
authors.append("(" + author[0].value);
authors.append("(" + author.get(0).getValue());
}
if (author.length > 1)
if (author.size() > 1)
{
authors.append(" et al");
}
if (author.length > 0)
if (author.size() > 0)
{
authors.append(")");
}
String content = title[0].value + " " + authors.toString();
String content = title.get(0).getValue() + " " + authors.toString();
return content;
}

View File

@@ -143,7 +143,8 @@ public class Stat implements Comparable
* @return +1, 0, -1 if o is less than, equal to, or greater than the
* current object value.
*/
public int compareTo(Object o)
@Override
public int compareTo(Object o)
{
int objectValue = ((Stat) o).getValue();

View File

@@ -101,6 +101,7 @@ public class StatisticsLoader
protected static Date[] sortDatesDescending(Date[] dates)
{
Arrays.sort(dates, new Comparator<Date>() {
@Override
public int compare(Date d1, Date d2)
{
if (d1 == null && d2 == null)
@@ -352,6 +353,7 @@ public class StatisticsLoader
*/
private static class AnalysisAndReportFilter implements FilenameFilter
{
@Override
public boolean accept(File dir, String name)
{
if (analysisMonthlyPattern.matcher(name).matches())

View File

@@ -8,22 +8,14 @@
package org.dspace.app.util;
import java.io.IOException;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.apache.http.client.HttpClient;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.impl.client.DefaultHttpClient;
import org.dspace.app.util.factory.UtilServiceFactory;
import org.dspace.app.util.service.WebAppService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -39,13 +31,16 @@ abstract public class AbstractDSpaceWebapp
{
private static final Logger log = LoggerFactory.getLogger(AbstractDSpaceWebapp.class);
protected final WebAppService webAppService = UtilServiceFactory.getInstance().getWebAppService();
protected String kind;
protected Date started;
protected String url;
private TableRow row;
protected WebApp webApp;
/** Prevent null instantiation. */
protected AbstractDSpaceWebapp()
@@ -77,12 +72,7 @@ abstract public class AbstractDSpaceWebapp
Timestamp now = new Timestamp(started.getTime());
try {
Context context = new Context();
row = DatabaseManager.create(context, "Webapp");
row.setColumn("AppName", kind);
row.setColumn("URL", url);
row.setColumn("Started", now);
row.setColumn("isUI", isUI() ? 1 : 0); // update won't widen boolean to integer
DatabaseManager.update(context, row);
webApp = webAppService.create(context, kind, url, now, isUI() ? 1 : 0);
context.complete();
} catch (SQLException e) {
log.error("Failed to record startup in Webapp table.", e);
@@ -95,80 +85,13 @@ abstract public class AbstractDSpaceWebapp
// Remove the database entry
try {
Context context = new Context();
DatabaseManager.delete(context, row);
webAppService.delete(context, webApp);
context.complete();
} catch (SQLException e) {
log.error("Failed to record shutdown in Webapp table.", e);
}
}
/** Return the list of running applications. */
static public List<AbstractDSpaceWebapp> getApps()
{
ArrayList<AbstractDSpaceWebapp> apps = new ArrayList<AbstractDSpaceWebapp>();
TableRowIterator tri;
Context context = null;
HttpHead method = null;
try {
context = new Context();
tri = DatabaseManager.queryTable(context, "Webapp",
"SELECT * FROM Webapp");
for (TableRow row : tri.toList())
{
DSpaceWebapp app = new DSpaceWebapp();
app.kind = row.getStringColumn("AppName");
app.url = row.getStringColumn("URL");
app.started = row.getDateColumn("Started");
app.uiQ = row.getBooleanColumn("isUI");
method = new HttpHead(app.url);
HttpClient client = new DefaultHttpClient();
HttpResponse response = client.execute(method);
int status = response.getStatusLine().getStatusCode();
if (status != HttpStatus.SC_OK)
{
DatabaseManager.delete(context, row);
context.commit();
continue;
}
apps.add(app);
}
} catch (SQLException e) {
log.error("Unable to list running applications", e);
} catch (IOException e) {
log.error("Failure checking for a running webapp", e);
} finally {
if (null != method)
{
method.releaseConnection();
}
if (null != context)
{
context.abort();
}
}
return apps;
}
/** Container for retrieved database rows. */
static private class DSpaceWebapp
extends AbstractDSpaceWebapp
{
private boolean uiQ;
@Override
public boolean isUI()
{
return uiQ;
}
}
/* DSpaceWebappMXBean methods */
@Override
public String getKind()
{

View File

@@ -8,16 +8,16 @@
package org.dspace.app.util;
import java.sql.SQLException;
import java.util.List;
import org.dspace.authorize.AuthorizeConfiguration;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.*;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.*;
import org.dspace.core.Constants;
import org.dspace.core.Context;
@@ -31,10 +31,14 @@ import org.dspace.core.Context;
public class AuthorizeUtil
{
private static final AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
private static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
private static final CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
/**
* Is allowed manage (create, remove, edit) bitstream's policies in the
* current context?
*
*
* @param context
* the DSpace Context Object
* @param bitstream
@@ -48,8 +52,8 @@ public class AuthorizeUtil
public static void authorizeManageBitstreamPolicy(Context context,
Bitstream bitstream) throws AuthorizeException, SQLException
{
Bundle bundle = bitstream.getBundles()[0];
authorizeManageBundlePolicy(context, bundle);
BundleBitstream bundleBitstream = bitstream.getBundles().get(0);
authorizeManageBundlePolicy(context, bundleBitstream.getBundle());
}
/**
@@ -69,7 +73,7 @@ public class AuthorizeUtil
public static void authorizeManageBundlePolicy(Context context,
Bundle bundle) throws AuthorizeException, SQLException
{
Item item = bundle.getItems()[0];
Item item = bundle.getItems().get(0);
authorizeManageItemPolicy(context, item);
}
@@ -92,20 +96,20 @@ public class AuthorizeUtil
{
if (AuthorizeConfiguration.canItemAdminManagePolicies())
{
AuthorizeManager.authorizeAction(context, item, Constants.ADMIN);
authorizeService.authorizeAction(context, item, Constants.ADMIN);
}
else if (AuthorizeConfiguration.canCollectionAdminManageItemPolicies())
{
AuthorizeManager.authorizeAction(context, item
authorizeService.authorizeAction(context, item
.getOwningCollection(), Constants.ADMIN);
}
else if (AuthorizeConfiguration.canCommunityAdminManageItemPolicies())
{
AuthorizeManager
authorizeService
.authorizeAction(context, item.getOwningCollection()
.getCommunities()[0], Constants.ADMIN);
.getCommunities().get(0), Constants.ADMIN);
}
else if (!AuthorizeManager.isAdmin(context))
else if (!authorizeService.isAdmin(context))
{
throw new AuthorizeException(
"Only system admin are allowed to manage item policies");
@@ -131,16 +135,16 @@ public class AuthorizeUtil
{
if (AuthorizeConfiguration.canCollectionAdminManagePolicies())
{
AuthorizeManager.authorizeAction(context, collection,
authorizeService.authorizeAction(context, collection,
Constants.ADMIN);
}
else if (AuthorizeConfiguration
.canCommunityAdminManageCollectionPolicies())
{
AuthorizeManager.authorizeAction(context, collection
.getCommunities()[0], Constants.ADMIN);
authorizeService.authorizeAction(context, collection
.getCommunities().get(0), Constants.ADMIN);
}
else if (!AuthorizeManager.isAdmin(context))
else if (!authorizeService.isAdmin(context))
{
throw new AuthorizeException(
"Only system admin are allowed to manage collection policies");
@@ -166,10 +170,10 @@ public class AuthorizeUtil
{
if (AuthorizeConfiguration.canCommunityAdminManagePolicies())
{
AuthorizeManager.authorizeAction(context, community,
authorizeService.authorizeAction(context, community,
Constants.ADMIN);
}
else if (!AuthorizeManager.isAdmin(context))
else if (!authorizeService.isAdmin(context))
{
throw new AuthorizeException(
"Only system admin are allowed to manage community policies");
@@ -189,7 +193,7 @@ public class AuthorizeUtil
public static void requireAdminRole(Context context)
throws AuthorizeException, SQLException
{
if (!AuthorizeManager.isAdmin(context))
if (!authorizeService.isAdmin(context))
{
throw new AuthorizeException(
"Only system admin are allowed to perform this action");
@@ -215,25 +219,25 @@ public class AuthorizeUtil
{
try
{
AuthorizeManager.authorizeAction(context, item, Constants.ADD);
AuthorizeManager.authorizeAction(context, item, Constants.REMOVE);
authorizeService.authorizeAction(context, item, Constants.ADD);
authorizeService.authorizeAction(context, item, Constants.REMOVE);
}
catch (AuthorizeException authex)
{
if (AuthorizeConfiguration.canItemAdminManageCCLicense())
{
AuthorizeManager
authorizeService
.authorizeAction(context, item, Constants.ADMIN);
}
else if (AuthorizeConfiguration.canCollectionAdminManageCCLicense())
{
AuthorizeManager.authorizeAction(context, item
.getParentObject(), Constants.ADMIN);
authorizeService.authorizeAction(context, itemService
.getParentObject(context, item), Constants.ADMIN);
}
else if (AuthorizeConfiguration.canCommunityAdminManageCCLicense())
{
AuthorizeManager.authorizeAction(context, item
.getParentObject().getParentObject(), Constants.ADMIN);
authorizeService.authorizeAction(context, itemService
.getParentObject(context, item), Constants.ADMIN);
}
else
{
@@ -259,25 +263,25 @@ public class AuthorizeUtil
public static void authorizeManageTemplateItem(Context context,
Collection collection) throws AuthorizeException, SQLException
{
boolean isAuthorized = collection.canEditBoolean(false);
boolean isAuthorized = collectionService.canEditBoolean(context, collection, false);
if (!isAuthorized
&& AuthorizeConfiguration
.canCollectionAdminManageTemplateItem())
{
AuthorizeManager.authorizeAction(context, collection,
authorizeService.authorizeAction(context, collection,
Constants.ADMIN);
}
else if (!isAuthorized
&& AuthorizeConfiguration
.canCommunityAdminManageCollectionTemplateItem())
{
Community[] communities = collection.getCommunities();
Community parent = communities != null && communities.length > 0 ? communities[0]
List<Community> communities = collection.getCommunities();
Community parent = communities != null && communities.size() > 0 ? communities.get(0)
: null;
AuthorizeManager.authorizeAction(context, parent, Constants.ADMIN);
authorizeService.authorizeAction(context, parent, Constants.ADMIN);
}
else if (!isAuthorized && !AuthorizeManager.isAdmin(context))
else if (!isAuthorized && !authorizeService.isAdmin(context))
{
throw new AuthorizeException(
"You are not authorized to create a template item for the collection");
@@ -303,16 +307,16 @@ public class AuthorizeUtil
{
if (AuthorizeConfiguration.canCollectionAdminManageSubmitters())
{
AuthorizeManager.authorizeAction(context, collection,
authorizeService.authorizeAction(context, collection,
Constants.ADMIN);
}
else if (AuthorizeConfiguration
.canCommunityAdminManageCollectionSubmitters())
{
AuthorizeManager.authorizeAction(context, collection
.getCommunities()[0], Constants.ADMIN);
authorizeService.authorizeAction(context, collection
.getCommunities().get(0), Constants.ADMIN);
}
else if (!AuthorizeManager.isAdmin(context))
else if (!authorizeService.isAdmin(context))
{
throw new AuthorizeException(
"Only system admin are allowed to manage collection submitters");
@@ -338,16 +342,16 @@ public class AuthorizeUtil
{
if (AuthorizeConfiguration.canCollectionAdminManageWorkflows())
{
AuthorizeManager.authorizeAction(context, collection,
authorizeService.authorizeAction(context, collection,
Constants.ADMIN);
}
else if (AuthorizeConfiguration
.canCommunityAdminManageCollectionWorkflows())
{
AuthorizeManager.authorizeAction(context, collection
.getCommunities()[0], Constants.ADMIN);
authorizeService.authorizeAction(context, collection
.getCommunities().get(0), Constants.ADMIN);
}
else if (!AuthorizeManager.isAdmin(context))
else if (!authorizeService.isAdmin(context))
{
throw new AuthorizeException(
"Only system admin are allowed to manage collection workflow");
@@ -375,16 +379,16 @@ public class AuthorizeUtil
{
if (AuthorizeConfiguration.canCollectionAdminManageAdminGroup())
{
AuthorizeManager.authorizeAction(context, collection,
authorizeService.authorizeAction(context, collection,
Constants.ADMIN);
}
else if (AuthorizeConfiguration
.canCommunityAdminManageCollectionAdminGroup())
{
AuthorizeManager.authorizeAction(context, collection
.getCommunities()[0], Constants.ADMIN);
authorizeService.authorizeAction(context, collection
.getCommunities().get(0), Constants.ADMIN);
}
else if (!AuthorizeManager.isAdmin(context))
else if (!authorizeService.isAdmin(context))
{
throw new AuthorizeException(
"Only system admin are allowed to manage collection admin");
@@ -410,15 +414,15 @@ public class AuthorizeUtil
public static void authorizeRemoveAdminGroup(Context context,
Collection collection) throws AuthorizeException, SQLException
{
Community[] parentCommunities = collection.getCommunities();
List<Community> parentCommunities = collection.getCommunities();
if (AuthorizeConfiguration
.canCommunityAdminManageCollectionAdminGroup()
&& parentCommunities != null && parentCommunities.length > 0)
&& parentCommunities != null && parentCommunities.size() > 0)
{
AuthorizeManager.authorizeAction(context, collection
.getCommunities()[0], Constants.ADMIN);
authorizeService.authorizeAction(context, collection
.getCommunities().get(0), Constants.ADMIN);
}
else if (!AuthorizeManager.isAdmin(context))
else if (!authorizeService.isAdmin(context))
{
throw new AuthorizeException(
"Only system admin can remove the admin group of a collection");
@@ -446,10 +450,10 @@ public class AuthorizeUtil
{
if (AuthorizeConfiguration.canCommunityAdminManageAdminGroup())
{
AuthorizeManager.authorizeAction(context, community,
authorizeService.authorizeAction(context, community,
Constants.ADMIN);
}
else if (!AuthorizeManager.isAdmin(context))
else if (!authorizeService.isAdmin(context))
{
throw new AuthorizeException(
"Only system admin are allowed to manage community admin");
@@ -475,14 +479,19 @@ public class AuthorizeUtil
public static void authorizeRemoveAdminGroup(Context context,
Community community) throws SQLException, AuthorizeException
{
Community parentCommunity = community.getParentCommunity();
List<Community> parentCommunities = community.getParentCommunities();
Community parentCommunity = null;
if(0 < parentCommunities.size())
{
parentCommunity = parentCommunities.get(0);
}
if (AuthorizeConfiguration.canCommunityAdminManageAdminGroup()
&& parentCommunity != null)
{
AuthorizeManager.authorizeAction(context, parentCommunity,
authorizeService.authorizeAction(context, parentCommunity,
Constants.ADMIN);
}
else if (!AuthorizeManager.isAdmin(context))
else if (!authorizeService.isAdmin(context))
{
throw new AuthorizeException(
"Only system admin can remove the admin group of the community");
@@ -505,26 +514,23 @@ public class AuthorizeUtil
public static void authorizeManagePolicy(Context c, ResourcePolicy rp)
throws SQLException, AuthorizeException
{
switch (rp.getResourceType())
switch (rp.getdSpaceObject().getType())
{
case Constants.BITSTREAM:
authorizeManageBitstreamPolicy(c, Bitstream.find(c, rp
.getResourceID()));
authorizeManageBitstreamPolicy(c, (Bitstream) rp.getdSpaceObject());
break;
case Constants.BUNDLE:
authorizeManageBundlePolicy(c, Bundle.find(c, rp.getResourceID()));
authorizeManageBundlePolicy(c, (Bundle) rp.getdSpaceObject());
break;
case Constants.ITEM:
authorizeManageItemPolicy(c, Item.find(c, rp.getResourceID()));
authorizeManageItemPolicy(c, (Item) rp.getdSpaceObject());
break;
case Constants.COLLECTION:
authorizeManageCollectionPolicy(c, Collection.find(c, rp
.getResourceID()));
authorizeManageCollectionPolicy(c, (Collection) rp.getdSpaceObject());
break;
case Constants.COMMUNITY:
authorizeManageCommunityPolicy(c, Community.find(c, rp
.getResourceID()));
authorizeManageCommunityPolicy(c, (Community) rp.getdSpaceObject());
break;
default:
@@ -552,19 +558,19 @@ public class AuthorizeUtil
boolean authorized = false;
if (AuthorizeConfiguration.canCollectionAdminPerformItemWithdrawn())
{
authorized = AuthorizeManager.authorizeActionBoolean(context, item
authorized = authorizeService.authorizeActionBoolean(context, item
.getOwningCollection(), Constants.ADMIN);
}
else if (AuthorizeConfiguration.canCommunityAdminPerformItemWithdrawn())
{
authorized = AuthorizeManager
authorized = authorizeService
.authorizeActionBoolean(context, item.getOwningCollection()
.getCommunities()[0], Constants.ADMIN);
.getCommunities().get(0), Constants.ADMIN);
}
if (!authorized)
{
authorized = AuthorizeManager.authorizeActionBoolean(context, item
authorized = authorizeService.authorizeActionBoolean(context, item
.getOwningCollection(), Constants.REMOVE, false);
}
@@ -592,29 +598,29 @@ public class AuthorizeUtil
public static void authorizeReinstateItem(Context context, Item item)
throws SQLException, AuthorizeException
{
Collection[] colls = item.getCollections();
List<Collection> colls = item.getCollections();
for (int i = 0; i < colls.length; i++)
for (Collection coll : colls)
{
if (!AuthorizeConfiguration
.canCollectionAdminPerformItemReinstatiate())
{
if (AuthorizeConfiguration
.canCommunityAdminPerformItemReinstatiate()
&& AuthorizeManager.authorizeActionBoolean(context,
colls[i].getCommunities()[0], Constants.ADMIN))
&& authorizeService.authorizeActionBoolean(context,
coll.getCommunities().get(0), Constants.ADMIN))
{
// authorized
}
else
{
AuthorizeManager.authorizeAction(context, colls[i],
authorizeService.authorizeAction(context, coll,
Constants.ADD, false);
}
}
else
{
AuthorizeManager.authorizeAction(context, colls[i],
authorizeService.authorizeAction(context, coll,
Constants.ADD);
}
}

View File

@@ -51,15 +51,15 @@ public class CollectionDropDown {
separator = " > ";
}
Community[] getCom = null;
List<Community> getCom = null;
StringBuffer name = new StringBuffer("");
getCom = col.getCommunities(); // all communities containing given collection
for (Community com : getCom)
{
name.insert(0, com.getMetadata("name") + separator);
name.insert(0, com.getName() + separator);
}
name.append(col.getMetadata("name"));
name.append(col.getName());
if (maxchars != 0)
{
@@ -81,12 +81,12 @@ public class CollectionDropDown {
* @return A sorted array of collection path entries (essentially collection/path pairs).
* @throws SQLException In case there are problems annotating a collection with its path.
*/
public static CollectionPathEntry[] annotateWithPaths(Collection[] collections) throws SQLException
public static CollectionPathEntry[] annotateWithPaths(List<Collection> collections) throws SQLException
{
CollectionPathEntry[] result = new CollectionPathEntry[collections.length];
for (int i = 0; i < collections.length; i++)
CollectionPathEntry[] result = new CollectionPathEntry[collections.size()];
for (int i = 0; i < collections.size(); i++)
{
Collection collection = collections[i];
Collection collection = collections.get(i);
CollectionPathEntry entry = new CollectionPathEntry(collection, collectionPath(collection));
result[i] = entry;
}
@@ -117,7 +117,7 @@ public class CollectionDropDown {
{
return this.path.compareTo(other.path);
}
return Integer.compare(this.collection.getID(), other.collection.getID());
return this.collection.getID().compareTo(other.collection.getID());
}
@Override

View File

@@ -136,7 +136,7 @@ public class DCInputSet
return false;
}
private static boolean doField(DCInput dcf, boolean addTitleAlternative,
protected boolean doField(DCInput dcf, boolean addTitleAlternative,
boolean addPublishedBefore)
{
String rowName = dcf.getElement() + "." + dcf.getQualifier();

View File

@@ -8,7 +8,6 @@
package org.dspace.app.util;
import org.dspace.core.ConfigurationManager;
import org.dspace.storage.rdbms.DatabaseManager;
import org.apache.log4j.Logger;
import javax.servlet.ServletContextListener;
@@ -34,7 +33,6 @@ public class DSpaceContextListener implements ServletContextListener
*/
public static final String DSPACE_CONFIG_PARAMETER = "dspace-config";
private AbstractDSpaceWebapp webApp;
/**
* Initialize any resources required by the application.
@@ -115,24 +113,6 @@ public class DSpaceContextListener implements ServletContextListener
"the DSpace configuration file is stored in a context variable, 'dspace-config', in \n" +
"either the local servlet or global context.\n\n",e);
}
/**
* Stage 3
*
* Register that this application is running.
*/
try {
Class webappClass = Class.forName("org.dspace.utils.DSpaceWebapp");
webApp = (AbstractDSpaceWebapp) webappClass.newInstance();
webApp.register();
} catch (ClassNotFoundException ex) {
event.getServletContext().log("Can't create webapp MBean: " + ex.getMessage());
} catch (InstantiationException ex) {
event.getServletContext().log("Can't create webapp MBean: " + ex.getMessage());
} catch (IllegalAccessException ex) {
event.getServletContext().log("Can't create webapp MBean: " + ex.getMessage());
}
}
/**
@@ -143,13 +123,8 @@ public class DSpaceContextListener implements ServletContextListener
@Override
public void contextDestroyed(ServletContextEvent event)
{
webApp.deregister();
try
{
// Remove the database pool
DatabaseManager.shutdown();
// Clean out the introspector
Introspector.flushCaches();

View File

@@ -0,0 +1,45 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.util;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
/**
* Class that registers the web application upon startup of the application.
*
* @author kevinvandevelde at atmire.com
*/
public class DSpaceWebappListener implements ServletContextListener {

    /**
     * The registered webapp record, or {@code null} if registration failed
     * (e.g. the DSpaceWebapp class was not on the classpath).
     */
    private AbstractDSpaceWebapp webApp;

    /**
     * Register that this web application is running. The concrete
     * {@code org.dspace.utils.DSpaceWebapp} class is loaded reflectively so
     * this listener has no compile-time dependency on it. Failure to load or
     * instantiate it is logged and otherwise ignored: the application can
     * still run without the registration MBean.
     *
     * @param event the servlet context startup event
     */
    @Override
    public void contextInitialized(ServletContextEvent event) {
        try {
            Class<?> webappClass = Class.forName("org.dspace.utils.DSpaceWebapp");
            webApp = (AbstractDSpaceWebapp) webappClass.newInstance();
            webApp.register();
        } catch (ClassNotFoundException | InstantiationException | IllegalAccessException ex) {
            // Log the full throwable so the stack trace is not lost.
            event.getServletContext().log("Can't create webapp MBean: " + ex.getMessage(), ex);
        }
    }

    /**
     * Deregister this web application on shutdown.
     *
     * @param sce the servlet context shutdown event
     */
    @Override
    public void contextDestroyed(ServletContextEvent sce) {
        // webApp is null when contextInitialized failed to register it;
        // guard to avoid an NPE during container shutdown.
        if (webApp != null) {
            webApp.deregister();
        }
    }
}

View File

@@ -87,6 +87,7 @@ public class DailyFileAppender extends FileAppender
/* (non-Javadoc)
* @see org.apache.log4j.FileAppender#activateOptions()
*/
@Override
public void activateOptions()
{
setFileName();
@@ -102,6 +103,7 @@ public class DailyFileAppender extends FileAppender
return this.mstrDatePattern;
}
@Override
public String getFile()
{
return this.mstrFileName;
@@ -133,6 +135,7 @@ public class DailyFileAppender extends FileAppender
}
}
@Override
public void setFile(String file)
{
// Trim spaces from both ends. The users probably does not want
@@ -157,6 +160,7 @@ public class DailyFileAppender extends FileAppender
/* (non-Javadoc)
* @see org.apache.log4j.WriterAppender#subAppend(org.apache.log4j.spi.LoggingEvent)
*/
@Override
protected void subAppend(LoggingEvent pobjEvent)
{
Date dtNow = new Date(System.currentTimeMillis());

View File

@@ -11,8 +11,7 @@ import java.sql.SQLException;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.content.*;
import java.io.IOException;
@@ -23,8 +22,9 @@ import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import org.apache.log4j.Logger;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import java.io.File;
@@ -35,10 +35,10 @@ import java.util.HashSet;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
import org.dspace.handle.factory.HandleServiceFactory;
import org.jdom.Element;
/**
@@ -53,80 +53,82 @@ public class GoogleMetadata
private final static Logger log = Logger.getLogger(GoogleMetadata.class);
private static final String GOOGLE_PREFIX = "google.";
protected static final String GOOGLE_PREFIX = "google.";
private Item item;
protected ItemService itemService;
private String itemURL;
protected Item item;
protected String itemURL;
// Configuration keys and fields
private static Map<String, String> configuredFields = new HashMap<String, String>();
protected static Map<String, String> configuredFields = new HashMap<String, String>();
// Google field names (e.g. citation_fieldname) and formatted metadata
// values
private ListMultimap<String, String> metadataMappings = ArrayListMultimap.create();
protected ListMultimap<String, String> metadataMappings = ArrayListMultimap.create();
public static final String TITLE = "citation_title";
protected final String TITLE = "citation_title";
public static final String JOURNAL_TITLE = "citation_journal_title";
protected final String JOURNAL_TITLE = "citation_journal_title";
public static final String PUBLISHER = "citation_publisher";
protected final String PUBLISHER = "citation_publisher";
public static final String AUTHORS = "citation_author";
protected final String AUTHORS = "citation_author";
public static final String DATE = "citation_date";
protected final String DATE = "citation_date";
public static final String VOLUME = "citation_volume";
protected final String VOLUME = "citation_volume";
public static final String ISSUE = "citation_issue";
protected final String ISSUE = "citation_issue";
public static final String FIRSTPAGE = "citation_firstpage";
protected final String FIRSTPAGE = "citation_firstpage";
public static final String LASTPAGE = "citation_lastpage";
protected final String LASTPAGE = "citation_lastpage";
public static final String DOI = "citation_doi";
protected final String DOI = "citation_doi";
public static final String PMID = "citation_pmid";
protected final String PMID = "citation_pmid";
public static final String ABSTRACT = "citation_abstract_html_url";
protected final String ABSTRACT = "citation_abstract_html_url";
public static final String FULLTEXT = "citation_fulltext_html_url";
protected final String FULLTEXT = "citation_fulltext_html_url";
public static final String PDF = "citation_pdf_url";
protected final String PDF = "citation_pdf_url";
public static final String ISSN = "citation_issn";
protected final String ISSN = "citation_issn";
public static final String ISBN = "citation_isbn";
protected final String ISBN = "citation_isbn";
public static final String LANGUAGE = "citation_language";
protected final String LANGUAGE = "citation_language";
public static final String KEYWORDS = "citation_keywords";
protected final String KEYWORDS = "citation_keywords";
public static final String CONFERENCE = "citation_conference";
protected final String CONFERENCE = "citation_conference";
public static final String DISSERTATION_ID = "identifiers.dissertation";
protected final String DISSERTATION_ID = "identifiers.dissertation";
public static final String DISSERTATION_NAME = "citation_dissertation_name";
protected final String DISSERTATION_NAME = "citation_dissertation_name";
public static final String DISSERTATION_INSTITUTION = "citation_dissertation_institution";
protected final String DISSERTATION_INSTITUTION = "citation_dissertation_institution";
public static final String PATENT_ID = "identifiers.patent";
protected final String PATENT_ID = "identifiers.patent";
public static final String PATENT_NUMBER = "citation_patent_number";
protected final String PATENT_NUMBER = "citation_patent_number";
public static final String PATENT_COUNTRY = "citation_patent_country";
protected final String PATENT_COUNTRY = "citation_patent_country";
public static final String TECH_REPORT_ID = "identifiers.technical_report";
protected final String TECH_REPORT_ID = "identifiers.technical_report";
public static final String TECH_REPORT_NUMBER = "citation_technical_report_number";
protected final String TECH_REPORT_NUMBER = "citation_technical_report_number";
public static final String TECH_REPORT_INSTITUTION = "citation_technical_report_institution";
protected final String TECH_REPORT_INSTITUTION = "citation_technical_report_institution";
private static final int SINGLE = 0;
protected final int SINGLE = 0;
private static final int MULTI = 1;
protected final int MULTI = 1;
private static final int ALL_FIELDS_IN_OPTION = 2;
protected final int ALL_FIELDS_IN_OPTION = 2;
// Load configured fields from google-metadata.properties
static
@@ -218,7 +220,8 @@ public class GoogleMetadata
// Hold onto the item in case we need to refresh a stale parse
this.item = item;
itemURL = HandleManager.resolveToURL(context, item.getHandle());
this.itemService = ContentServiceFactory.getInstance().getItemService();
itemURL = HandleServiceFactory.getInstance().getHandleService().resolveToURL(context, item.getHandle());
parseItem();
}
@@ -229,7 +232,7 @@ public class GoogleMetadata
* @param fieldName
* @return
*/
private boolean addSingleField(String fieldName)
protected boolean addSingleField(String fieldName)
{
String config = configuredFields.get(fieldName);
@@ -270,11 +273,11 @@ public class GoogleMetadata
}
}
Metadatum v = resolveMetadataField(config);
MetadataValue v = resolveMetadataField(config);
if (null != v && (null != v.value) && !v.value.trim().equals(""))
if (null != v && (null != v.getValue()) && !v.getValue().trim().equals(""))
{
metadataMappings.put(fieldName, v.value);
metadataMappings.put(fieldName, v.getValue());
return true;
}
else
@@ -291,10 +294,10 @@ public class GoogleMetadata
* @param configFilter
* @return The first configured match of metadata field for the item.
*/
private Metadatum resolveMetadataField(String configFilter)
protected MetadataValue resolveMetadataField(String configFilter)
{
ArrayList<Metadatum> fields = resolveMetadata(configFilter, SINGLE);
ArrayList<MetadataValue> fields = resolveMetadata(configFilter, SINGLE);
if (null != fields && fields.size() > 0)
{
return fields.get(0);
@@ -310,10 +313,10 @@ public class GoogleMetadata
* @return Aggregate of all matching metadata fields configured in the first
* option field-set to return any number of filter matches.
*/
private ArrayList<Metadatum> resolveMetadataFields(String configFilter)
protected ArrayList<MetadataValue> resolveMetadataFields(String configFilter)
{
ArrayList<Metadatum> fields = resolveMetadata(configFilter, MULTI);
ArrayList<MetadataValue> fields = resolveMetadata(configFilter, MULTI);
if (null != fields && fields.size() > 0)
{
return fields;
@@ -329,7 +332,7 @@ public class GoogleMetadata
* @param returnType
* @return Array of configuration -> item-field matches
*/
private ArrayList<Metadatum> resolveMetadata(String configFilter,
protected ArrayList<MetadataValue> resolveMetadata(String configFilter,
int returnType)
{
@@ -370,19 +373,19 @@ public class GoogleMetadata
int optionMatches = 0;
String[] components;
Metadatum[] values;
ArrayList<Metadatum> resolvedFields = new ArrayList<Metadatum>();
List<MetadataValue> values;
ArrayList<MetadataValue> resolvedFields = new ArrayList<MetadataValue>();
for (String field : optionFields)
{
components = parseComponents(field);
values = item.getMetadata(components[0], components[1],
values = itemService.getMetadata(item, components[0], components[1],
components[2], Item.ANY);
if (values.length > 0)
if (values.size() > 0)
{
for (Metadatum v : values)
for (MetadataValue v : values)
{
resolvedFields.add(v);
@@ -391,11 +394,13 @@ public class GoogleMetadata
{
if (!resolvedFields.isEmpty())
{
if (log.isDebugEnabled()) {
log.debug("Resolved Field Value For This Item:");
for (Metadatum r : resolvedFields)
if (log.isDebugEnabled())
{
log
.debug("Resolved Field Value For This Item:");
for (MetadataValue r : resolvedFields)
{
log.debug("{" + r.value + "}");
log.debug("{" + r.getValue() + "}");
}
}
return resolvedFields;
@@ -412,9 +417,9 @@ public class GoogleMetadata
if (log.isDebugEnabled())
{
log.debug("Resolved Field Values For This Item:");
for (Metadatum v : resolvedFields)
for (MetadataValue v : resolvedFields)
{
log.debug("{" + v.value + "}");
log.debug("{" + v.getValue() + "}");
}
}
@@ -444,7 +449,7 @@ public class GoogleMetadata
* @param configFilter
* @return
*/
private ArrayList<ArrayList<String>> parseOptions(String configFilter)
protected ArrayList<ArrayList<String>> parseOptions(String configFilter)
{
ArrayList<String> options = new ArrayList<String>();
@@ -538,7 +543,7 @@ public class GoogleMetadata
* - Value of one metadata field configuration
* @return A vector of raw field configurations.
*/
private ArrayList<String> parseFields(String configString)
protected ArrayList<String> parseFields(String configString)
{
ArrayList<String> fields = new ArrayList<String>();
@@ -558,7 +563,7 @@ public class GoogleMetadata
* The configured field for one metadata field map
* @return Schema, Element, Qualifier of metadata field
*/
private String[] parseComponents(String field)
protected String[] parseComponents(String field)
{
int index = 0;
@@ -581,7 +586,7 @@ public class GoogleMetadata
* The field identifier containing a wildcard character.
* @return Expanded field list.
*/
private ArrayList<String> parseWildcard(String field)
protected ArrayList<String> parseWildcard(String field)
{
if (!field.contains("*"))
@@ -600,11 +605,11 @@ public class GoogleMetadata
}
}
Metadatum[] allMD = item.getMetadata(components[0], components[1],
List<MetadataValue> allMD = itemService.getMetadata(item, components[0], components[1],
components[2], Item.ANY);
ArrayList<String> expandedDC = new ArrayList<String>();
for (Metadatum v : allMD)
for (MetadataValue v : allMD)
{
// De-dup multiple occurrences of field names in item
@@ -635,15 +640,17 @@ public class GoogleMetadata
* The Metadatum to construct a name for.
* @return The complete metadata field name.
*/
private String buildFieldName(Metadatum v)
protected String buildFieldName(MetadataValue v)
{
StringBuilder name = new StringBuilder();
name.append(v.schema + "." + v.element);
if (null != v.qualifier)
MetadataField metadataField = v.getMetadataField();
MetadataSchema metadataSchema = v.getMetadataField().getMetadataSchema();
name.append(metadataSchema.getName()).append(".").append(metadataField.getElement());
if (null != metadataField.getQualifier())
{
name.append("." + v.qualifier);
name.append("." + metadataField.getQualifier());
}
return name.toString();
@@ -655,7 +662,7 @@ public class GoogleMetadata
* value strings. Field names & values contained in metadataMappings.
*
*/
private void parseItem()
protected void parseItem()
{
// TITLE
@@ -1001,7 +1008,7 @@ public class GoogleMetadata
* @param item
* @return URL that the PDF can be directly downloaded from
*/
private String getPDFSimpleUrl(Item item)
protected String getPDFSimpleUrl(Item item)
{
try {
Bitstream bitstream = findLinkableFulltext(item);
@@ -1043,31 +1050,30 @@ public class GoogleMetadata
* @return
* @throws SQLException
*/
private Bitstream findLinkableFulltext(Item item) throws SQLException {
protected Bitstream findLinkableFulltext(Item item) throws SQLException {
Bitstream bestSoFar = null;
Bundle[] contentBundles = item.getBundles("ORIGINAL");
int bitstreamCount = 0;
List<Bundle> contentBundles = itemService.getBundles(item, "ORIGINAL");
for (Bundle bundle : contentBundles) {
int primaryBitstreamId = bundle.getPrimaryBitstreamID();
Bitstream[] bitstreams = bundle.getBitstreams();
for (Bitstream candidate : bitstreams) {
if (candidate.getID() == primaryBitstreamId) { // is primary -> use this one
if (isPublic(candidate)) {
return candidate;
}
} else
{
if (bestSoFar == null && isPublic(candidate)) { //if bestSoFar is null but the candidate is not public you don't use it and try to find another
bestSoFar = candidate;
}
}
}
}
List<BundleBitstream> bundleBitstreams = bundle.getBitstreams();
for (BundleBitstream bundleBitstream : bundleBitstreams) {
Bitstream candidate = bundleBitstream.getBitstream();
if (candidate.equals(bundle.getPrimaryBitstream())) { // is primary -> use this one
if (isPublic(candidate)) {
return candidate;
}
} else {
if (bestSoFar == null && isPublic(candidate)) { //if bestSoFar is null but the candidate is not public you don't use it and try to find another
bestSoFar = candidate;
}
}
}
}
return bestSoFar;
}
private boolean isPublic(Bitstream bitstream) {
protected boolean isPublic(Bitstream bitstream) {
if (bitstream == null) {
return false;
}
@@ -1075,7 +1081,7 @@ public class GoogleMetadata
Context context = null;
try {
context = new Context();
result = AuthorizeManager.authorizeActionBoolean(context, bitstream, Constants.READ, true);
result = AuthorizeServiceFactory.getInstance().getAuthorizeService().authorizeActionBoolean(context, bitstream, Constants.READ, true);
} catch (SQLException e) {
log.error("Cannot determine whether bitstream is public, assuming it isn't. bitstream_id=" + bitstream.getID(), e);
} finally {
@@ -1094,11 +1100,11 @@ public class GoogleMetadata
* @param delim
* to delimit field values with
*/
private void addAggregateValues(String FIELD, String delim)
protected void addAggregateValues(String field, String delimiter)
{
String authorConfig = configuredFields.get(FIELD);
ArrayList<Metadatum> fields = resolveMetadataFields(authorConfig);
String authorConfig = configuredFields.get(field);
ArrayList<MetadataValue> fields = resolveMetadataFields(authorConfig);
if (null != fields && !fields.isEmpty())
{
@@ -1106,16 +1112,16 @@ public class GoogleMetadata
StringBuilder fieldMetadata = new StringBuilder();
int count = 0;
for (Metadatum field : fields)
for (MetadataValue metadataValue : fields)
{
fieldMetadata.append(field.value);
fieldMetadata.append(metadataValue.getValue());
if (count < fields.size() - 1)
{
fieldMetadata.append(delim + " ");
fieldMetadata.append(delimiter).append(" ");
count++;
}
}
metadataMappings.put(FIELD, fieldMetadata.toString());
metadataMappings.put(field, fieldMetadata.toString());
}
}
@@ -1123,27 +1129,27 @@ public class GoogleMetadata
* If metadata field contains multiple values, then add each value to the map separately
* @param FIELD
*/
private void addMultipleValues(String FIELD)
protected void addMultipleValues(String FIELD)
{
String fieldConfig = configuredFields.get(FIELD);
ArrayList<Metadatum> fields = resolveMetadataFields(fieldConfig);
ArrayList<MetadataValue> fields = resolveMetadataFields(fieldConfig);
if (null != fields && !fields.isEmpty())
{
for (Metadatum field : fields)
for (MetadataValue field : fields)
{
//TODO if this is author field, first-name first
metadataMappings.put(FIELD, field.value);
metadataMappings.put(FIELD, field.getValue());
}
}
}
/**
* Determine, based on config values, if this item is a dissertation.
*
* @return boolean
*/
private boolean itemIsDissertation()
protected boolean itemIsDissertation()
{
String dConfig = configuredFields.get(DISSERTATION_ID);
@@ -1162,7 +1168,7 @@ public class GoogleMetadata
*
* @return boolean
*/
private boolean itemIsPatent()
protected boolean itemIsPatent()
{
String dConfig = configuredFields.get(PATENT_ID);
@@ -1181,7 +1187,7 @@ public class GoogleMetadata
*
* @return boolean
*/
private boolean itemIsTechReport()
protected boolean itemIsTechReport()
{
String dConfig = configuredFields.get(TECH_REPORT_ID);
@@ -1203,7 +1209,7 @@ public class GoogleMetadata
* @param dConfig
* @return
*/
private boolean identifyItemType(String dConfig)
protected boolean identifyItemType(String dConfig)
{
// FIXME: Shouldn't have to parse identifiers for every identification.
@@ -1249,21 +1255,21 @@ public class GoogleMetadata
StringBuilder sb = new StringBuilder();
for (String value : mdPairs.keySet())
{
sb.append(value + " | ");
sb.append(value).append(" | ");
}
// Check resolved/present metadata fields against configured values
ArrayList<Metadatum> presentMD = resolveMetadataFields(sb.toString());
ArrayList<MetadataValue> presentMD = resolveMetadataFields(sb.toString());
if (null != presentMD && presentMD.size() != 0)
{
for (Metadatum v : presentMD)
for (MetadataValue v : presentMD)
{
String fieldName = buildFieldName(v);
if (mdPairs.containsKey(fieldName))
{
for (String configValue : mdPairs.get(fieldName))
{
if (configValue.equals(v.value))
if (configValue.equals(v.getValue()))
{
return true;
}

Some files were not shown because too many files have changed in this diff Show More